diff --git a/google/cloud/pubsub_v1/gapic/__init__.py b/.github/snippet-bot.yml similarity index 100% rename from google/cloud/pubsub_v1/gapic/__init__.py rename to .github/snippet-bot.yml diff --git a/.gitignore b/.gitignore index b87e1ed58..b9daa52f1 100644 --- a/.gitignore +++ b/.gitignore @@ -46,6 +46,7 @@ pip-log.txt # Built documentation docs/_build bigquery/docs/generated +docs.metadata # Virtual environment env/ @@ -57,4 +58,4 @@ system_tests/local_test_setup # Make sure a generated file isn't accidentally committed. pylintrc -pylintrc.test \ No newline at end of file +pylintrc.test diff --git a/.kokoro/build.sh b/.kokoro/build.sh index 6a68ebd10..95bc0a438 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -36,4 +36,10 @@ python3.6 -m pip uninstall --yes --quiet nox-automation python3.6 -m pip install --upgrade --quiet nox python3.6 -m nox --version -python3.6 -m nox +# If NOX_SESSION is set, it only runs the specified session, +# otherwise run all the sessions. +if [[ -n "${NOX_SESSION:-}" ]]; then + python3.6 -m nox -s "${NOX_SESSION:-}" +else + python3.6 -m nox +fi diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile new file mode 100644 index 000000000..412b0b56a --- /dev/null +++ b/.kokoro/docker/docs/Dockerfile @@ -0,0 +1,98 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ubuntu:20.04 + +ENV DEBIAN_FRONTEND noninteractive + +# Ensure local Python is preferred over distribution Python. 
+ENV PATH /usr/local/bin:$PATH + +# Install dependencies. +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + apt-transport-https \ + build-essential \ + ca-certificates \ + curl \ + dirmngr \ + git \ + gpg-agent \ + graphviz \ + libbz2-dev \ + libdb5.3-dev \ + libexpat1-dev \ + libffi-dev \ + liblzma-dev \ + libreadline-dev \ + libsnappy-dev \ + libssl-dev \ + libsqlite3-dev \ + portaudio19-dev \ + redis-server \ + software-properties-common \ + ssh \ + sudo \ + tcl \ + tcl-dev \ + tk \ + tk-dev \ + uuid-dev \ + wget \ + zlib1g-dev \ + && add-apt-repository universe \ + && apt-get update \ + && apt-get -y install jq \ + && apt-get clean autoclean \ + && apt-get autoremove -y \ + && rm -rf /var/lib/apt/lists/* \ + && rm -f /var/cache/apt/archives/*.deb + + +COPY fetch_gpg_keys.sh /tmp +# Install the desired versions of Python. +RUN set -ex \ + && export GNUPGHOME="$(mktemp -d)" \ + && echo "disable-ipv6" >> "${GNUPGHOME}/dirmngr.conf" \ + && /tmp/fetch_gpg_keys.sh \ + && for PYTHON_VERSION in 3.7.8 3.8.5; do \ + wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \ + && wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \ + && gpg --batch --verify python-${PYTHON_VERSION}.tar.xz.asc python-${PYTHON_VERSION}.tar.xz \ + && rm -r python-${PYTHON_VERSION}.tar.xz.asc \ + && mkdir -p /usr/src/python-${PYTHON_VERSION} \ + && tar -xJC /usr/src/python-${PYTHON_VERSION} --strip-components=1 -f python-${PYTHON_VERSION}.tar.xz \ + && rm python-${PYTHON_VERSION}.tar.xz \ + && cd /usr/src/python-${PYTHON_VERSION} \ + && ./configure \ + --enable-shared \ + # This works only on Python 2.7 and throws a warning on every other + # version, but seems otherwise harmless. 
+ --enable-unicode=ucs4 \ + --with-system-ffi \ + --without-ensurepip \ + && make -j$(nproc) \ + && make install \ + && ldconfig \ + ; done \ + && rm -rf "${GNUPGHOME}" \ + && rm -rf /usr/src/python* \ + && rm -rf ~/.cache/ + +RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ + && python3.7 /tmp/get-pip.py \ + && python3.8 /tmp/get-pip.py \ + && rm /tmp/get-pip.py + +CMD ["python3.7"] diff --git a/.kokoro/docker/docs/fetch_gpg_keys.sh b/.kokoro/docker/docs/fetch_gpg_keys.sh new file mode 100755 index 000000000..d653dd868 --- /dev/null +++ b/.kokoro/docker/docs/fetch_gpg_keys.sh @@ -0,0 +1,45 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# A script to fetch gpg keys with retry. +# Avoid jinja parsing the file. +# + +function retry { + if [[ "${#}" -le 1 ]]; then + echo "Usage: ${0} retry_count commands.." + exit 1 + fi + local retries=${1} + local command="${@:2}" + until [[ "${retries}" -le 0 ]]; do + $command && return 0 + if [[ $? 
-ne 0 ]]; then + echo "command failed, retrying" + ((retries--)) + fi + done + return 1 +} + +# 3.6.9, 3.7.5 (Ned Deily) +retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ + 0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D + +# 3.8.0 (Ɓukasz Langa) +retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ + E3FF2839C048B25C084DEBE9B26995E310250568 + +# diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg index b602fa542..7815c2d6a 100644 --- a/.kokoro/docs/common.cfg +++ b/.kokoro/docs/common.cfg @@ -11,12 +11,12 @@ action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-pubsub/.kokoro/trampoline.sh" +build_file: "python-pubsub/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" + value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs" } env_vars: { key: "TRAMPOLINE_BUILD_FILE" @@ -28,6 +28,23 @@ env_vars: { value: "docs-staging" } +env_vars: { + key: "V2_STAGING_BUCKET" + value: "docs-staging-v2-staging" +} + +# It will upload the docker image after successful builds. +env_vars: { + key: "TRAMPOLINE_IMAGE_UPLOAD" + value: "true" +} + +# It will always build the docker image. +env_vars: { + key: "TRAMPOLINE_DOCKERFILE" + value: ".kokoro/docker/docs/Dockerfile" +} + # Fetch the token needed for reporting release status to GitHub before_action { fetch_keystore { diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg new file mode 100644 index 000000000..111810782 --- /dev/null +++ b/.kokoro/docs/docs-presubmit.cfg @@ -0,0 +1,17 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "STAGING_BUCKET" + value: "gcloud-python-test" +} + +env_vars: { + key: "V2_STAGING_BUCKET" + value: "gcloud-python-test" +} + +# We only upload the image in the main `docs` build. 
+env_vars: { + key: "TRAMPOLINE_IMAGE_UPLOAD" + value: "false" +} diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh index f462c727b..8acb14e80 100755 --- a/.kokoro/publish-docs.sh +++ b/.kokoro/publish-docs.sh @@ -18,26 +18,16 @@ set -eo pipefail # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 -cd github/python-pubsub - -# Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation +export PATH="${HOME}/.local/bin:${PATH}" # Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version +python3 -m pip install --user --upgrade --quiet nox +python3 -m nox --version # build docs nox -s docs -python3 -m pip install gcp-docuploader - -# install a json parser -sudo apt-get update -sudo apt-get -y install software-properties-common -sudo add-apt-repository universe -sudo apt-get update -sudo apt-get -y install jq +python3 -m pip install --user gcp-docuploader # create metadata python3 -m docuploader create-metadata \ @@ -52,4 +42,23 @@ python3 -m docuploader create-metadata \ cat docs.metadata # upload docs -python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket docs-staging +python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" + + +# docfx yaml files +nox -s docfx + +# create metadata. 
+python3 -m docuploader create-metadata \ + --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ + --version=$(python3 setup.py --version) \ + --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ + --distribution-name=$(python3 setup.py --name) \ + --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ + --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ + --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) + +cat docs.metadata + +# upload docs +python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh new file mode 100755 index 000000000..719bcd5ba --- /dev/null +++ b/.kokoro/trampoline_v2.sh @@ -0,0 +1,487 @@ +#!/usr/bin/env bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# trampoline_v2.sh +# +# This script does 3 things. +# +# 1. Prepare the Docker image for the test +# 2. Run the Docker with appropriate flags to run the test +# 3. Upload the newly built Docker image +# +# in a way that is somewhat compatible with trampoline_v1. +# +# To run this script, first download few files from gcs to /dev/shm. +# (/dev/shm is passed into the container as KOKORO_GFILE_DIR). 
+# +# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm +# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm +# +# Then run the script. +# .kokoro/trampoline_v2.sh +# +# These environment variables are required: +# TRAMPOLINE_IMAGE: The docker image to use. +# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile. +# +# You can optionally change these environment variables: +# TRAMPOLINE_IMAGE_UPLOAD: +# (true|false): Whether to upload the Docker image after the +# successful builds. +# TRAMPOLINE_BUILD_FILE: The script to run in the docker container. +# TRAMPOLINE_WORKSPACE: The workspace path in the docker container. +# Defaults to /workspace. +# Potentially there are some repo specific envvars in .trampolinerc in +# the project root. + + +set -euo pipefail + +TRAMPOLINE_VERSION="2.0.5" + +if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then + readonly IO_COLOR_RED="$(tput setaf 1)" + readonly IO_COLOR_GREEN="$(tput setaf 2)" + readonly IO_COLOR_YELLOW="$(tput setaf 3)" + readonly IO_COLOR_RESET="$(tput sgr0)" +else + readonly IO_COLOR_RED="" + readonly IO_COLOR_GREEN="" + readonly IO_COLOR_YELLOW="" + readonly IO_COLOR_RESET="" +fi + +function function_exists { + [ $(LC_ALL=C type -t $1)"" == "function" ] +} + +# Logs a message using the given color. The first argument must be one +# of the IO_COLOR_* variables defined above, such as +# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the +# given color. The log message will also have an RFC-3339 timestamp +# prepended (in UTC). You can disable the color output by setting +# TERM=vt100. 
+function log_impl() { + local color="$1" + shift + local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")" + echo "================================================================" + echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}" + echo "================================================================" +} + +# Logs the given message with normal coloring and a timestamp. +function log() { + log_impl "${IO_COLOR_RESET}" "$@" +} + +# Logs the given message in green with a timestamp. +function log_green() { + log_impl "${IO_COLOR_GREEN}" "$@" +} + +# Logs the given message in yellow with a timestamp. +function log_yellow() { + log_impl "${IO_COLOR_YELLOW}" "$@" +} + +# Logs the given message in red with a timestamp. +function log_red() { + log_impl "${IO_COLOR_RED}" "$@" +} + +readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX) +readonly tmphome="${tmpdir}/h" +mkdir -p "${tmphome}" + +function cleanup() { + rm -rf "${tmpdir}" +} +trap cleanup EXIT + +RUNNING_IN_CI="${RUNNING_IN_CI:-false}" + +# The workspace in the container, defaults to /workspace. +TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}" + +pass_down_envvars=( + # TRAMPOLINE_V2 variables. + # Tells scripts whether they are running as part of CI or not. + "RUNNING_IN_CI" + # Indicates which CI system we're in. + "TRAMPOLINE_CI" + # Indicates the version of the script. + "TRAMPOLINE_VERSION" +) + +log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}" + +# Detect which CI systems we're in. If we're in any of the CI systems +# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be +# the name of the CI system. Both envvars will be passing down to the +# container for telling which CI system we're in. +if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then + # descriptive env var for indicating it's on CI. + RUNNING_IN_CI="true" + TRAMPOLINE_CI="kokoro" + if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then + if [[ ! 
-f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then + log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting." + exit 1 + fi + # This service account will be activated later. + TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" + else + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + gcloud auth list + fi + log_yellow "Configuring Container Registry access" + gcloud auth configure-docker --quiet + fi + pass_down_envvars+=( + # KOKORO dynamic variables. + "KOKORO_BUILD_NUMBER" + "KOKORO_BUILD_ID" + "KOKORO_JOB_NAME" + "KOKORO_GIT_COMMIT" + "KOKORO_GITHUB_COMMIT" + "KOKORO_GITHUB_PULL_REQUEST_NUMBER" + "KOKORO_GITHUB_PULL_REQUEST_COMMIT" + # For Build Cop Bot + "KOKORO_GITHUB_COMMIT_URL" + "KOKORO_GITHUB_PULL_REQUEST_URL" + ) +elif [[ "${TRAVIS:-}" == "true" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="travis" + pass_down_envvars+=( + "TRAVIS_BRANCH" + "TRAVIS_BUILD_ID" + "TRAVIS_BUILD_NUMBER" + "TRAVIS_BUILD_WEB_URL" + "TRAVIS_COMMIT" + "TRAVIS_COMMIT_MESSAGE" + "TRAVIS_COMMIT_RANGE" + "TRAVIS_JOB_NAME" + "TRAVIS_JOB_NUMBER" + "TRAVIS_JOB_WEB_URL" + "TRAVIS_PULL_REQUEST" + "TRAVIS_PULL_REQUEST_BRANCH" + "TRAVIS_PULL_REQUEST_SHA" + "TRAVIS_PULL_REQUEST_SLUG" + "TRAVIS_REPO_SLUG" + "TRAVIS_SECURE_ENV_VARS" + "TRAVIS_TAG" + ) +elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="github-workflow" + pass_down_envvars+=( + "GITHUB_WORKFLOW" + "GITHUB_RUN_ID" + "GITHUB_RUN_NUMBER" + "GITHUB_ACTION" + "GITHUB_ACTIONS" + "GITHUB_ACTOR" + "GITHUB_REPOSITORY" + "GITHUB_EVENT_NAME" + "GITHUB_EVENT_PATH" + "GITHUB_SHA" + "GITHUB_REF" + "GITHUB_HEAD_REF" + "GITHUB_BASE_REF" + ) +elif [[ "${CIRCLECI:-}" == "true" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="circleci" + pass_down_envvars+=( + "CIRCLE_BRANCH" + "CIRCLE_BUILD_NUM" + "CIRCLE_BUILD_URL" + "CIRCLE_COMPARE_URL" + 
"CIRCLE_JOB" + "CIRCLE_NODE_INDEX" + "CIRCLE_NODE_TOTAL" + "CIRCLE_PREVIOUS_BUILD_NUM" + "CIRCLE_PROJECT_REPONAME" + "CIRCLE_PROJECT_USERNAME" + "CIRCLE_REPOSITORY_URL" + "CIRCLE_SHA1" + "CIRCLE_STAGE" + "CIRCLE_USERNAME" + "CIRCLE_WORKFLOW_ID" + "CIRCLE_WORKFLOW_JOB_ID" + "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS" + "CIRCLE_WORKFLOW_WORKSPACE_ID" + ) +fi + +# Configure the service account for pulling the docker image. +function repo_root() { + local dir="$1" + while [[ ! -d "${dir}/.git" ]]; do + dir="$(dirname "$dir")" + done + echo "${dir}" +} + +# Detect the project root. In CI builds, we assume the script is in +# the git tree and traverse from there, otherwise, traverse from `pwd` +# to find `.git` directory. +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + PROGRAM_PATH="$(realpath "$0")" + PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")" + PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")" +else + PROJECT_ROOT="$(repo_root $(pwd))" +fi + +log_yellow "Changing to the project root: ${PROJECT_ROOT}." +cd "${PROJECT_ROOT}" + +# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need +# to use this environment variable in `PROJECT_ROOT`. +if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then + + mkdir -p "${tmpdir}/gcloud" + gcloud_config_dir="${tmpdir}/gcloud" + + log_yellow "Using isolated gcloud config: ${gcloud_config_dir}." + export CLOUDSDK_CONFIG="${gcloud_config_dir}" + + log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication." + gcloud auth activate-service-account \ + --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}" + log_yellow "Configuring Container Registry access" + gcloud auth configure-docker --quiet +fi + +required_envvars=( + # The basic trampoline configurations. + "TRAMPOLINE_IMAGE" + "TRAMPOLINE_BUILD_FILE" +) + +if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then + source "${PROJECT_ROOT}/.trampolinerc" +fi + +log_yellow "Checking environment variables." 
+for e in "${required_envvars[@]}" +do + if [[ -z "${!e:-}" ]]; then + log "Missing ${e} env var. Aborting." + exit 1 + fi +done + +# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1 +# script: e.g. "github/repo-name/.kokoro/run_tests.sh" +TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}" +log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}" + +# ignore error on docker operations and test execution +set +e + +log_yellow "Preparing Docker image." +# We only download the docker image in CI builds. +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + # Download the docker image specified by `TRAMPOLINE_IMAGE` + + # We may want to add --max-concurrent-downloads flag. + + log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}." + if docker pull "${TRAMPOLINE_IMAGE}"; then + log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="true" + else + log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="false" + fi +else + # For local run, check if we have the image. + if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then + has_image="true" + else + has_image="false" + fi +fi + + +# The default user for a Docker container has uid 0 (root). To avoid +# creating root-owned files in the build directory we tell docker to +# use the current user ID. +user_uid="$(id -u)" +user_gid="$(id -g)" +user_name="$(id -un)" + +# To allow docker in docker, we add the user to the docker group in +# the host os. +docker_gid=$(cut -d: -f3 < <(getent group docker)) + +update_cache="false" +if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then + # Build the Docker image from the source. 
+ context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}") + docker_build_flags=( + "-f" "${TRAMPOLINE_DOCKERFILE}" + "-t" "${TRAMPOLINE_IMAGE}" + "--build-arg" "UID=${user_uid}" + "--build-arg" "USERNAME=${user_name}" + ) + if [[ "${has_image}" == "true" ]]; then + docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}") + fi + + log_yellow "Start building the docker image." + if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then + echo "docker build" "${docker_build_flags[@]}" "${context_dir}" + fi + + # ON CI systems, we want to suppress docker build logs, only + # output the logs when it fails. + if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + if docker build "${docker_build_flags[@]}" "${context_dir}" \ + > "${tmpdir}/docker_build.log" 2>&1; then + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + cat "${tmpdir}/docker_build.log" + fi + + log_green "Finished building the docker image." + update_cache="true" + else + log_red "Failed to build the Docker image, aborting." + log_yellow "Dumping the build logs:" + cat "${tmpdir}/docker_build.log" + exit 1 + fi + else + if docker build "${docker_build_flags[@]}" "${context_dir}"; then + log_green "Finished building the docker image." + update_cache="true" + else + log_red "Failed to build the Docker image, aborting." + exit 1 + fi + fi +else + if [[ "${has_image}" != "true" ]]; then + log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting." + exit 1 + fi +fi + +# We use an array for the flags so they are easier to document. +docker_flags=( + # Remove the container after it exists. + "--rm" + + # Use the host network. + "--network=host" + + # Run in priviledged mode. We are not using docker for sandboxing or + # isolation, just for packaging our dev tools. + "--privileged" + + # Run the docker script with the user id. Because the docker image gets to + # write in ${PWD} you typically want this to be your user id. + # To allow docker in docker, we need to use docker gid on the host. 
+ "--user" "${user_uid}:${docker_gid}" + + # Pass down the USER. + "--env" "USER=${user_name}" + + # Mount the project directory inside the Docker container. + "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}" + "--workdir" "${TRAMPOLINE_WORKSPACE}" + "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}" + + # Mount the temporary home directory. + "--volume" "${tmphome}:/h" + "--env" "HOME=/h" + + # Allow docker in docker. + "--volume" "/var/run/docker.sock:/var/run/docker.sock" + + # Mount the /tmp so that docker in docker can mount the files + # there correctly. + "--volume" "/tmp:/tmp" + # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR + # TODO(tmatsuo): This part is not portable. + "--env" "TRAMPOLINE_SECRET_DIR=/secrets" + "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile" + "--env" "KOKORO_GFILE_DIR=/secrets/gfile" + "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore" + "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore" +) + +# Add an option for nicer output if the build gets a tty. +if [[ -t 0 ]]; then + docker_flags+=("-it") +fi + +# Passing down env vars +for e in "${pass_down_envvars[@]}" +do + if [[ -n "${!e:-}" ]]; then + docker_flags+=("--env" "${e}=${!e}") + fi +done + +# If arguments are given, all arguments will become the commands run +# in the container, otherwise run TRAMPOLINE_BUILD_FILE. +if [[ $# -ge 1 ]]; then + log_yellow "Running the given commands '" "${@:1}" "' in the container." + readonly commands=("${@:1}") + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" + fi + docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" +else + log_yellow "Running the tests in a Docker container." 
+ docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}") + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" + fi + docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" +fi + + +test_retval=$? + +if [[ ${test_retval} -eq 0 ]]; then + log_green "Build finished with ${test_retval}" +else + log_red "Build finished with ${test_retval}" +fi + +# Only upload it when the test passes. +if [[ "${update_cache}" == "true" ]] && \ + [[ $test_retval == 0 ]] && \ + [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then + log_yellow "Uploading the Docker image." + if docker push "${TRAMPOLINE_IMAGE}"; then + log_green "Finished uploading the Docker image." + else + log_red "Failed uploading the Docker image." + fi + # Call trampoline_after_upload_hook if it's defined. + if function_exists trampoline_after_upload_hook; then + trampoline_after_upload_hook + fi + +fi + +exit "${test_retval}" diff --git a/.trampolinerc b/.trampolinerc new file mode 100644 index 000000000..995ee2911 --- /dev/null +++ b/.trampolinerc @@ -0,0 +1,51 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Template for .trampolinerc + +# Add required env vars here. +required_envvars+=( + "STAGING_BUCKET" + "V2_STAGING_BUCKET" +) + +# Add env vars which are passed down into the container here. +pass_down_envvars+=( + "STAGING_BUCKET" + "V2_STAGING_BUCKET" +) + +# Prevent unintentional override on the default image. 
+if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \ + [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image." + exit 1 +fi + +# Define the default value if it makes sense. +if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then + TRAMPOLINE_IMAGE_UPLOAD="" +fi + +if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + TRAMPOLINE_IMAGE="" +fi + +if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then + TRAMPOLINE_DOCKERFILE="" +fi + +if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then + TRAMPOLINE_BUILD_FILE="" +fi diff --git a/README.rst b/README.rst index a92a43087..926e51f1e 100644 --- a/README.rst +++ b/README.rst @@ -60,11 +60,13 @@ dependencies. Supported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.5 +Python >= 3.6 Deprecated Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python == 2.7. Python 2.7 support will be removed on January 1, 2020. +Python == 2.7. + +The last version of this library compatible with Python 2.7 is google-cloud-pubsub==1.7.0. Mac/Linux diff --git a/UPGRADING.md b/UPGRADING.md new file mode 100644 index 000000000..9ab5d073a --- /dev/null +++ b/UPGRADING.md @@ -0,0 +1,160 @@ +# 2.0.0 Migration Guide + +The 2.0 release of the `google-cloud-pubsub` client is a significant upgrade based +on a [next-gen code generator](https://github.com/googleapis/gapic-generator-python), +and includes substantial interface changes. Existing code written for earlier versions +of this library will likely require updates to use this version. This document +describes the changes that have been made, and what you need to do to update your usage. + +If you experience issues or have questions, please file an +[issue](https://github.com/googleapis/python-pubsub/issues). + + +## Supported Python Versions + +> **WARNING**: Breaking change + +The 2.0.0 release requires Python 3.6+. 
+ + +## Method Calls + +> **WARNING**: Breaking change + +Almost all methods that send requests to the backend expect request objects. We +provide a script that will convert most common use cases. + +* Install the library + +```py +python3 -m pip install google-cloud-pubsub +``` + +* The script `fixup_pubsub_v1_keywords.py` is shipped with the library. It expects +an input directory (with the code to convert) and an empty destination directory. + +```sh +$ scripts/fixup_pubsub_v1_keywords.py --input-directory .samples/ --output-directory samples/ +``` + +**Before:** +```py +from google.cloud import pubsub + +publisher = pubsub.PublisherClient() + +project_path = "projects/{}".format(PROJECT_ID) +topics = publisher.list_topics(project_path) +``` + + +**After:** +```py +from google.cloud import pubsub + +publisher = pubsub.PublisherClient() + +project_path = f"projects/{PROJECT_ID}" +topics = publisher.list_topics(request={"project": project_path}) +``` + +### More Details + +In `google-cloud-pubsub<2.0.0`, parameters required by the API were positional +parameters and optional parameters were keyword parameters. + +**Before:** +```py + def list_topics( + self, + project, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): +``` + +In the 2.0.0 release, almost all methods that interact with the backend have a single +positional parameter `request`. Method docstrings indicate whether a parameter is +required or optional. + +> **NOTE:** The exception are hand written methods such as `publisher.publish()` and +> `subscriber.subscribe()` that implement additional logic (e.g. request batching) and +> sit on top of the API methods from the generated parts of the library. The signatures +> of these methods have in large part been preserved. + +Some methods have additional keyword only parameters. 
The available parameters depend +on the [`google.api.method_signature` annotation](https://github.com/googleapis/python-pubsub/blob/master/google/cloud/pubsub_v1/proto/pubsub.proto#L88) +specified by the API producer. + + +**After:** +```py + def list_topics( + self, + request: pubsub.ListTopicsRequest = None, + *, + project: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTopicsPager: +``` + +> **NOTE:** The `request` parameter and flattened keyword parameters for the API are +> mutually exclusive. Passing both will result in an error. + + +Both of these calls are valid: + +```py +response = client.list_topics( + request={ + "project": project_path, + "metadata": [("foo", "bar"), ("baz", "quux")], + } +) +``` + +```py +response = client.list_topics( + project=project_path, + metadata=[("foo", "bar"), ("baz", "quux")], +) +``` + +This call is invalid because it mixes `request` with a keyword argument `metadata`. +Executing this code will result in an error: + +```py +response = client.synthesize_speech( + request={"project": project_path}, + metadata=[("foo", "bar"), ("baz", "quux")], +) +``` + +> **NOTE:** The `request` parameter of some methods can also contain a more rich set of +> options that are otherwise not available as explicit keyword only parameters, thus +> these _must_ be passed through `request`. + + +## Removed Utility Methods + +> **WARNING**: Breaking change + +Some utility methods such as publisher client's `subscription_path()` have been removed +and now only exist in the relevant client, e.g. `subscriber.subscription_path()`. 
+ +The `project_path()` method has been removed from both the publisher and subscriber +client, this path must now be constructed manually: +```py +project_path = f"project/{PROJECT_ID}" +``` + +## Removed `client_config` Parameter + +The publisher and subscriber clients cannot be constructed with `client_config` +argument anymore. If you want to customize retry and timeout settings for a particular +method, you need to do it upon method invocation by passing the custom `timeout` and +`retry` arguments, respectively. diff --git a/docs/UPGRADING.md b/docs/UPGRADING.md new file mode 120000 index 000000000..01097c8c0 --- /dev/null +++ b/docs/UPGRADING.md @@ -0,0 +1 @@ +../UPGRADING.md \ No newline at end of file diff --git a/docs/conf.py b/docs/conf.py index fb9f1ca32..7bd17033d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -20,6 +20,10 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + __version__ = "" # -- General configuration ------------------------------------------------ @@ -90,7 +94,12 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build"] +exclude_patterns = [ + "_build", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] # The reST default role (used for this markup: `text`) to use for all # documents. diff --git a/docs/index.rst b/docs/index.rst index ae3053625..06b09605f 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -2,15 +2,34 @@ .. include:: multiprocessing.rst + API Documentation ----------------- +.. 
note:: + + The client library version (currently ``2.x``) should not be confused with the + backend API version (currently ``v1``), hence some references to ``v1`` can be found + across the documentation. + +.. toctree:: + :maxdepth: 3 + + Publisher Client + Subscriber Client + Types + + +Migration Guide +--------------- + +See the guide below for instructions on migrating to the 2.x release of this library. + .. toctree:: - :maxdepth: 3 + :maxdepth: 2 + + UPGRADING - publisher/index - subscriber/index - types Changelog --------- diff --git a/docs/publisher/api/client.rst b/docs/publisher/api/client.rst index 47a3aa3d5..d1a54ff5e 100644 --- a/docs/publisher/api/client.rst +++ b/docs/publisher/api/client.rst @@ -1,5 +1,5 @@ -Publisher Client API -==================== +Publisher Client API (v1) +========================= .. automodule:: google.cloud.pubsub_v1.publisher.client :members: diff --git a/docs/subscriber/api/client.rst b/docs/subscriber/api/client.rst index 965880c5a..d26243eba 100644 --- a/docs/subscriber/api/client.rst +++ b/docs/subscriber/api/client.rst @@ -1,5 +1,5 @@ -Subscriber Client API -===================== +Subscriber Client API (v1) +========================== .. automodule:: google.cloud.pubsub_v1.subscriber.client :members: diff --git a/google/cloud/pubsub_v1/gapic/publisher_client.py b/google/cloud/pubsub_v1/gapic/publisher_client.py deleted file mode 100644 index e8853d841..000000000 --- a/google/cloud/pubsub_v1/gapic/publisher_client.py +++ /dev/null @@ -1,1292 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Accesses the google.pubsub.v1 Publisher API.""" - -import collections -from copy import deepcopy -import functools -import pkg_resources -import six -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.path_template -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import grpc - -from google.cloud.pubsub_v1.gapic import publisher_client_config -from google.cloud.pubsub_v1.gapic.transports import publisher_grpc_transport -from google.cloud.pubsub_v1.proto import pubsub_pb2 -from google.cloud.pubsub_v1.proto import pubsub_pb2_grpc -from google.iam.v1 import iam_policy_pb2 -from google.iam.v1 import iam_policy_pb2_grpc -from google.iam.v1 import options_pb2 -from google.iam.v1 import policy_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-pubsub",).version - - -# TODO: remove conditional import after Python 2 support is dropped -if six.PY2: - from collections import Mapping -else: - from collections.abc import Mapping - - -def _merge_dict(d1, d2): - # Modifies d1 in-place to take values from d2 - # if the nested keys from d2 are present in d1. 
- # https://stackoverflow.com/a/10704003/4488789 - for k, v2 in d2.items(): - v1 = d1.get(k) # returns None if v1 has no such key - if v1 is None: - raise Exception("{} is not recognized by client_config".format(k)) - if isinstance(v1, Mapping) and isinstance(v2, Mapping): - _merge_dict(v1, v2) - else: - d1[k] = v2 - return d1 - - -class PublisherClient(object): - """ - The service that an application uses to manipulate topics, and to send - messages to a topic. - """ - - SERVICE_ADDRESS = "pubsub.googleapis.com:443" - """The default address of the service.""" - - # The scopes needed to make gRPC calls to all of the methods defined in - # this service - _DEFAULT_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ) - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.pubsub.v1.Publisher" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PublisherClient: The constructed client. 
- """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project, - ) - - @classmethod - def subscription_path(cls, project, subscription): - """Return a fully-qualified subscription string.""" - return google.api_core.path_template.expand( - "projects/{project}/subscriptions/{subscription}", - project=project, - subscription=subscription, - ) - - @classmethod - def topic_path(cls, project, topic): - """Return a fully-qualified topic string.""" - return google.api_core.path_template.expand( - "projects/{project}/topics/{topic}", project=project, topic=topic, - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.PublisherGrpcTransport, - Callable[[~.Credentials, type], ~.PublisherGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. 
- This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - default_client_config = deepcopy(publisher_client_config.config) - - if client_config is None: - client_config = default_client_config - else: - client_config = _merge_dict(default_client_config, client_config) - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=publisher_grpc_transport.PublisherGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." 
- ) - self.transport = transport - else: - self.transport = publisher_grpc_transport.PublisherGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials, - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME], - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def create_topic( - self, - name, - labels=None, - message_storage_policy=None, - kms_key_name=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates the given topic with the given name. See the resource name - rules. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> name = client.topic_path('[PROJECT]', '[TOPIC]') - >>> - >>> response = client.create_topic(name) - - Args: - name (str): Required. The name of the topic. It must have the format - ``"projects/{project}/topics/{topic}"``. ``{topic}`` must start with a - letter, and contain only letters (``[A-Za-z]``), numbers (``[0-9]``), - dashes (``-``), underscores (``_``), periods (``.``), tildes (``~``), - plus (``+``) or percent signs (``%``). It must be between 3 and 255 - characters in length, and it must not start with ``"goog"``. 
- labels (dict[str -> str]): See Creating and - managing labels. - message_storage_policy (Union[dict, ~google.cloud.pubsub_v1.types.MessageStoragePolicy]): Policy constraining the set of Google Cloud Platform regions where messages - published to the topic may be stored. If not present, then no constraints - are in effect. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.MessageStoragePolicy` - kms_key_name (str): The resource name of the Cloud KMS CryptoKey to be used to protect - access to messages published on this topic. - - The expected format is - ``projects/*/locations/*/keyRings/*/cryptoKeys/*``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Topic` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "create_topic" not in self._inner_api_calls: - self._inner_api_calls[ - "create_topic" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_topic, - default_retry=self._method_configs["CreateTopic"].retry, - default_timeout=self._method_configs["CreateTopic"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.Topic( - name=name, - labels=labels, - message_storage_policy=message_storage_policy, - kms_key_name=kms_key_name, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_topic"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_topic( - self, - topic, - update_mask, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates an existing topic. Note that certain properties of a - topic are not modifiable. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> topic_name = 'projects/my-project/topics/my-topic' - >>> topic_labels = {'source': 'external'} - >>> topic = {'name': topic_name, 'labels': topic_labels} - >>> - >>> paths_element = 'labels' - >>> paths = [paths_element] - >>> update_mask = {'paths': paths} - >>> - >>> response = client.update_topic(topic, update_mask) - - Args: - topic (Union[dict, ~google.cloud.pubsub_v1.types.Topic]): Required. The updated topic object. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.Topic` - update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Required. Indicates which fields in the provided topic to update. - Must be specified and non-empty. 
Note that if ``update_mask`` contains - "message_storage_policy" but the ``message_storage_policy`` is not set - in the ``topic`` provided above, then the updated value is determined by - the policy configured at the project or organization level. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Topic` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "update_topic" not in self._inner_api_calls: - self._inner_api_calls[ - "update_topic" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_topic, - default_retry=self._method_configs["UpdateTopic"].retry, - default_timeout=self._method_configs["UpdateTopic"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.UpdateTopicRequest(topic=topic, update_mask=update_mask,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("topic.name", topic.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_topic"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def publish( - self, - topic, - messages, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the - topic does not exist. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') - >>> data = b'' - >>> messages_element = {'data': data} - >>> messages = [messages_element] - >>> - >>> response = client.publish(topic, messages) - - Args: - topic (str): Required. The messages in the request will be published on this - topic. Format is ``projects/{project}/topics/{topic}``. - messages (list[Union[dict, ~google.cloud.pubsub_v1.types.PubsubMessage]]): Required. The messages to publish. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.PubsubMessage` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. 
- timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.PublishResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "publish" not in self._inner_api_calls: - self._inner_api_calls[ - "publish" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.publish, - default_retry=self._method_configs["Publish"].retry, - default_timeout=self._method_configs["Publish"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.PublishRequest(topic=topic, messages=messages,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("topic", topic)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["publish"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_topic( - self, - topic, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the configuration of a topic. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') - >>> - >>> response = client.get_topic(topic) - - Args: - topic (str): Required. The name of the topic to get. 
Format is - ``projects/{project}/topics/{topic}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Topic` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_topic" not in self._inner_api_calls: - self._inner_api_calls[ - "get_topic" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_topic, - default_retry=self._method_configs["GetTopic"].retry, - default_timeout=self._method_configs["GetTopic"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.GetTopicRequest(topic=topic,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("topic", topic)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_topic"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_topics( - self, - project, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists matching topics. 
- - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> project = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_topics(project): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_topics(project).pages: - ... for element in page: - ... # process element - ... pass - - Args: - project (str): Required. The name of the project in which to list topics. Format is - ``projects/{project-id}``. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.GRPCIterator` instance. - An iterable of :class:`~google.cloud.pubsub_v1.types.Topic` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "list_topics" not in self._inner_api_calls: - self._inner_api_calls[ - "list_topics" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_topics, - default_retry=self._method_configs["ListTopics"].retry, - default_timeout=self._method_configs["ListTopics"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.ListTopicsRequest(project=project, page_size=page_size,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("project", project)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_topics"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="topics", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def list_topic_subscriptions( - self, - topic, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists the names of the attached subscriptions on this topic. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') - >>> - >>> # Iterate over all results - >>> for element in client.list_topic_subscriptions(topic): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_topic_subscriptions(topic).pages: - ... for element in page: - ... # process element - ... pass - - Args: - topic (str): Required. The name of the topic that subscriptions are attached to. - Format is ``projects/{project}/topics/{topic}``. 
- page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.GRPCIterator` instance. - An iterable of :class:`str` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "list_topic_subscriptions" not in self._inner_api_calls: - self._inner_api_calls[ - "list_topic_subscriptions" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_topic_subscriptions, - default_retry=self._method_configs["ListTopicSubscriptions"].retry, - default_timeout=self._method_configs["ListTopicSubscriptions"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.ListTopicSubscriptionsRequest( - topic=topic, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("topic", topic)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_topic_subscriptions"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="subscriptions", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def list_topic_snapshots( - self, - topic, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists the names of the snapshots on this topic. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') - >>> - >>> # Iterate over all results - >>> for element in client.list_topic_snapshots(topic): - ... # process element - ... 
pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_topic_snapshots(topic).pages: - ... for element in page: - ... # process element - ... pass - - Args: - topic (str): Required. The name of the topic that snapshots are attached to. - Format is ``projects/{project}/topics/{topic}``. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.GRPCIterator` instance. - An iterable of :class:`str` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "list_topic_snapshots" not in self._inner_api_calls: - self._inner_api_calls[ - "list_topic_snapshots" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_topic_snapshots, - default_retry=self._method_configs["ListTopicSnapshots"].retry, - default_timeout=self._method_configs["ListTopicSnapshots"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.ListTopicSnapshotsRequest( - topic=topic, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("topic", topic)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_topic_snapshots"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="snapshots", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def delete_topic( - self, - topic, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes the topic with the given name. Returns ``NOT_FOUND`` if the - topic does not exist. After a topic is deleted, a new topic may be - created with the same name; this is an entirely new topic with none of - the old configuration or subscriptions. Existing subscriptions to this - topic are not deleted, but their ``topic`` field is set to - ``_deleted-topic_``. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') - >>> - >>> client.delete_topic(topic) - - Args: - topic (str): Required. Name of the topic to delete. Format is - ``projects/{project}/topics/{topic}``. 
- retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_topic" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_topic" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_topic, - default_retry=self._method_configs["DeleteTopic"].retry, - default_timeout=self._method_configs["DeleteTopic"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.DeleteTopicRequest(topic=topic,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("topic", topic)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_topic"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def set_iam_policy( - self, - resource, - policy, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Sets the access control policy on the specified resource. Replaces - any existing policy. - - Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` - errors. 
- - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> # TODO: Initialize `policy`: - >>> policy = {} - >>> - >>> response = client.set_iam_policy(resource, policy) - - Args: - resource (str): REQUIRED: The resource for which the policy is being specified. - See the operation documentation for the appropriate value for this field. - policy (Union[dict, ~google.cloud.pubsub_v1.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The - size of the policy is limited to a few 10s of KB. An empty policy is a - valid policy but certain Cloud Platform services (such as Projects) - might reject them. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.Policy` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Policy` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "set_iam_policy" not in self._inner_api_calls: - self._inner_api_calls[ - "set_iam_policy" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.set_iam_policy, - default_retry=self._method_configs["SetIamPolicy"].retry, - default_timeout=self._method_configs["SetIamPolicy"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["set_iam_policy"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_iam_policy( - self, - resource, - options_=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the access control policy for a resource. Returns an empty policy - if the resource exists and does not have a policy set. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> response = client.get_iam_policy(resource) - - Args: - resource (str): REQUIRED: The resource for which the policy is being requested. - See the operation documentation for the appropriate value for this field. - options_ (Union[dict, ~google.cloud.pubsub_v1.types.GetPolicyOptions]): OPTIONAL: A ``GetPolicyOptions`` object for specifying options to - ``GetIamPolicy``. This field is only used by Cloud IAM. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.GetPolicyOptions` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. 
If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Policy` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_iam_policy" not in self._inner_api_calls: - self._inner_api_calls[ - "get_iam_policy" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_iam_policy, - default_retry=self._method_configs["GetIamPolicy"].retry, - default_timeout=self._method_configs["GetIamPolicy"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.GetIamPolicyRequest( - resource=resource, options=options_, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_iam_policy"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def test_iam_permissions( - self, - resource, - permissions, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Returns permissions that a caller has on the specified resource. If the - resource does not exist, this will return an empty set of - permissions, not a `NOT_FOUND` error. 
- - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for authorization - checking. This operation may "fail open" without warning. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> # TODO: Initialize `permissions`: - >>> permissions = [] - >>> - >>> response = client.test_iam_permissions(resource, permissions) - - Args: - resource (str): REQUIRED: The resource for which the policy detail is being requested. - See the operation documentation for the appropriate value for this field. - permissions (list[str]): The set of permissions to check for the ``resource``. Permissions - with wildcards (such as '*' or 'storage.*') are not allowed. For more - information see `IAM - Overview `__. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.TestIamPermissionsResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "test_iam_permissions" not in self._inner_api_calls: - self._inner_api_calls[ - "test_iam_permissions" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.test_iam_permissions, - default_retry=self._method_configs["TestIamPermissions"].retry, - default_timeout=self._method_configs["TestIamPermissions"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["test_iam_permissions"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def detach_subscription( - self, - subscription, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Detaches a subscription from this topic. All messages retained in - the subscription are dropped. Subsequent ``Pull`` and ``StreamingPull`` - requests will return FAILED_PRECONDITION. If the subscription is a push - subscription, pushes to the endpoint will stop. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> response = client.detach_subscription(subscription) - - Args: - subscription (str): Required. The subscription to detach. Format is - ``projects/{project}/subscriptions/{subscription}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. 
Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.DetachSubscriptionResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "detach_subscription" not in self._inner_api_calls: - self._inner_api_calls[ - "detach_subscription" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.detach_subscription, - default_retry=self._method_configs["DetachSubscription"].retry, - default_timeout=self._method_configs["DetachSubscription"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.DetachSubscriptionRequest(subscription=subscription,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription", subscription)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["detach_subscription"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/google/cloud/pubsub_v1/gapic/publisher_client_config.py b/google/cloud/pubsub_v1/gapic/publisher_client_config.py deleted file mode 100644 index 8c96fd10b..000000000 --- a/google/cloud/pubsub_v1/gapic/publisher_client_config.py +++ /dev/null @@ -1,111 +0,0 @@ -config = { - "interfaces": { - "google.pubsub.v1.Publisher": { - "retry_codes": { - "idempotent": ["ABORTED", "UNAVAILABLE", "UNKNOWN"], - "non_idempotent2": [], - "non_idempotent": ["UNAVAILABLE"], - "idempotent2": ["DEADLINE_EXCEEDED", 
"UNAVAILABLE"], - "none": [], - "publish": [ - "ABORTED", - "CANCELLED", - "DEADLINE_EXCEEDED", - "INTERNAL", - "RESOURCE_EXHAUSTED", - "UNAVAILABLE", - "UNKNOWN", - ], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000, - }, - "messaging": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 5000, - "rpc_timeout_multiplier": 1.3, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 60000, - }, - }, - "methods": { - "CreateTopic": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateTopic": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "Publish": { - "timeout_millis": 20000, - "retry_codes_name": "publish", - "retry_params_name": "messaging", - "bundling": { - "element_count_threshold": 100, - "element_count_limit": 1000, - "request_byte_threshold": 1048576, - "request_byte_limit": 10485760, - "delay_threshold_millis": 10, - }, - }, - "GetTopic": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListTopics": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListTopicSubscriptions": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListTopicSnapshots": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent2", - "retry_params_name": "default", - }, - "DeleteTopic": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "SetIamPolicy": { - "timeout_millis": 60000, - "retry_codes_name": 
"non_idempotent", - "retry_params_name": "default", - }, - "GetIamPolicy": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "TestIamPermissions": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "DetachSubscription": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent2", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/google/cloud/pubsub_v1/gapic/subscriber_client.py b/google/cloud/pubsub_v1/gapic/subscriber_client.py deleted file mode 100644 index 1e24ba02a..000000000 --- a/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ /dev/null @@ -1,2006 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Accesses the google.pubsub.v1 Subscriber API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import google.api_core.protobuf_helpers -import grpc - -from google.cloud.pubsub_v1.gapic import subscriber_client_config -from google.cloud.pubsub_v1.gapic.transports import subscriber_grpc_transport -from google.cloud.pubsub_v1.proto import pubsub_pb2 -from google.cloud.pubsub_v1.proto import pubsub_pb2_grpc -from google.iam.v1 import iam_policy_pb2 -from google.iam.v1 import iam_policy_pb2_grpc -from google.iam.v1 import options_pb2 -from google.iam.v1 import policy_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import timestamp_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-pubsub",).version - - -class SubscriberClient(object): - """ - The service that an application uses to manipulate subscriptions and - to consume messages from a subscription via the ``Pull`` method or by - establishing a bi-directional stream using the ``StreamingPull`` method. - """ - - SERVICE_ADDRESS = "pubsub.googleapis.com:443" - """The default address of the service.""" - - # The scopes needed to make gRPC calls to all of the methods defined in - # this service - _DEFAULT_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ) - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. 
- _INTERFACE_NAME = "google.pubsub.v1.Subscriber" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SubscriberClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project, - ) - - @classmethod - def snapshot_path(cls, project, snapshot): - """Return a fully-qualified snapshot string.""" - return google.api_core.path_template.expand( - "projects/{project}/snapshots/{snapshot}", - project=project, - snapshot=snapshot, - ) - - @classmethod - def subscription_path(cls, project, subscription): - """Return a fully-qualified subscription string.""" - return google.api_core.path_template.expand( - "projects/{project}/subscriptions/{subscription}", - project=project, - subscription=subscription, - ) - - @classmethod - def topic_path(cls, project, topic): - """Return a fully-qualified topic string.""" - return google.api_core.path_template.expand( - "projects/{project}/topics/{topic}", project=project, topic=topic, - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.SubscriberGrpcTransport, - Callable[[~.Credentials, type], ~.SubscriberGrpcTransport]): A transport - instance, responsible for actually making the API calls. 
- The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. 
- if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = subscriber_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=subscriber_grpc_transport.SubscriberGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = subscriber_grpc_transport.SubscriberGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials, - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME], - ) - - # Save a dictionary of cached API call functions. 
- # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def create_subscription( - self, - name, - topic, - push_config=None, - ack_deadline_seconds=None, - retain_acked_messages=None, - message_retention_duration=None, - labels=None, - enable_message_ordering=None, - expiration_policy=None, - filter_=None, - dead_letter_policy=None, - retry_policy=None, - detached=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a subscription to a given topic. See the resource name - rules. If the subscription already exists, returns ``ALREADY_EXISTS``. - If the corresponding topic doesn't exist, returns ``NOT_FOUND``. - - If the name is not provided in the request, the server will assign a - random name for this subscription on the same project as the topic, - conforming to the `resource name - format `__. - The generated name is populated in the returned Subscription object. - Note that for REST API requests, you must specify a name in the request. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> name = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') - >>> - >>> response = client.create_subscription(name, topic) - - Args: - name (str): Required. The name of the subscription. It must have the format - ``"projects/{project}/subscriptions/{subscription}"``. - ``{subscription}`` must start with a letter, and contain only letters - (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), underscores - (``_``), periods (``.``), tildes (``~``), plus (``+``) or percent signs - (``%``). It must be between 3 and 255 characters in length, and it must - not start with ``"goog"``. - topic (str): Required. 
The name of the topic from which this subscription is - receiving messages. Format is ``projects/{project}/topics/{topic}``. The - value of this field will be ``_deleted-topic_`` if the topic has been - deleted. - push_config (Union[dict, ~google.cloud.pubsub_v1.types.PushConfig]): If push delivery is used with this subscription, this field is used - to configure it. An empty ``pushConfig`` signifies that the subscriber - will pull and ack messages using API methods. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.PushConfig` - ack_deadline_seconds (int): The approximate amount of time (on a best-effort basis) Pub/Sub - waits for the subscriber to acknowledge receipt before resending the - message. In the interval after the message is delivered and before it is - acknowledged, it is considered to be outstanding. During that time - period, the message will not be redelivered (on a best-effort basis). - - For pull subscriptions, this value is used as the initial value for the - ack deadline. To override this value for a given message, call - ``ModifyAckDeadline`` with the corresponding ``ack_id`` if using - non-streaming pull or send the ``ack_id`` in a - ``StreamingModifyAckDeadlineRequest`` if using streaming pull. The - minimum custom deadline you can specify is 10 seconds. The maximum - custom deadline you can specify is 600 seconds (10 minutes). If this - parameter is 0, a default value of 10 seconds is used. - - For push delivery, this value is also used to set the request timeout - for the call to the push endpoint. - - If the subscriber never acknowledges the message, the Pub/Sub system - will eventually redeliver the message. - retain_acked_messages (bool): Indicates whether to retain acknowledged messages. If true, then - messages are not expunged from the subscription's backlog, even if they - are acknowledged, until they fall out of the - ``message_retention_duration`` window. 
This must be true if you would - like to Seek to a timestamp. - message_retention_duration (Union[dict, ~google.cloud.pubsub_v1.types.Duration]): How long to retain unacknowledged messages in the subscription's - backlog, from the moment a message is published. If - ``retain_acked_messages`` is true, then this also configures the - retention of acknowledged messages, and thus configures how far back in - time a ``Seek`` can be done. Defaults to 7 days. Cannot be more than 7 - days or less than 10 minutes. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.Duration` - labels (dict[str -> str]): See Creating and - managing labels. - enable_message_ordering (bool): If true, messages published with the same ``ordering_key`` in - ``PubsubMessage`` will be delivered to the subscribers in the order in - which they are received by the Pub/Sub system. Otherwise, they may be - delivered in any order. EXPERIMENTAL: This feature is part of a closed - alpha release. This API might be changed in backward-incompatible ways - and is not recommended for production use. It is not subject to any SLA - or deprecation policy. - expiration_policy (Union[dict, ~google.cloud.pubsub_v1.types.ExpirationPolicy]): A policy that specifies the conditions for this subscription's - expiration. A subscription is considered active as long as any connected - subscriber is successfully consuming messages from the subscription or - is issuing operations on the subscription. If ``expiration_policy`` is - not set, a *default policy* with ``ttl`` of 31 days will be used. The - minimum allowed value for ``expiration_policy.ttl`` is 1 day. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.ExpirationPolicy` - filter_ (str): An expression written in the Pub/Sub `filter - language `__. 
If - non-empty, then only ``PubsubMessage``\ s whose ``attributes`` field - matches the filter are delivered on this subscription. If empty, then no - messages are filtered out. - dead_letter_policy (Union[dict, ~google.cloud.pubsub_v1.types.DeadLetterPolicy]): A policy that specifies the conditions for dead lettering messages - in this subscription. If dead_letter_policy is not set, dead lettering - is disabled. - - The Cloud Pub/Sub service account associated with this subscriptions's - parent project (i.e., - service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com) must - have permission to Acknowledge() messages on this subscription. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.DeadLetterPolicy` - retry_policy (Union[dict, ~google.cloud.pubsub_v1.types.RetryPolicy]): A policy that specifies how Pub/Sub retries message delivery for this - subscription. - - If not set, the default retry policy is applied. This generally implies - that messages will be retried as soon as possible for healthy subscribers. - RetryPolicy will be triggered on NACKs or acknowledgement deadline - exceeded events for a given message. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.RetryPolicy` - detached (bool): Indicates whether the subscription is detached from its topic. - Detached subscriptions don't receive messages from their topic and don't - retain any backlog. ``Pull`` and ``StreamingPull`` requests will return - FAILED_PRECONDITION. If the subscription is a push subscription, pushes - to the endpoint will not be made. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. 
Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Subscription` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "create_subscription" not in self._inner_api_calls: - self._inner_api_calls[ - "create_subscription" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_subscription, - default_retry=self._method_configs["CreateSubscription"].retry, - default_timeout=self._method_configs["CreateSubscription"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.Subscription( - name=name, - topic=topic, - push_config=push_config, - ack_deadline_seconds=ack_deadline_seconds, - retain_acked_messages=retain_acked_messages, - message_retention_duration=message_retention_duration, - labels=labels, - enable_message_ordering=enable_message_ordering, - expiration_policy=expiration_policy, - filter=filter_, - dead_letter_policy=dead_letter_policy, - retry_policy=retry_policy, - detached=detached, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_subscription"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_subscription( - self, - subscription, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - 
metadata=None, - ): - """ - Gets the configuration details of a subscription. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> response = client.get_subscription(subscription) - - Args: - subscription (str): Required. The name of the subscription to get. Format is - ``projects/{project}/subscriptions/{sub}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Subscription` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "get_subscription" not in self._inner_api_calls: - self._inner_api_calls[ - "get_subscription" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_subscription, - default_retry=self._method_configs["GetSubscription"].retry, - default_timeout=self._method_configs["GetSubscription"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.GetSubscriptionRequest(subscription=subscription,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription", subscription)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_subscription"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_subscription( - self, - subscription, - update_mask, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates an existing subscription. Note that certain properties of a - subscription, such as its topic, are not modifiable. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> ack_deadline_seconds = 42 - >>> subscription_name = 'projects/my-project/subscriptions/my-subscription' - >>> subscription = { - ... 'name': subscription_name, - ... 'ack_deadline_seconds': ack_deadline_seconds, - ... } - >>> paths_element = 'ack_deadline_seconds' - >>> paths = [paths_element] - >>> update_mask = {'paths': paths} - >>> - >>> response = client.update_subscription(subscription, update_mask) - - Args: - subscription (Union[dict, ~google.cloud.pubsub_v1.types.Subscription]): Required. The updated subscription object. 
- - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.Subscription` - update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Required. Indicates which fields in the provided subscription to update. - Must be specified and non-empty. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Subscription` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "update_subscription" not in self._inner_api_calls: - self._inner_api_calls[ - "update_subscription" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_subscription, - default_retry=self._method_configs["UpdateSubscription"].retry, - default_timeout=self._method_configs["UpdateSubscription"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.UpdateSubscriptionRequest( - subscription=subscription, update_mask=update_mask, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription.name", subscription.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_subscription"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_subscriptions( - self, - project, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists matching subscriptions. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> project = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_subscriptions(project): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_subscriptions(project).pages: - ... for element in page: - ... # process element - ... pass - - Args: - project (str): Required. The name of the project in which to list subscriptions. - Format is ``projects/{project-id}``. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. 
If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.GRPCIterator` instance. - An iterable of :class:`~google.cloud.pubsub_v1.types.Subscription` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "list_subscriptions" not in self._inner_api_calls: - self._inner_api_calls[ - "list_subscriptions" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_subscriptions, - default_retry=self._method_configs["ListSubscriptions"].retry, - default_timeout=self._method_configs["ListSubscriptions"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.ListSubscriptionsRequest( - project=project, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("project", project)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_subscriptions"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="subscriptions", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def delete_subscription( - self, - subscription, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes an existing subscription. All messages retained in the - subscription are immediately dropped. Calls to ``Pull`` after deletion - will return ``NOT_FOUND``. After a subscription is deleted, a new one - may be created with the same name, but the new one has no association - with the old subscription or its topic unless the same topic is - specified. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> client.delete_subscription(subscription) - - Args: - subscription (str): Required. The subscription to delete. 
Format is - ``projects/{project}/subscriptions/{sub}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_subscription" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_subscription" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_subscription, - default_retry=self._method_configs["DeleteSubscription"].retry, - default_timeout=self._method_configs["DeleteSubscription"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.DeleteSubscriptionRequest(subscription=subscription,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription", subscription)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_subscription"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_snapshot( - self, - snapshot, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the configuration details of a snapshot. 
Snapshots are used in - Seek - operations, which allow you to manage message acknowledgments in bulk. That - is, you can set the acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> snapshot = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') - >>> - >>> response = client.get_snapshot(snapshot) - - Args: - snapshot (str): Required. The name of the snapshot to get. Format is - ``projects/{project}/snapshots/{snap}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Snapshot` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "get_snapshot" not in self._inner_api_calls: - self._inner_api_calls[ - "get_snapshot" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_snapshot, - default_retry=self._method_configs["GetSnapshot"].retry, - default_timeout=self._method_configs["GetSnapshot"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.GetSnapshotRequest(snapshot=snapshot,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("snapshot", snapshot)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_snapshot"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def modify_ack_deadline( - self, - subscription, - ack_ids, - ack_deadline_seconds, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Modifies the ack deadline for a specific message. This method is - useful to indicate that more time is needed to process a message by the - subscriber, or to make the message available for redelivery if the - processing was interrupted. Note that this does not modify the - subscription-level ``ackDeadlineSeconds`` used for subsequent messages. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> # TODO: Initialize `ack_ids`: - >>> ack_ids = [] - >>> - >>> # TODO: Initialize `ack_deadline_seconds`: - >>> ack_deadline_seconds = 0 - >>> - >>> client.modify_ack_deadline(subscription, ack_ids, ack_deadline_seconds) - - Args: - subscription (str): Required. The name of the subscription. Format is - ``projects/{project}/subscriptions/{sub}``. - ack_ids (list[str]): Required. List of acknowledgment IDs. 
- ack_deadline_seconds (int): Required. The new ack deadline with respect to the time this request - was sent to the Pub/Sub system. For example, if the value is 10, the new - ack deadline will expire 10 seconds after the ``ModifyAckDeadline`` call - was made. Specifying zero might immediately make the message available - for delivery to another subscriber client. This typically results in an - increase in the rate of message redeliveries (that is, duplicates). The - minimum deadline you can specify is 0 seconds. The maximum deadline you - can specify is 600 seconds (10 minutes). - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "modify_ack_deadline" not in self._inner_api_calls: - self._inner_api_calls[ - "modify_ack_deadline" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.modify_ack_deadline, - default_retry=self._method_configs["ModifyAckDeadline"].retry, - default_timeout=self._method_configs["ModifyAckDeadline"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.ModifyAckDeadlineRequest( - subscription=subscription, - ack_ids=ack_ids, - ack_deadline_seconds=ack_deadline_seconds, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription", subscription)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["modify_ack_deadline"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def acknowledge( - self, - subscription, - ack_ids, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Acknowledges the messages associated with the ``ack_ids`` in the - ``AcknowledgeRequest``. The Pub/Sub system can remove the relevant - messages from the subscription. - - Acknowledging a message whose ack deadline has expired may succeed, but - such a message may be redelivered later. Acknowledging a message more - than once will not result in an error. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> # TODO: Initialize `ack_ids`: - >>> ack_ids = [] - >>> - >>> client.acknowledge(subscription, ack_ids) - - Args: - subscription (str): Required. The subscription whose message is being acknowledged. - Format is ``projects/{project}/subscriptions/{sub}``. - ack_ids (list[str]): Required. 
The acknowledgment ID for the messages being acknowledged - that was returned by the Pub/Sub system in the ``Pull`` response. Must - not be empty. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "acknowledge" not in self._inner_api_calls: - self._inner_api_calls[ - "acknowledge" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.acknowledge, - default_retry=self._method_configs["Acknowledge"].retry, - default_timeout=self._method_configs["Acknowledge"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.AcknowledgeRequest( - subscription=subscription, ack_ids=ack_ids, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription", subscription)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["acknowledge"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def pull( - self, - subscription, - max_messages, - return_immediately=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Pulls 
messages from the server. The server may return - ``UNAVAILABLE`` if there are too many concurrent pull requests pending - for the given subscription. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> # TODO: Initialize `max_messages`: - >>> max_messages = 0 - >>> - >>> response = client.pull(subscription, max_messages) - - Args: - subscription (str): Required. The subscription from which messages should be pulled. - Format is ``projects/{project}/subscriptions/{sub}``. - max_messages (int): Required. The maximum number of messages to return for this request. Must - be a positive integer. The Pub/Sub system may return fewer than the number - specified. - return_immediately (bool): Optional. If this field set to true, the system will respond - immediately even if it there are no messages available to return in the - ``Pull`` response. Otherwise, the system may wait (for a bounded amount - of time) until at least one message is available, rather than returning - no messages. Warning: setting this field to ``true`` is discouraged - because it adversely impacts the performance of ``Pull`` operations. We - recommend that users do not set this field. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.PullResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. 
- google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "pull" not in self._inner_api_calls: - self._inner_api_calls["pull"] = google.api_core.gapic_v1.method.wrap_method( - self.transport.pull, - default_retry=self._method_configs["Pull"].retry, - default_timeout=self._method_configs["Pull"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.PullRequest( - subscription=subscription, - max_messages=max_messages, - return_immediately=return_immediately, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription", subscription)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["pull"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def streaming_pull( - self, - requests, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Establishes a stream with the server, which sends messages down to - the client. The client streams acknowledgements and ack deadline - modifications back to the server. The server will close the stream and - return the status on any error. The server may close the stream with - status ``UNAVAILABLE`` to reassign server-side resources, in which case, - the client should re-establish the stream. Flow control can be achieved - by configuring the underlying RPC channel. 
- - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> # TODO: Initialize `stream_ack_deadline_seconds`: - >>> stream_ack_deadline_seconds = 0 - >>> request = {'subscription': subscription, 'stream_ack_deadline_seconds': stream_ack_deadline_seconds} - >>> - >>> requests = [request] - >>> for element in client.streaming_pull(requests): - ... # process element - ... pass - - Args: - requests (iterator[dict|google.cloud.pubsub_v1.proto.pubsub_pb2.StreamingPullRequest]): The input objects. If a dict is provided, it must be of the - same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.StreamingPullRequest` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - Iterable[~google.cloud.pubsub_v1.types.StreamingPullResponse]. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "streaming_pull" not in self._inner_api_calls: - self._inner_api_calls[ - "streaming_pull" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.streaming_pull, - default_retry=self._method_configs["StreamingPull"].retry, - default_timeout=self._method_configs["StreamingPull"].timeout, - client_info=self._client_info, - ) - - # Wrappers in api-core should not automatically pre-fetch the first - # stream result, as this breaks the stream when re-opening it. - # https://github.com/googleapis/python-pubsub/issues/93#issuecomment-630762257 - self.transport.streaming_pull._prefetch_first_result_ = False - - return self._inner_api_calls["streaming_pull"]( - requests, retry=retry, timeout=timeout, metadata=metadata - ) - - def modify_push_config( - self, - subscription, - push_config, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Modifies the ``PushConfig`` for a specified subscription. - - This may be used to change a push subscription to a pull one (signified - by an empty ``PushConfig``) or vice versa, or change the endpoint URL - and other attributes of a push subscription. Messages will accumulate - for delivery continuously through the call regardless of changes to the - ``PushConfig``. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> # TODO: Initialize `push_config`: - >>> push_config = {} - >>> - >>> client.modify_push_config(subscription, push_config) - - Args: - subscription (str): Required. The name of the subscription. Format is - ``projects/{project}/subscriptions/{sub}``. - push_config (Union[dict, ~google.cloud.pubsub_v1.types.PushConfig]): Required. The push configuration for future deliveries. 
- - An empty ``pushConfig`` indicates that the Pub/Sub system should stop - pushing messages from the given subscription and allow messages to be - pulled and acknowledged - effectively pausing the subscription if - ``Pull`` or ``StreamingPull`` is not called. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.PushConfig` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "modify_push_config" not in self._inner_api_calls: - self._inner_api_calls[ - "modify_push_config" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.modify_push_config, - default_retry=self._method_configs["ModifyPushConfig"].retry, - default_timeout=self._method_configs["ModifyPushConfig"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.ModifyPushConfigRequest( - subscription=subscription, push_config=push_config, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription", subscription)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["modify_push_config"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_snapshots( - self, - project, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists the existing snapshots. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> project = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_snapshots(project): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_snapshots(project).pages: - ... for element in page: - ... # process element - ... pass - - Args: - project (str): Required. The name of the project in which to list snapshots. Format - is ``projects/{project-id}``. 
- page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.GRPCIterator` instance. - An iterable of :class:`~google.cloud.pubsub_v1.types.Snapshot` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "list_snapshots" not in self._inner_api_calls: - self._inner_api_calls[ - "list_snapshots" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_snapshots, - default_retry=self._method_configs["ListSnapshots"].retry, - default_timeout=self._method_configs["ListSnapshots"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.ListSnapshotsRequest(project=project, page_size=page_size,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("project", project)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_snapshots"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="snapshots", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def create_snapshot( - self, - name, - subscription, - labels=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a snapshot from the requested subscription. Snapshots are - used in Seek operations, which allow you to manage message - acknowledgments in bulk. That is, you can set the acknowledgment state - of messages in an existing subscription to the state captured by a - snapshot. If the snapshot already exists, returns ``ALREADY_EXISTS``. If - the requested subscription doesn't exist, returns ``NOT_FOUND``. If the - backlog in the subscription is too old -- and the resulting snapshot - would expire in less than 1 hour -- then ``FAILED_PRECONDITION`` is - returned. See also the ``Snapshot.expire_time`` field. 
If the name is - not provided in the request, the server will assign a random name for - this snapshot on the same project as the subscription, conforming to the - `resource name - format `__. - The generated name is populated in the returned Snapshot object. Note - that for REST API requests, you must specify a name in the request. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> name = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> response = client.create_snapshot(name, subscription) - - Args: - name (str): Required. User-provided name for this snapshot. If the name is not - provided in the request, the server will assign a random name for this - snapshot on the same project as the subscription. Note that for REST API - requests, you must specify a name. See the resource name rules. Format - is ``projects/{project}/snapshots/{snap}``. - subscription (str): Required. The subscription whose backlog the snapshot retains. - Specifically, the created snapshot is guaranteed to retain: (a) The - existing backlog on the subscription. More precisely, this is defined as - the messages in the subscription's backlog that are unacknowledged upon - the successful completion of the ``CreateSnapshot`` request; as well as: - (b) Any messages published to the subscription's topic following the - successful completion of the CreateSnapshot request. Format is - ``projects/{project}/subscriptions/{sub}``. - labels (dict[str -> str]): See Creating and - managing labels. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. 
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Snapshot` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "create_snapshot" not in self._inner_api_calls: - self._inner_api_calls[ - "create_snapshot" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_snapshot, - default_retry=self._method_configs["CreateSnapshot"].retry, - default_timeout=self._method_configs["CreateSnapshot"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.CreateSnapshotRequest( - name=name, subscription=subscription, labels=labels, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_snapshot"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_snapshot( - self, - snapshot, - update_mask, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. 
- - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> seconds = 123456 - >>> expire_time = {'seconds': seconds} - >>> snapshot_name = 'projects/my-project/snapshots/my-snapshot' - >>> snapshot = { - ... 'name': snapshot_name, - ... 'expire_time': expire_time, - ... } - >>> paths_element = 'expire_time' - >>> paths = [paths_element] - >>> update_mask = {'paths': paths} - >>> - >>> response = client.update_snapshot(snapshot, update_mask) - - Args: - snapshot (Union[dict, ~google.cloud.pubsub_v1.types.Snapshot]): Required. The updated snapshot object. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.Snapshot` - update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Required. Indicates which fields in the provided snapshot to update. - Must be specified and non-empty. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Snapshot` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "update_snapshot" not in self._inner_api_calls: - self._inner_api_calls[ - "update_snapshot" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_snapshot, - default_retry=self._method_configs["UpdateSnapshot"].retry, - default_timeout=self._method_configs["UpdateSnapshot"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.UpdateSnapshotRequest( - snapshot=snapshot, update_mask=update_mask, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("snapshot.name", snapshot.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_snapshot"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_snapshot( - self, - snapshot, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Removes an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot.

- When the snapshot is deleted, all messages retained in the snapshot - are immediately dropped. After a snapshot is deleted, a new one may be - created with the same name, but the new one has no association with the old - snapshot or its subscription, unless the same subscription is specified. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> snapshot = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') - >>> - >>> client.delete_snapshot(snapshot) - - Args: - snapshot (str): Required. The name of the snapshot to delete. Format is - ``projects/{project}/snapshots/{snap}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "delete_snapshot" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_snapshot" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_snapshot, - default_retry=self._method_configs["DeleteSnapshot"].retry, - default_timeout=self._method_configs["DeleteSnapshot"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.DeleteSnapshotRequest(snapshot=snapshot,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("snapshot", snapshot)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_snapshot"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def seek( - self, - subscription, - time=None, - snapshot=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Seeks an existing subscription to a point in time or to a given snapshot, - whichever is provided in the request. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. Note that both the subscription and the snapshot - must be on the same topic. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> response = client.seek(subscription) - - Args: - subscription (str): Required. The subscription to affect. - time (Union[dict, ~google.cloud.pubsub_v1.types.Timestamp]): The time to seek to. 
Messages retained in the subscription that were - published before this time are marked as acknowledged, and messages - retained in the subscription that were published after this time are - marked as unacknowledged. Note that this operation affects only those - messages retained in the subscription (configured by the combination of - ``message_retention_duration`` and ``retain_acked_messages``). For - example, if ``time`` corresponds to a point before the message retention - window (or to a point before the system's notion of the subscription - creation time), only retained messages will be marked as unacknowledged, - and already-expunged messages will not be restored. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.Timestamp` - snapshot (str): The snapshot to seek to. The snapshot's topic must be the same as - that of the provided subscription. Format is - ``projects/{project}/snapshots/{snap}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.SeekResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "seek" not in self._inner_api_calls: - self._inner_api_calls["seek"] = google.api_core.gapic_v1.method.wrap_method( - self.transport.seek, - default_retry=self._method_configs["Seek"].retry, - default_timeout=self._method_configs["Seek"].timeout, - client_info=self._client_info, - ) - - # Sanity check: We have some fields which are mutually exclusive; - # raise ValueError if more than one is sent. - google.api_core.protobuf_helpers.check_oneof( - time=time, snapshot=snapshot, - ) - - request = pubsub_pb2.SeekRequest( - subscription=subscription, time=time, snapshot=snapshot, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription", subscription)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["seek"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def set_iam_policy( - self, - resource, - policy, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Sets the access control policy on the specified resource. Replaces - any existing policy. - - Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` - errors. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> # TODO: Initialize `policy`: - >>> policy = {} - >>> - >>> response = client.set_iam_policy(resource, policy) - - Args: - resource (str): REQUIRED: The resource for which the policy is being specified. - See the operation documentation for the appropriate value for this field. - policy (Union[dict, ~google.cloud.pubsub_v1.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The - size of the policy is limited to a few 10s of KB. 
An empty policy is a - valid policy but certain Cloud Platform services (such as Projects) - might reject them. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.Policy` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Policy` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "set_iam_policy" not in self._inner_api_calls: - self._inner_api_calls[ - "set_iam_policy" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.set_iam_policy, - default_retry=self._method_configs["SetIamPolicy"].retry, - default_timeout=self._method_configs["SetIamPolicy"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["set_iam_policy"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_iam_policy( - self, - resource, - options_=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the access control policy for a resource. Returns an empty policy - if the resource exists and does not have a policy set. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> response = client.get_iam_policy(resource) - - Args: - resource (str): REQUIRED: The resource for which the policy is being requested. - See the operation documentation for the appropriate value for this field. - options_ (Union[dict, ~google.cloud.pubsub_v1.types.GetPolicyOptions]): OPTIONAL: A ``GetPolicyOptions`` object for specifying options to - ``GetIamPolicy``. This field is only used by Cloud IAM. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.GetPolicyOptions` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. 
If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Policy` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_iam_policy" not in self._inner_api_calls: - self._inner_api_calls[ - "get_iam_policy" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_iam_policy, - default_retry=self._method_configs["GetIamPolicy"].retry, - default_timeout=self._method_configs["GetIamPolicy"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.GetIamPolicyRequest( - resource=resource, options=options_, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_iam_policy"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def test_iam_permissions( - self, - resource, - permissions, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Returns permissions that a caller has on the specified resource. If the - resource does not exist, this will return an empty set of - permissions, not a `NOT_FOUND` error. 
- - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for authorization - checking. This operation may "fail open" without warning. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> # TODO: Initialize `permissions`: - >>> permissions = [] - >>> - >>> response = client.test_iam_permissions(resource, permissions) - - Args: - resource (str): REQUIRED: The resource for which the policy detail is being requested. - See the operation documentation for the appropriate value for this field. - permissions (list[str]): The set of permissions to check for the ``resource``. Permissions - with wildcards (such as '*' or 'storage.*') are not allowed. For more - information see `IAM - Overview `__. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.TestIamPermissionsResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "test_iam_permissions" not in self._inner_api_calls: - self._inner_api_calls[ - "test_iam_permissions" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.test_iam_permissions, - default_retry=self._method_configs["TestIamPermissions"].retry, - default_timeout=self._method_configs["TestIamPermissions"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["test_iam_permissions"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/google/cloud/pubsub_v1/gapic/subscriber_client_config.py b/google/cloud/pubsub_v1/gapic/subscriber_client_config.py deleted file mode 100644 index fc3254975..000000000 --- a/google/cloud/pubsub_v1/gapic/subscriber_client_config.py +++ /dev/null @@ -1,144 +0,0 @@ -config = { - "interfaces": { - "google.pubsub.v1.Subscriber": { - "retry_codes": { - "idempotent": ["ABORTED", "UNAVAILABLE", "UNKNOWN"], - "non_idempotent": ["UNAVAILABLE"], - "idempotent2": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "streaming_pull": [ - "ABORTED", - "DEADLINE_EXCEEDED", - "INTERNAL", - "RESOURCE_EXHAUSTED", - "UNAVAILABLE", - ], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000, - }, - "messaging": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 25000, - "rpc_timeout_multiplier": 
1.0, - "max_rpc_timeout_millis": 25000, - "total_timeout_millis": 600000, - }, - "streaming_messaging": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 600000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 600000, - "total_timeout_millis": 600000, - }, - }, - "methods": { - "CreateSubscription": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "GetSubscription": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "UpdateSubscription": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "ListSubscriptions": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "DeleteSubscription": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "GetSnapshot": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent2", - "retry_params_name": "default", - }, - "ModifyAckDeadline": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "Acknowledge": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "messaging", - }, - "Pull": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "messaging", - }, - "StreamingPull": { - "timeout_millis": 900000, - "retry_codes_name": "streaming_pull", - "retry_params_name": "streaming_messaging", - }, - "ModifyPushConfig": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "ListSnapshots": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "CreateSnapshot": { - "timeout_millis": 60000, - "retry_codes_name": 
"non_idempotent", - "retry_params_name": "default", - }, - "UpdateSnapshot": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "DeleteSnapshot": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "Seek": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "SetIamPolicy": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "GetIamPolicy": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "TestIamPermissions": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/google/cloud/pubsub_v1/gapic/transports/__init__.py b/google/cloud/pubsub_v1/gapic/transports/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py b/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py deleted file mode 100644 index bdba63555..000000000 --- a/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py +++ /dev/null @@ -1,296 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -import google.api_core.grpc_helpers - -from google.cloud.pubsub_v1.proto import pubsub_pb2_grpc -from google.iam.v1 import iam_policy_pb2_grpc as iam_policy_pb2_grpc - - -class PublisherGrpcTransport(object): - """gRPC transport class providing stubs for - google.pubsub.v1 Publisher API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ) - - def __init__( - self, channel=None, credentials=None, address="pubsub.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive.", - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. 
- self._stubs = { - "iam_policy_stub": iam_policy_pb2_grpc.IAMPolicyStub(channel), - "publisher_stub": pubsub_pb2_grpc.PublisherStub(channel), - } - - @classmethod - def create_channel( - cls, address="pubsub.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def create_topic(self): - """Return the gRPC stub for :meth:`PublisherClient.create_topic`. - - Creates the given topic with the given name. See the resource name - rules. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["publisher_stub"].CreateTopic - - @property - def update_topic(self): - """Return the gRPC stub for :meth:`PublisherClient.update_topic`. - - Updates an existing topic. Note that certain properties of a - topic are not modifiable. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["publisher_stub"].UpdateTopic - - @property - def publish(self): - """Return the gRPC stub for :meth:`PublisherClient.publish`. - - Adds one or more messages to the topic. 
Returns ``NOT_FOUND`` if the - topic does not exist. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["publisher_stub"].Publish - - @property - def get_topic(self): - """Return the gRPC stub for :meth:`PublisherClient.get_topic`. - - Gets the configuration of a topic. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["publisher_stub"].GetTopic - - @property - def list_topics(self): - """Return the gRPC stub for :meth:`PublisherClient.list_topics`. - - Lists matching topics. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["publisher_stub"].ListTopics - - @property - def list_topic_subscriptions(self): - """Return the gRPC stub for :meth:`PublisherClient.list_topic_subscriptions`. - - Lists the names of the attached subscriptions on this topic. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["publisher_stub"].ListTopicSubscriptions - - @property - def list_topic_snapshots(self): - """Return the gRPC stub for :meth:`PublisherClient.list_topic_snapshots`. - - Lists the names of the snapshots on this topic. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["publisher_stub"].ListTopicSnapshots - - @property - def delete_topic(self): - """Return the gRPC stub for :meth:`PublisherClient.delete_topic`. - - Deletes the topic with the given name. Returns ``NOT_FOUND`` if the - topic does not exist. After a topic is deleted, a new topic may be - created with the same name; this is an entirely new topic with none of - the old configuration or subscriptions. Existing subscriptions to this - topic are not deleted, but their ``topic`` field is set to - ``_deleted-topic_``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["publisher_stub"].DeleteTopic - - @property - def set_iam_policy(self): - """Return the gRPC stub for :meth:`PublisherClient.set_iam_policy`. - - Sets the access control policy on the specified resource. Replaces - any existing policy. - - Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` - errors. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["iam_policy_stub"].SetIamPolicy - - @property - def get_iam_policy(self): - """Return the gRPC stub for :meth:`PublisherClient.get_iam_policy`. - - Gets the access control policy for a resource. Returns an empty policy - if the resource exists and does not have a policy set. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["iam_policy_stub"].GetIamPolicy - - @property - def test_iam_permissions(self): - """Return the gRPC stub for :meth:`PublisherClient.test_iam_permissions`. - - Returns permissions that a caller has on the specified resource. If the - resource does not exist, this will return an empty set of - permissions, not a `NOT_FOUND` error. 
- - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for authorization - checking. This operation may "fail open" without warning. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["iam_policy_stub"].TestIamPermissions - - @property - def detach_subscription(self): - """Return the gRPC stub for :meth:`PublisherClient.detach_subscription`. - - Detaches a subscription from this topic. All messages retained in - the subscription are dropped. Subsequent ``Pull`` and ``StreamingPull`` - requests will return FAILED_PRECONDITION. If the subscription is a push - subscription, pushes to the endpoint will stop. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["publisher_stub"].DetachSubscription diff --git a/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py b/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py deleted file mode 100644 index cd7a19bbe..000000000 --- a/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py +++ /dev/null @@ -1,454 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -import google.api_core.grpc_helpers - -from google.cloud.pubsub_v1.proto import pubsub_pb2_grpc -from google.iam.v1 import iam_policy_pb2_grpc as iam_policy_pb2_grpc - - -class SubscriberGrpcTransport(object): - """gRPC transport class providing stubs for - google.pubsub.v1 Subscriber API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ) - - def __init__( - self, channel=None, credentials=None, address="pubsub.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive.", - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. 
- self._stubs = { - "iam_policy_stub": iam_policy_pb2_grpc.IAMPolicyStub(channel), - "subscriber_stub": pubsub_pb2_grpc.SubscriberStub(channel), - } - - @classmethod - def create_channel( - cls, address="pubsub.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def create_subscription(self): - """Return the gRPC stub for :meth:`SubscriberClient.create_subscription`. - - Creates a subscription to a given topic. See the resource name - rules. If the subscription already exists, returns ``ALREADY_EXISTS``. - If the corresponding topic doesn't exist, returns ``NOT_FOUND``. - - If the name is not provided in the request, the server will assign a - random name for this subscription on the same project as the topic, - conforming to the `resource name - format `__. - The generated name is populated in the returned Subscription object. - Note that for REST API requests, you must specify a name in the request. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["subscriber_stub"].CreateSubscription - - @property - def get_subscription(self): - """Return the gRPC stub for :meth:`SubscriberClient.get_subscription`. - - Gets the configuration details of a subscription. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].GetSubscription - - @property - def update_subscription(self): - """Return the gRPC stub for :meth:`SubscriberClient.update_subscription`. - - Updates an existing subscription. Note that certain properties of a - subscription, such as its topic, are not modifiable. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].UpdateSubscription - - @property - def list_subscriptions(self): - """Return the gRPC stub for :meth:`SubscriberClient.list_subscriptions`. - - Lists matching subscriptions. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].ListSubscriptions - - @property - def delete_subscription(self): - """Return the gRPC stub for :meth:`SubscriberClient.delete_subscription`. - - Deletes an existing subscription. All messages retained in the - subscription are immediately dropped. Calls to ``Pull`` after deletion - will return ``NOT_FOUND``. After a subscription is deleted, a new one - may be created with the same name, but the new one has no association - with the old subscription or its topic unless the same topic is - specified. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["subscriber_stub"].DeleteSubscription - - @property - def get_snapshot(self): - """Return the gRPC stub for :meth:`SubscriberClient.get_snapshot`. - - Gets the configuration details of a snapshot. Snapshots are used in - Seek - operations, which allow you to manage message acknowledgments in bulk. That - is, you can set the acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].GetSnapshot - - @property - def modify_ack_deadline(self): - """Return the gRPC stub for :meth:`SubscriberClient.modify_ack_deadline`. - - Modifies the ack deadline for a specific message. This method is - useful to indicate that more time is needed to process a message by the - subscriber, or to make the message available for redelivery if the - processing was interrupted. Note that this does not modify the - subscription-level ``ackDeadlineSeconds`` used for subsequent messages. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].ModifyAckDeadline - - @property - def acknowledge(self): - """Return the gRPC stub for :meth:`SubscriberClient.acknowledge`. - - Acknowledges the messages associated with the ``ack_ids`` in the - ``AcknowledgeRequest``. The Pub/Sub system can remove the relevant - messages from the subscription. - - Acknowledging a message whose ack deadline has expired may succeed, but - such a message may be redelivered later. Acknowledging a message more - than once will not result in an error. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["subscriber_stub"].Acknowledge - - @property - def pull(self): - """Return the gRPC stub for :meth:`SubscriberClient.pull`. - - Pulls messages from the server. The server may return - ``UNAVAILABLE`` if there are too many concurrent pull requests pending - for the given subscription. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].Pull - - @property - def streaming_pull(self): - """Return the gRPC stub for :meth:`SubscriberClient.streaming_pull`. - - Establishes a stream with the server, which sends messages down to - the client. The client streams acknowledgements and ack deadline - modifications back to the server. The server will close the stream and - return the status on any error. The server may close the stream with - status ``UNAVAILABLE`` to reassign server-side resources, in which case, - the client should re-establish the stream. Flow control can be achieved - by configuring the underlying RPC channel. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].StreamingPull - - @property - def modify_push_config(self): - """Return the gRPC stub for :meth:`SubscriberClient.modify_push_config`. - - Modifies the ``PushConfig`` for a specified subscription. - - This may be used to change a push subscription to a pull one (signified - by an empty ``PushConfig``) or vice versa, or change the endpoint URL - and other attributes of a push subscription. Messages will accumulate - for delivery continuously through the call regardless of changes to the - ``PushConfig``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["subscriber_stub"].ModifyPushConfig - - @property - def list_snapshots(self): - """Return the gRPC stub for :meth:`SubscriberClient.list_snapshots`. - - Lists the existing snapshots. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].ListSnapshots - - @property - def create_snapshot(self): - """Return the gRPC stub for :meth:`SubscriberClient.create_snapshot`. - - Creates a snapshot from the requested subscription. Snapshots are - used in Seek operations, which allow you to manage message - acknowledgments in bulk. That is, you can set the acknowledgment state - of messages in an existing subscription to the state captured by a - snapshot. If the snapshot already exists, returns ``ALREADY_EXISTS``. If - the requested subscription doesn't exist, returns ``NOT_FOUND``. If the - backlog in the subscription is too old -- and the resulting snapshot - would expire in less than 1 hour -- then ``FAILED_PRECONDITION`` is - returned. See also the ``Snapshot.expire_time`` field. If the name is - not provided in the request, the server will assign a random name for - this snapshot on the same project as the subscription, conforming to the - `resource name - format `__. - The generated name is populated in the returned Snapshot object. Note - that for REST API requests, you must specify a name in the request. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["subscriber_stub"].CreateSnapshot - - @property - def update_snapshot(self): - """Return the gRPC stub for :meth:`SubscriberClient.update_snapshot`. - - Updates an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].UpdateSnapshot - - @property - def delete_snapshot(self): - """Return the gRPC stub for :meth:`SubscriberClient.delete_snapshot`. - - Removes an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot.

- When the snapshot is deleted, all messages retained in the snapshot - are immediately dropped. After a snapshot is deleted, a new one may be - created with the same name, but the new one has no association with the old - snapshot or its subscription, unless the same subscription is specified. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].DeleteSnapshot - - @property - def seek(self): - """Return the gRPC stub for :meth:`SubscriberClient.seek`. - - Seeks an existing subscription to a point in time or to a given snapshot, - whichever is provided in the request. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. Note that both the subscription and the snapshot - must be on the same topic. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].Seek - - @property - def set_iam_policy(self): - """Return the gRPC stub for :meth:`SubscriberClient.set_iam_policy`. - - Sets the access control policy on the specified resource. Replaces - any existing policy. - - Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` - errors. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["iam_policy_stub"].SetIamPolicy - - @property - def get_iam_policy(self): - """Return the gRPC stub for :meth:`SubscriberClient.get_iam_policy`. - - Gets the access control policy for a resource. Returns an empty policy - if the resource exists and does not have a policy set. 
- - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["iam_policy_stub"].GetIamPolicy - - @property - def test_iam_permissions(self): - """Return the gRPC stub for :meth:`SubscriberClient.test_iam_permissions`. - - Returns permissions that a caller has on the specified resource. If the - resource does not exist, this will return an empty set of - permissions, not a `NOT_FOUND` error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for authorization - checking. This operation may "fail open" without warning. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["iam_policy_stub"].TestIamPermissions diff --git a/google/cloud/pubsub_v1/proto/__init__.py b/google/cloud/pubsub_v1/proto/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/google/cloud/pubsub_v1/proto/pubsub.proto b/google/cloud/pubsub_v1/proto/pubsub.proto index dc9151446..909863eb9 100644 --- a/google/cloud/pubsub_v1/proto/pubsub.proto +++ b/google/cloud/pubsub_v1/proto/pubsub.proto @@ -42,9 +42,8 @@ service Publisher { "https://www.googleapis.com/auth/cloud-platform," "https://www.googleapis.com/auth/pubsub"; - // Creates the given topic with the given name. See the - // - // resource name rules. + // Creates the given topic with the given name. See the [resource name rules]( + // https://cloud.google.com/pubsub/docs/admin#resource_names). rpc CreateTopic(Topic) returns (Topic) { option (google.api.http) = { put: "/v1/{name=projects/*/topics/*}" @@ -98,11 +97,10 @@ service Publisher { } // Lists the names of the snapshots on this topic. Snapshots are used in - // Seek - // operations, which allow - // you to manage message acknowledgments in bulk. 
That is, you can set the - // acknowledgment state of messages in an existing subscription to the state - // captured by a snapshot. + // [Seek](https://cloud.google.com/pubsub/docs/replay-overview) operations, + // which allow you to manage message acknowledgments in bulk. That is, you can + // set the acknowledgment state of messages in an existing subscription to the + // state captured by a snapshot. rpc ListTopicSnapshots(ListTopicSnapshotsRequest) returns (ListTopicSnapshotsResponse) { option (google.api.http) = { @@ -161,8 +159,8 @@ message Topic { // must not start with `"goog"`. string name = 1 [(google.api.field_behavior) = REQUIRED]; - // See Creating and - // managing labels. + // See [Creating and managing labels] + // (https://cloud.google.com/pubsub/docs/labels). map labels = 2; // Policy constraining the set of Google Cloud Platform regions where messages @@ -180,11 +178,11 @@ message Topic { // A message that is published by publishers and consumed by subscribers. The // message must contain either a non-empty data field or at least one attribute. // Note that client libraries represent this object differently -// depending on the language. See the corresponding -// client -// library documentation for more information. See -// Quotas and limits -// for more information about message limits. +// depending on the language. See the corresponding [client library +// documentation](https://cloud.google.com/pubsub/docs/reference/libraries) for +// more information. See [quotas and limits] +// (https://cloud.google.com/pubsub/quotas) for more information about message +// limits. message PubsubMessage { // The message data field. If this field is empty, the message must contain // at least one attribute. @@ -212,9 +210,6 @@ message PubsubMessage { // delivered to subscribers in the order in which they are received by the // Pub/Sub system. All `PubsubMessage`s published in a given `PublishRequest` // must specify the same `ordering_key` value. 
- // EXPERIMENTAL: This feature is part of a closed alpha release. This - // API might be changed in backward-incompatible ways and is not recommended - // for production use. It is not subject to any SLA or deprecation policy. string ordering_key = 5; } @@ -388,19 +383,17 @@ service Subscriber { "https://www.googleapis.com/auth/cloud-platform," "https://www.googleapis.com/auth/pubsub"; - // Creates a subscription to a given topic. See the - // - // resource name rules. + // Creates a subscription to a given topic. See the [resource name rules] + // (https://cloud.google.com/pubsub/docs/admin#resource_names). // If the subscription already exists, returns `ALREADY_EXISTS`. // If the corresponding topic doesn't exist, returns `NOT_FOUND`. // // If the name is not provided in the request, the server will assign a random // name for this subscription on the same project as the topic, conforming - // to the - // [resource name - // format](https://cloud.google.com/pubsub/docs/admin#resource_names). The - // generated name is populated in the returned Subscription object. Note that - // for REST API requests, you must specify a name in the request. + // to the [resource name format] + // (https://cloud.google.com/pubsub/docs/admin#resource_names). The generated + // name is populated in the returned Subscription object. Note that for REST + // API requests, you must specify a name in the request. rpc CreateSubscription(Subscription) returns (Subscription) { option (google.api.http) = { put: "/v1/{name=projects/*/subscriptions/*}" @@ -528,12 +521,11 @@ service Subscriber { option (google.api.method_signature) = "snapshot"; } - // Lists the existing snapshots. Snapshots are used in - // Seek - // operations, which allow - // you to manage message acknowledgments in bulk. That is, you can set the - // acknowledgment state of messages in an existing subscription to the state - // captured by a snapshot. + // Lists the existing snapshots. 
Snapshots are used in [Seek]( + // https://cloud.google.com/pubsub/docs/replay-overview) operations, which + // allow you to manage message acknowledgments in bulk. That is, you can set + // the acknowledgment state of messages in an existing subscription to the + // state captured by a snapshot. rpc ListSnapshots(ListSnapshotsRequest) returns (ListSnapshotsResponse) { option (google.api.http) = { get: "/v1/{project=projects/*}/snapshots" @@ -542,21 +534,19 @@ service Subscriber { } // Creates a snapshot from the requested subscription. Snapshots are used in - // Seek - // operations, which allow - // you to manage message acknowledgments in bulk. That is, you can set the - // acknowledgment state of messages in an existing subscription to the state - // captured by a snapshot. - //

If the snapshot already exists, returns `ALREADY_EXISTS`. + // [Seek](https://cloud.google.com/pubsub/docs/replay-overview) operations, + // which allow you to manage message acknowledgments in bulk. That is, you can + // set the acknowledgment state of messages in an existing subscription to the + // state captured by a snapshot. + // If the snapshot already exists, returns `ALREADY_EXISTS`. // If the requested subscription doesn't exist, returns `NOT_FOUND`. // If the backlog in the subscription is too old -- and the resulting snapshot // would expire in less than 1 hour -- then `FAILED_PRECONDITION` is returned. // See also the `Snapshot.expire_time` field. If the name is not provided in // the request, the server will assign a random // name for this snapshot on the same project as the subscription, conforming - // to the - // [resource name - // format](https://cloud.google.com/pubsub/docs/admin#resource_names). The + // to the [resource name format] + // (https://cloud.google.com/pubsub/docs/admin#resource_names). The // generated name is populated in the returned Snapshot object. Note that for // REST API requests, you must specify a name in the request. rpc CreateSnapshot(CreateSnapshotRequest) returns (Snapshot) { @@ -580,12 +570,11 @@ service Subscriber { }; } - // Removes an existing snapshot. Snapshots are used in - // Seek - // operations, which allow - // you to manage message acknowledgments in bulk. That is, you can set the - // acknowledgment state of messages in an existing subscription to the state - // captured by a snapshot.

+ // Removes an existing snapshot. Snapshots are used in [Seek] + // (https://cloud.google.com/pubsub/docs/replay-overview) operations, which + // allow you to manage message acknowledgments in bulk. That is, you can set + // the acknowledgment state of messages in an existing subscription to the + // state captured by a snapshot. // When the snapshot is deleted, all messages retained in the snapshot // are immediately dropped. After a snapshot is deleted, a new one may be // created with the same name, but the new one has no association with the old @@ -598,13 +587,12 @@ service Subscriber { } // Seeks an existing subscription to a point in time or to a given snapshot, - // whichever is provided in the request. Snapshots are used in - // Seek - // operations, which allow - // you to manage message acknowledgments in bulk. That is, you can set the - // acknowledgment state of messages in an existing subscription to the state - // captured by a snapshot. Note that both the subscription and the snapshot - // must be on the same topic. + // whichever is provided in the request. Snapshots are used in [Seek]( + // https://cloud.google.com/pubsub/docs/replay-overview) operations, which + // allow you to manage message acknowledgments in bulk. That is, you can set + // the acknowledgment state of messages in an existing subscription to the + // state captured by a snapshot. Note that both the subscription and the + // snapshot must be on the same topic. rpc Seek(SeekRequest) returns (SeekResponse) { option (google.api.http) = { post: "/v1/{subscription=projects/*/subscriptions/*}:seek" @@ -666,10 +654,8 @@ message Subscription { // Indicates whether to retain acknowledged messages. If true, then // messages are not expunged from the subscription's backlog, even if they are // acknowledged, until they fall out of the `message_retention_duration` - // window. This must be true if you would like to - // - // Seek to a timestamp. + // window. 
This must be true if you would like to [Seek to a timestamp] + // (https://cloud.google.com/pubsub/docs/replay-overview#seek_to_a_time). bool retain_acked_messages = 7; // How long to retain unacknowledged messages in the subscription's backlog, @@ -688,9 +674,6 @@ message Subscription { // will be delivered to the subscribers in the order in which they // are received by the Pub/Sub system. Otherwise, they may be delivered in // any order. - // EXPERIMENTAL: This feature is part of a closed alpha release. This - // API might be changed in backward-incompatible ways and is not recommended - // for production use. It is not subject to any SLA or deprecation policy. bool enable_message_ordering = 10; // A policy that specifies the conditions for this subscription's expiration. @@ -1186,11 +1169,10 @@ message UpdateSnapshotRequest { } // A snapshot resource. Snapshots are used in -// Seek -// operations, which allow -// you to manage message acknowledgments in bulk. That is, you can set the -// acknowledgment state of messages in an existing subscription to the state -// captured by a snapshot. +// [Seek](https://cloud.google.com/pubsub/docs/replay-overview) +// operations, which allow you to manage message acknowledgments in bulk. That +// is, you can set the acknowledgment state of messages in an existing +// subscription to the state captured by a snapshot. message Snapshot { option (google.api.resource) = { type: "pubsub.googleapis.com/Snapshot" @@ -1217,8 +1199,8 @@ message Snapshot { // snapshot that would expire in less than 1 hour after creation. google.protobuf.Timestamp expire_time = 3; - // See Creating and - // managing labels. + // See [Creating and managing labels] + // (https://cloud.google.com/pubsub/docs/labels). 
map labels = 4; } diff --git a/google/cloud/pubsub_v1/proto/pubsub_pb2.py b/google/cloud/pubsub_v1/proto/pubsub_pb2.py deleted file mode 100644 index 44dc06898..000000000 --- a/google/cloud/pubsub_v1/proto/pubsub_pb2.py +++ /dev/null @@ -1,5246 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/pubsub_v1/proto/pubsub.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/pubsub_v1/proto/pubsub.proto", - package="google.pubsub.v1", - syntax="proto3", - serialized_options=b"\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1\312\002\026Google\\Cloud\\PubSub\\V1\352\002\031Google::Cloud::PubSub::V1", - create_key=_descriptor._internal_create_key, - 
serialized_pb=b'\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xb2\x02\n\x05Topic\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x12\x14\n\x0ckms_key_name\x18\x05 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:T\xea\x41Q\n\x1bpubsub.googleapis.com/Topic\x12!projects/{project}/topics/{topic}\x12\x0f_deleted-topic_"\xf1\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0cordering_key\x18\x05 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"E\n\x0fGetTopicRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic"w\n\x12UpdateTopicRequest\x12+\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.TopicB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"|\n\x0ePublishRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x36\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessageB\x03\xe0\x41\x02"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 
\x03(\t"\x80\x01\n\x11ListTopicsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"z\n\x1dListTopicSubscriptionsRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"y\n\x1eListTopicSubscriptionsResponse\x12>\n\rsubscriptions\x18\x01 \x03(\tB\'\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"v\n\x19ListTopicSnapshotsRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x12\x44\x65leteTopicRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic"]\n\x19\x44\x65tachSubscriptionRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription"\x1c\n\x1a\x44\x65tachSubscriptionResponse"\xc0\x05\n\x0cSubscription\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x32\n\x05topic\x18\x02 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12\x1f\n\x17\x65nable_message_ordering\x18\n \x01(\x08\x12=\n\x11\x65xpiration_policy\x18\x0b 
\x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x12\x0e\n\x06\x66ilter\x18\x0c \x01(\t\x12>\n\x12\x64\x65\x61\x64_letter_policy\x18\r \x01(\x0b\x32".google.pubsub.v1.DeadLetterPolicy\x12\x33\n\x0cretry_policy\x18\x0e \x01(\x0b\x32\x1d.google.pubsub.v1.RetryPolicy\x12\x10\n\x08\x64\x65tached\x18\x0f \x01(\x08\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:X\xea\x41U\n"pubsub.googleapis.com/Subscription\x12/projects/{project}/subscriptions/{subscription}"u\n\x0bRetryPolicy\x12\x32\n\x0fminimum_backoff\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x32\n\x0fmaximum_backoff\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"L\n\x10\x44\x65\x61\x64LetterPolicy\x12\x19\n\x11\x64\x65\x61\x64_letter_topic\x18\x01 \x01(\t\x12\x1d\n\x15max_delivery_attempts\x18\x02 \x01(\x05":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\xad\x02\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x12<\n\noidc_token\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.PushConfig.OidcTokenH\x00\x1a<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x17\n\x15\x61uthentication_method"m\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\x12\x18\n\x10\x64\x65livery_attempt\x18\x03 \x01(\x05"Z\n\x16GetSubscriptionRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription"\x8c\x01\n\x19UpdateSubscriptionRequest\x12\x39\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.SubscriptionB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"\x87\x01\n\x18ListSubscriptionsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"]\n\x19\x44\x65leteSubscriptionRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription"\x93\x01\n\x17ModifyPushConfigRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x36\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfigB\x03\xe0\x41\x02"\x8d\x01\n\x0bPullRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12!\n\x12return_immediately\x18\x02 \x01(\x08\x42\x05\x18\x01\xe0\x41\x01\x12\x19\n\x0cmax_messages\x18\x03 \x01(\x05\x42\x03\xe0\x41\x02"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\x95\x01\n\x18ModifyAckDeadlineRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x14\n\x07\x61\x63k_ids\x18\x04 \x03(\tB\x03\xe0\x41\x02\x12!\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05\x42\x03\xe0\x41\x02"l\n\x12\x41\x63knowledgeRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x14\n\x07\x61\x63k_ids\x18\x02 \x03(\tB\x03\xe0\x41\x02"\xa9\x02\n\x14StreamingPullRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12(\n\x1bstream_ack_deadline_seconds\x18\x05 
\x01(\x05\x42\x03\xe0\x41\x02\x12\x11\n\tclient_id\x18\x06 \x01(\t\x12 \n\x18max_outstanding_messages\x18\x07 \x01(\x03\x12\x1d\n\x15max_outstanding_bytes\x18\x08 \x01(\x03"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\x83\x02\n\x15\x43reateSnapshotRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot\x12@\n\x0csubscription\x18\x02 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x80\x01\n\x15UpdateSnapshotRequest\x12\x31\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.SnapshotB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"\xaf\x02\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x05topic\x18\x02 \x01(\tB \xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:L\xea\x41I\n\x1epubsub.googleapis.com/Snapshot\x12\'projects/{project}/snapshots/{snapshot}"N\n\x12GetSnapshotRequest\x12\x38\n\x08snapshot\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot"\x83\x01\n\x14ListSnapshotsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x15\x44\x65leteSnapshotRequest\x12\x38\n\x08snapshot\x18\x01 
\x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot"\xbe\x01\n\x0bSeekRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x37\n\x08snapshot\x18\x03 \x01(\tB#\xfa\x41 \n\x1epubsub.googleapis.com/SnapshotH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xa3\x0b\n\tPublisher\x12q\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic"0\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\xda\x41\x04name\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x93\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"C\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\xda\x41\x0etopic,messages\x12w\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\xda\x41\x05topic\x12\x8a\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"1\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\xda\x41\x07project\x12\xba\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"=\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\xda\x41\x05topic\x12\xaa\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"9\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\xda\x41\x05topic\x12|\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"/\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}\xda\x41\x05topic\x12\xad\x01\n\x12\x44\x65tachSubscription\x12+.google.pubsub.v1.De
tachSubscriptionRequest\x1a,.google.pubsub.v1.DetachSubscriptionResponse"<\x82\xd3\xe4\x93\x02\x36"4/v1/{subscription=projects/*/subscriptions/*}:detach\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub2\x83\x15\n\nSubscriber\x12\xb4\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"^\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\xda\x41+name,topic,push_config,ack_deadline_seconds\x12\xa1\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"D\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\xda\x41\x0csubscription\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\xa6\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse"8\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\xda\x41\x07project\x12\x9f\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\xda\x41\x0csubscription\x12\xcf\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"v\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\xda\x41)subscription,ack_ids,ack_deadline_seconds\x12\xa8\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"[\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\xda\x41\x14subscription,ack_ids\x12\xb3\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"l\x82\xd3\xe4\x93\
x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\xda\x41,subscription,return_immediately,max_messages\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xbb\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"d\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\xda\x41\x18subscription,push_config\x12\x89\x01\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"8\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\xda\x41\x08snapshot\x12\x96\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"4\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\xda\x41\x07project\x12\x97\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"@\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\xda\x41\x11name,subscription\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x8b\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"8\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\xda\x41\x08snapshot\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsubB\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::
Cloud::PubSub::V1b\x06proto3', - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - ], -) - - -_MESSAGESTORAGEPOLICY = _descriptor.Descriptor( - name="MessageStoragePolicy", - full_name="google.pubsub.v1.MessageStoragePolicy", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="allowed_persistence_regions", - full_name="google.pubsub.v1.MessageStoragePolicy.allowed_persistence_regions", - index=0, - number=1, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=306, - serialized_end=365, -) - - -_TOPIC_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.pubsub.v1.Topic.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.pubsub.v1.Topic.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - 
file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.pubsub.v1.Topic.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=543, - serialized_end=588, -) - -_TOPIC = _descriptor.Descriptor( - name="Topic", - full_name="google.pubsub.v1.Topic", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.pubsub.v1.Topic.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.pubsub.v1.Topic.labels", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="message_storage_policy", - full_name="google.pubsub.v1.Topic.message_storage_policy", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="kms_key_name", - full_name="google.pubsub.v1.Topic.kms_key_name", - index=3, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_TOPIC_LABELSENTRY,], - enum_types=[], - serialized_options=b"\352AQ\n\033pubsub.googleapis.com/Topic\022!projects/{project}/topics/{topic}\022\017_deleted-topic_", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=368, - serialized_end=674, -) - - -_PUBSUBMESSAGE_ATTRIBUTESENTRY = _descriptor.Descriptor( - name="AttributesEntry", - full_name="google.pubsub.v1.PubsubMessage.AttributesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.pubsub.v1.PubsubMessage.AttributesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.pubsub.v1.PubsubMessage.AttributesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, 
- extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=869, - serialized_end=918, -) - -_PUBSUBMESSAGE = _descriptor.Descriptor( - name="PubsubMessage", - full_name="google.pubsub.v1.PubsubMessage", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="data", - full_name="google.pubsub.v1.PubsubMessage.data", - index=0, - number=1, - type=12, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"", - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="attributes", - full_name="google.pubsub.v1.PubsubMessage.attributes", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="message_id", - full_name="google.pubsub.v1.PubsubMessage.message_id", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="publish_time", - full_name="google.pubsub.v1.PubsubMessage.publish_time", - index=3, - number=4, - type=11, - 
cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="ordering_key", - full_name="google.pubsub.v1.PubsubMessage.ordering_key", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_PUBSUBMESSAGE_ATTRIBUTESENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=677, - serialized_end=918, -) - - -_GETTOPICREQUEST = _descriptor.Descriptor( - name="GetTopicRequest", - full_name="google.pubsub.v1.GetTopicRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="topic", - full_name="google.pubsub.v1.GetTopicRequest.topic", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A\035\n\033pubsub.googleapis.com/Topic", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=920, - serialized_end=989, -) - - -_UPDATETOPICREQUEST = _descriptor.Descriptor( - name="UpdateTopicRequest", - 
full_name="google.pubsub.v1.UpdateTopicRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="topic", - full_name="google.pubsub.v1.UpdateTopicRequest.topic", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.pubsub.v1.UpdateTopicRequest.update_mask", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=991, - serialized_end=1110, -) - - -_PUBLISHREQUEST = _descriptor.Descriptor( - name="PublishRequest", - full_name="google.pubsub.v1.PublishRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="topic", - full_name="google.pubsub.v1.PublishRequest.topic", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A\035\n\033pubsub.googleapis.com/Topic", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - 
name="messages", - full_name="google.pubsub.v1.PublishRequest.messages", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1112, - serialized_end=1236, -) - - -_PUBLISHRESPONSE = _descriptor.Descriptor( - name="PublishResponse", - full_name="google.pubsub.v1.PublishResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="message_ids", - full_name="google.pubsub.v1.PublishResponse.message_ids", - index=0, - number=1, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1238, - serialized_end=1276, -) - - -_LISTTOPICSREQUEST = _descriptor.Descriptor( - name="ListTopicsRequest", - full_name="google.pubsub.v1.ListTopicsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="project", - full_name="google.pubsub.v1.ListTopicsRequest.project", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.pubsub.v1.ListTopicsRequest.page_size", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.pubsub.v1.ListTopicsRequest.page_token", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1279, - serialized_end=1407, -) - - -_LISTTOPICSRESPONSE = _descriptor.Descriptor( - name="ListTopicsResponse", - full_name="google.pubsub.v1.ListTopicsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="topics", - full_name="google.pubsub.v1.ListTopicsResponse.topics", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - 
_descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.pubsub.v1.ListTopicsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1409, - serialized_end=1495, -) - - -_LISTTOPICSUBSCRIPTIONSREQUEST = _descriptor.Descriptor( - name="ListTopicSubscriptionsRequest", - full_name="google.pubsub.v1.ListTopicSubscriptionsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="topic", - full_name="google.pubsub.v1.ListTopicSubscriptionsRequest.topic", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A\035\n\033pubsub.googleapis.com/Topic", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.pubsub.v1.ListTopicSubscriptionsRequest.page_size", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.pubsub.v1.ListTopicSubscriptionsRequest.page_token", 
- index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1497, - serialized_end=1619, -) - - -_LISTTOPICSUBSCRIPTIONSRESPONSE = _descriptor.Descriptor( - name="ListTopicSubscriptionsResponse", - full_name="google.pubsub.v1.ListTopicSubscriptionsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscriptions", - full_name="google.pubsub.v1.ListTopicSubscriptionsResponse.subscriptions", - index=0, - number=1, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.pubsub.v1.ListTopicSubscriptionsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1621, - serialized_end=1742, -) - - 
-_LISTTOPICSNAPSHOTSREQUEST = _descriptor.Descriptor( - name="ListTopicSnapshotsRequest", - full_name="google.pubsub.v1.ListTopicSnapshotsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="topic", - full_name="google.pubsub.v1.ListTopicSnapshotsRequest.topic", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A\035\n\033pubsub.googleapis.com/Topic", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.pubsub.v1.ListTopicSnapshotsRequest.page_size", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.pubsub.v1.ListTopicSnapshotsRequest.page_token", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1744, - serialized_end=1862, -) - - -_LISTTOPICSNAPSHOTSRESPONSE = _descriptor.Descriptor( - name="ListTopicSnapshotsResponse", - 
full_name="google.pubsub.v1.ListTopicSnapshotsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="snapshots", - full_name="google.pubsub.v1.ListTopicSnapshotsResponse.snapshots", - index=0, - number=1, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.pubsub.v1.ListTopicSnapshotsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1864, - serialized_end=1936, -) - - -_DELETETOPICREQUEST = _descriptor.Descriptor( - name="DeleteTopicRequest", - full_name="google.pubsub.v1.DeleteTopicRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="topic", - full_name="google.pubsub.v1.DeleteTopicRequest.topic", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A\035\n\033pubsub.googleapis.com/Topic", - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1938, - serialized_end=2010, -) - - -_DETACHSUBSCRIPTIONREQUEST = _descriptor.Descriptor( - name="DetachSubscriptionRequest", - full_name="google.pubsub.v1.DetachSubscriptionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.DetachSubscriptionRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2012, - serialized_end=2105, -) - - -_DETACHSUBSCRIPTIONRESPONSE = _descriptor.Descriptor( - name="DetachSubscriptionResponse", - full_name="google.pubsub.v1.DetachSubscriptionResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2107, - serialized_end=2135, -) - - -_SUBSCRIPTION_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.pubsub.v1.Subscription.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - 
_descriptor.FieldDescriptor( - name="key", - full_name="google.pubsub.v1.Subscription.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.pubsub.v1.Subscription.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=543, - serialized_end=588, -) - -_SUBSCRIPTION = _descriptor.Descriptor( - name="Subscription", - full_name="google.pubsub.v1.Subscription", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.pubsub.v1.Subscription.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="topic", - full_name="google.pubsub.v1.Subscription.topic", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A\035\n\033pubsub.googleapis.com/Topic", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="push_config", - full_name="google.pubsub.v1.Subscription.push_config", - index=2, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="ack_deadline_seconds", - full_name="google.pubsub.v1.Subscription.ack_deadline_seconds", - index=3, - number=5, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="retain_acked_messages", - full_name="google.pubsub.v1.Subscription.retain_acked_messages", - index=4, - number=7, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="message_retention_duration", - full_name="google.pubsub.v1.Subscription.message_retention_duration", - index=5, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), 
- _descriptor.FieldDescriptor( - name="labels", - full_name="google.pubsub.v1.Subscription.labels", - index=6, - number=9, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="enable_message_ordering", - full_name="google.pubsub.v1.Subscription.enable_message_ordering", - index=7, - number=10, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="expiration_policy", - full_name="google.pubsub.v1.Subscription.expiration_policy", - index=8, - number=11, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.pubsub.v1.Subscription.filter", - index=9, - number=12, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="dead_letter_policy", - full_name="google.pubsub.v1.Subscription.dead_letter_policy", - index=10, - number=13, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="retry_policy", - full_name="google.pubsub.v1.Subscription.retry_policy", - index=11, - number=14, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="detached", - full_name="google.pubsub.v1.Subscription.detached", - index=12, - number=15, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_SUBSCRIPTION_LABELSENTRY,], - enum_types=[], - serialized_options=b'\352AU\n"pubsub.googleapis.com/Subscription\022/projects/{project}/subscriptions/{subscription}', - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2138, - serialized_end=2842, -) - - -_RETRYPOLICY = _descriptor.Descriptor( - name="RetryPolicy", - full_name="google.pubsub.v1.RetryPolicy", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="minimum_backoff", - full_name="google.pubsub.v1.RetryPolicy.minimum_backoff", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="maximum_backoff", - full_name="google.pubsub.v1.RetryPolicy.maximum_backoff", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2844, - serialized_end=2961, -) - - -_DEADLETTERPOLICY = _descriptor.Descriptor( - name="DeadLetterPolicy", - full_name="google.pubsub.v1.DeadLetterPolicy", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="dead_letter_topic", - full_name="google.pubsub.v1.DeadLetterPolicy.dead_letter_topic", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_delivery_attempts", - full_name="google.pubsub.v1.DeadLetterPolicy.max_delivery_attempts", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - 
oneofs=[], - serialized_start=2963, - serialized_end=3039, -) - - -_EXPIRATIONPOLICY = _descriptor.Descriptor( - name="ExpirationPolicy", - full_name="google.pubsub.v1.ExpirationPolicy", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="ttl", - full_name="google.pubsub.v1.ExpirationPolicy.ttl", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3041, - serialized_end=3099, -) - - -_PUSHCONFIG_OIDCTOKEN = _descriptor.Descriptor( - name="OidcToken", - full_name="google.pubsub.v1.PushConfig.OidcToken", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="service_account_email", - full_name="google.pubsub.v1.PushConfig.OidcToken.service_account_email", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="audience", - full_name="google.pubsub.v1.PushConfig.OidcToken.audience", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3267, - serialized_end=3327, -) - -_PUSHCONFIG_ATTRIBUTESENTRY = _descriptor.Descriptor( - name="AttributesEntry", - full_name="google.pubsub.v1.PushConfig.AttributesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.pubsub.v1.PushConfig.AttributesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.pubsub.v1.PushConfig.AttributesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=869, - serialized_end=918, -) - -_PUSHCONFIG = _descriptor.Descriptor( - name="PushConfig", - full_name="google.pubsub.v1.PushConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="push_endpoint", - full_name="google.pubsub.v1.PushConfig.push_endpoint", 
- index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="attributes", - full_name="google.pubsub.v1.PushConfig.attributes", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="oidc_token", - full_name="google.pubsub.v1.PushConfig.oidc_token", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_PUSHCONFIG_OIDCTOKEN, _PUSHCONFIG_ATTRIBUTESENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="authentication_method", - full_name="google.pubsub.v1.PushConfig.authentication_method", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=3102, - serialized_end=3403, -) - - -_RECEIVEDMESSAGE = _descriptor.Descriptor( - name="ReceivedMessage", - full_name="google.pubsub.v1.ReceivedMessage", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="ack_id", - 
full_name="google.pubsub.v1.ReceivedMessage.ack_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="message", - full_name="google.pubsub.v1.ReceivedMessage.message", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="delivery_attempt", - full_name="google.pubsub.v1.ReceivedMessage.delivery_attempt", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3405, - serialized_end=3514, -) - - -_GETSUBSCRIPTIONREQUEST = _descriptor.Descriptor( - name="GetSubscriptionRequest", - full_name="google.pubsub.v1.GetSubscriptionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.GetSubscriptionRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3516, - serialized_end=3606, -) - - -_UPDATESUBSCRIPTIONREQUEST = _descriptor.Descriptor( - name="UpdateSubscriptionRequest", - full_name="google.pubsub.v1.UpdateSubscriptionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.UpdateSubscriptionRequest.subscription", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.pubsub.v1.UpdateSubscriptionRequest.update_mask", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3609, - serialized_end=3749, -) - - -_LISTSUBSCRIPTIONSREQUEST = _descriptor.Descriptor( - name="ListSubscriptionsRequest", - full_name="google.pubsub.v1.ListSubscriptionsRequest", - filename=None, - file=DESCRIPTOR, - 
containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="project", - full_name="google.pubsub.v1.ListSubscriptionsRequest.project", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.pubsub.v1.ListSubscriptionsRequest.page_size", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.pubsub.v1.ListSubscriptionsRequest.page_token", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3752, - serialized_end=3887, -) - - -_LISTSUBSCRIPTIONSRESPONSE = _descriptor.Descriptor( - name="ListSubscriptionsResponse", - full_name="google.pubsub.v1.ListSubscriptionsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscriptions", 
- full_name="google.pubsub.v1.ListSubscriptionsResponse.subscriptions", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.pubsub.v1.ListSubscriptionsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3889, - serialized_end=3996, -) - - -_DELETESUBSCRIPTIONREQUEST = _descriptor.Descriptor( - name="DeleteSubscriptionRequest", - full_name="google.pubsub.v1.DeleteSubscriptionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.DeleteSubscriptionRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - 
serialized_start=3998, - serialized_end=4091, -) - - -_MODIFYPUSHCONFIGREQUEST = _descriptor.Descriptor( - name="ModifyPushConfigRequest", - full_name="google.pubsub.v1.ModifyPushConfigRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.ModifyPushConfigRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="push_config", - full_name="google.pubsub.v1.ModifyPushConfigRequest.push_config", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4094, - serialized_end=4241, -) - - -_PULLREQUEST = _descriptor.Descriptor( - name="PullRequest", - full_name="google.pubsub.v1.PullRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.PullRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, 
- is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="return_immediately", - full_name="google.pubsub.v1.PullRequest.return_immediately", - index=1, - number=2, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\030\001\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_messages", - full_name="google.pubsub.v1.PullRequest.max_messages", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4244, - serialized_end=4385, -) - - -_PULLRESPONSE = _descriptor.Descriptor( - name="PullResponse", - full_name="google.pubsub.v1.PullResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="received_messages", - full_name="google.pubsub.v1.PullResponse.received_messages", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - 
nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4387, - serialized_end=4463, -) - - -_MODIFYACKDEADLINEREQUEST = _descriptor.Descriptor( - name="ModifyAckDeadlineRequest", - full_name="google.pubsub.v1.ModifyAckDeadlineRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.ModifyAckDeadlineRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="ack_ids", - full_name="google.pubsub.v1.ModifyAckDeadlineRequest.ack_ids", - index=1, - number=4, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="ack_deadline_seconds", - full_name="google.pubsub.v1.ModifyAckDeadlineRequest.ack_deadline_seconds", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - 
extension_ranges=[], - oneofs=[], - serialized_start=4466, - serialized_end=4615, -) - - -_ACKNOWLEDGEREQUEST = _descriptor.Descriptor( - name="AcknowledgeRequest", - full_name="google.pubsub.v1.AcknowledgeRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.AcknowledgeRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="ack_ids", - full_name="google.pubsub.v1.AcknowledgeRequest.ack_ids", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4617, - serialized_end=4725, -) - - -_STREAMINGPULLREQUEST = _descriptor.Descriptor( - name="StreamingPullRequest", - full_name="google.pubsub.v1.StreamingPullRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.StreamingPullRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="ack_ids", - full_name="google.pubsub.v1.StreamingPullRequest.ack_ids", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="modify_deadline_seconds", - full_name="google.pubsub.v1.StreamingPullRequest.modify_deadline_seconds", - index=2, - number=3, - type=5, - cpp_type=1, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="modify_deadline_ack_ids", - full_name="google.pubsub.v1.StreamingPullRequest.modify_deadline_ack_ids", - index=3, - number=4, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="stream_ack_deadline_seconds", - full_name="google.pubsub.v1.StreamingPullRequest.stream_ack_deadline_seconds", - index=4, - number=5, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="client_id", - full_name="google.pubsub.v1.StreamingPullRequest.client_id", - index=5, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_outstanding_messages", - full_name="google.pubsub.v1.StreamingPullRequest.max_outstanding_messages", - index=6, - number=7, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_outstanding_bytes", - full_name="google.pubsub.v1.StreamingPullRequest.max_outstanding_bytes", - index=7, - number=8, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4728, - serialized_end=5025, -) - - -_STREAMINGPULLRESPONSE = _descriptor.Descriptor( - name="StreamingPullResponse", - full_name="google.pubsub.v1.StreamingPullResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="received_messages", - full_name="google.pubsub.v1.StreamingPullResponse.received_messages", 
- index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5027, - serialized_end=5112, -) - - -_CREATESNAPSHOTREQUEST_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.pubsub.v1.CreateSnapshotRequest.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.pubsub.v1.CreateSnapshotRequest.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.pubsub.v1.CreateSnapshotRequest.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=543, - serialized_end=588, -) - -_CREATESNAPSHOTREQUEST = _descriptor.Descriptor( - name="CreateSnapshotRequest", - 
full_name="google.pubsub.v1.CreateSnapshotRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.pubsub.v1.CreateSnapshotRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A \n\036pubsub.googleapis.com/Snapshot", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.CreateSnapshotRequest.subscription", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.pubsub.v1.CreateSnapshotRequest.labels", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_CREATESNAPSHOTREQUEST_LABELSENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5115, - serialized_end=5374, -) - - -_UPDATESNAPSHOTREQUEST = _descriptor.Descriptor( - name="UpdateSnapshotRequest", - full_name="google.pubsub.v1.UpdateSnapshotRequest", - filename=None, - 
file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="snapshot", - full_name="google.pubsub.v1.UpdateSnapshotRequest.snapshot", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.pubsub.v1.UpdateSnapshotRequest.update_mask", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5377, - serialized_end=5505, -) - - -_SNAPSHOT_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.pubsub.v1.Snapshot.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.pubsub.v1.Snapshot.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.pubsub.v1.Snapshot.LabelsEntry.value", - index=1, - number=2, 
- type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=543, - serialized_end=588, -) - -_SNAPSHOT = _descriptor.Descriptor( - name="Snapshot", - full_name="google.pubsub.v1.Snapshot", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.pubsub.v1.Snapshot.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="topic", - full_name="google.pubsub.v1.Snapshot.topic", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\372A\035\n\033pubsub.googleapis.com/Topic", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="expire_time", - full_name="google.pubsub.v1.Snapshot.expire_time", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.pubsub.v1.Snapshot.labels", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_SNAPSHOT_LABELSENTRY,], - enum_types=[], - serialized_options=b"\352AI\n\036pubsub.googleapis.com/Snapshot\022'projects/{project}/snapshots/{snapshot}", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5508, - serialized_end=5811, -) - - -_GETSNAPSHOTREQUEST = _descriptor.Descriptor( - name="GetSnapshotRequest", - full_name="google.pubsub.v1.GetSnapshotRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="snapshot", - full_name="google.pubsub.v1.GetSnapshotRequest.snapshot", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A \n\036pubsub.googleapis.com/Snapshot", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5813, - serialized_end=5891, -) - - -_LISTSNAPSHOTSREQUEST = _descriptor.Descriptor( - name="ListSnapshotsRequest", - full_name="google.pubsub.v1.ListSnapshotsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - 
_descriptor.FieldDescriptor( - name="project", - full_name="google.pubsub.v1.ListSnapshotsRequest.project", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.pubsub.v1.ListSnapshotsRequest.page_size", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.pubsub.v1.ListSnapshotsRequest.page_token", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5894, - serialized_end=6025, -) - - -_LISTSNAPSHOTSRESPONSE = _descriptor.Descriptor( - name="ListSnapshotsResponse", - full_name="google.pubsub.v1.ListSnapshotsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="snapshots", - full_name="google.pubsub.v1.ListSnapshotsResponse.snapshots", - index=0, - number=1, - type=11, - 
cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.pubsub.v1.ListSnapshotsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6027, - serialized_end=6122, -) - - -_DELETESNAPSHOTREQUEST = _descriptor.Descriptor( - name="DeleteSnapshotRequest", - full_name="google.pubsub.v1.DeleteSnapshotRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="snapshot", - full_name="google.pubsub.v1.DeleteSnapshotRequest.snapshot", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A \n\036pubsub.googleapis.com/Snapshot", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6124, - serialized_end=6205, -) - - -_SEEKREQUEST = _descriptor.Descriptor( - name="SeekRequest", - 
full_name="google.pubsub.v1.SeekRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.SeekRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="time", - full_name="google.pubsub.v1.SeekRequest.time", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="snapshot", - full_name="google.pubsub.v1.SeekRequest.snapshot", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\372A \n\036pubsub.googleapis.com/Snapshot", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="target", - full_name="google.pubsub.v1.SeekRequest.target", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=6208, - serialized_end=6398, -) - - -_SEEKRESPONSE = _descriptor.Descriptor( - 
name="SeekResponse", - full_name="google.pubsub.v1.SeekResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6400, - serialized_end=6414, -) - -_TOPIC_LABELSENTRY.containing_type = _TOPIC -_TOPIC.fields_by_name["labels"].message_type = _TOPIC_LABELSENTRY -_TOPIC.fields_by_name["message_storage_policy"].message_type = _MESSAGESTORAGEPOLICY -_PUBSUBMESSAGE_ATTRIBUTESENTRY.containing_type = _PUBSUBMESSAGE -_PUBSUBMESSAGE.fields_by_name[ - "attributes" -].message_type = _PUBSUBMESSAGE_ATTRIBUTESENTRY -_PUBSUBMESSAGE.fields_by_name[ - "publish_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_UPDATETOPICREQUEST.fields_by_name["topic"].message_type = _TOPIC -_UPDATETOPICREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_PUBLISHREQUEST.fields_by_name["messages"].message_type = _PUBSUBMESSAGE -_LISTTOPICSRESPONSE.fields_by_name["topics"].message_type = _TOPIC -_SUBSCRIPTION_LABELSENTRY.containing_type = _SUBSCRIPTION -_SUBSCRIPTION.fields_by_name["push_config"].message_type = _PUSHCONFIG -_SUBSCRIPTION.fields_by_name[ - "message_retention_duration" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_SUBSCRIPTION.fields_by_name["labels"].message_type = _SUBSCRIPTION_LABELSENTRY -_SUBSCRIPTION.fields_by_name["expiration_policy"].message_type = _EXPIRATIONPOLICY -_SUBSCRIPTION.fields_by_name["dead_letter_policy"].message_type = _DEADLETTERPOLICY -_SUBSCRIPTION.fields_by_name["retry_policy"].message_type = _RETRYPOLICY -_RETRYPOLICY.fields_by_name[ - "minimum_backoff" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_RETRYPOLICY.fields_by_name[ - "maximum_backoff" -].message_type = 
google_dot_protobuf_dot_duration__pb2._DURATION -_EXPIRATIONPOLICY.fields_by_name[ - "ttl" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_PUSHCONFIG_OIDCTOKEN.containing_type = _PUSHCONFIG -_PUSHCONFIG_ATTRIBUTESENTRY.containing_type = _PUSHCONFIG -_PUSHCONFIG.fields_by_name["attributes"].message_type = _PUSHCONFIG_ATTRIBUTESENTRY -_PUSHCONFIG.fields_by_name["oidc_token"].message_type = _PUSHCONFIG_OIDCTOKEN -_PUSHCONFIG.oneofs_by_name["authentication_method"].fields.append( - _PUSHCONFIG.fields_by_name["oidc_token"] -) -_PUSHCONFIG.fields_by_name["oidc_token"].containing_oneof = _PUSHCONFIG.oneofs_by_name[ - "authentication_method" -] -_RECEIVEDMESSAGE.fields_by_name["message"].message_type = _PUBSUBMESSAGE -_UPDATESUBSCRIPTIONREQUEST.fields_by_name["subscription"].message_type = _SUBSCRIPTION -_UPDATESUBSCRIPTIONREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_LISTSUBSCRIPTIONSRESPONSE.fields_by_name["subscriptions"].message_type = _SUBSCRIPTION -_MODIFYPUSHCONFIGREQUEST.fields_by_name["push_config"].message_type = _PUSHCONFIG -_PULLRESPONSE.fields_by_name["received_messages"].message_type = _RECEIVEDMESSAGE -_STREAMINGPULLRESPONSE.fields_by_name[ - "received_messages" -].message_type = _RECEIVEDMESSAGE -_CREATESNAPSHOTREQUEST_LABELSENTRY.containing_type = _CREATESNAPSHOTREQUEST -_CREATESNAPSHOTREQUEST.fields_by_name[ - "labels" -].message_type = _CREATESNAPSHOTREQUEST_LABELSENTRY -_UPDATESNAPSHOTREQUEST.fields_by_name["snapshot"].message_type = _SNAPSHOT -_UPDATESNAPSHOTREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_SNAPSHOT_LABELSENTRY.containing_type = _SNAPSHOT -_SNAPSHOT.fields_by_name[ - "expire_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_SNAPSHOT.fields_by_name["labels"].message_type = _SNAPSHOT_LABELSENTRY -_LISTSNAPSHOTSRESPONSE.fields_by_name["snapshots"].message_type = 
_SNAPSHOT -_SEEKREQUEST.fields_by_name[ - "time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_SEEKREQUEST.oneofs_by_name["target"].fields.append(_SEEKREQUEST.fields_by_name["time"]) -_SEEKREQUEST.fields_by_name["time"].containing_oneof = _SEEKREQUEST.oneofs_by_name[ - "target" -] -_SEEKREQUEST.oneofs_by_name["target"].fields.append( - _SEEKREQUEST.fields_by_name["snapshot"] -) -_SEEKREQUEST.fields_by_name["snapshot"].containing_oneof = _SEEKREQUEST.oneofs_by_name[ - "target" -] -DESCRIPTOR.message_types_by_name["MessageStoragePolicy"] = _MESSAGESTORAGEPOLICY -DESCRIPTOR.message_types_by_name["Topic"] = _TOPIC -DESCRIPTOR.message_types_by_name["PubsubMessage"] = _PUBSUBMESSAGE -DESCRIPTOR.message_types_by_name["GetTopicRequest"] = _GETTOPICREQUEST -DESCRIPTOR.message_types_by_name["UpdateTopicRequest"] = _UPDATETOPICREQUEST -DESCRIPTOR.message_types_by_name["PublishRequest"] = _PUBLISHREQUEST -DESCRIPTOR.message_types_by_name["PublishResponse"] = _PUBLISHRESPONSE -DESCRIPTOR.message_types_by_name["ListTopicsRequest"] = _LISTTOPICSREQUEST -DESCRIPTOR.message_types_by_name["ListTopicsResponse"] = _LISTTOPICSRESPONSE -DESCRIPTOR.message_types_by_name[ - "ListTopicSubscriptionsRequest" -] = _LISTTOPICSUBSCRIPTIONSREQUEST -DESCRIPTOR.message_types_by_name[ - "ListTopicSubscriptionsResponse" -] = _LISTTOPICSUBSCRIPTIONSRESPONSE -DESCRIPTOR.message_types_by_name[ - "ListTopicSnapshotsRequest" -] = _LISTTOPICSNAPSHOTSREQUEST -DESCRIPTOR.message_types_by_name[ - "ListTopicSnapshotsResponse" -] = _LISTTOPICSNAPSHOTSRESPONSE -DESCRIPTOR.message_types_by_name["DeleteTopicRequest"] = _DELETETOPICREQUEST -DESCRIPTOR.message_types_by_name[ - "DetachSubscriptionRequest" -] = _DETACHSUBSCRIPTIONREQUEST -DESCRIPTOR.message_types_by_name[ - "DetachSubscriptionResponse" -] = _DETACHSUBSCRIPTIONRESPONSE -DESCRIPTOR.message_types_by_name["Subscription"] = _SUBSCRIPTION -DESCRIPTOR.message_types_by_name["RetryPolicy"] = _RETRYPOLICY 
-DESCRIPTOR.message_types_by_name["DeadLetterPolicy"] = _DEADLETTERPOLICY -DESCRIPTOR.message_types_by_name["ExpirationPolicy"] = _EXPIRATIONPOLICY -DESCRIPTOR.message_types_by_name["PushConfig"] = _PUSHCONFIG -DESCRIPTOR.message_types_by_name["ReceivedMessage"] = _RECEIVEDMESSAGE -DESCRIPTOR.message_types_by_name["GetSubscriptionRequest"] = _GETSUBSCRIPTIONREQUEST -DESCRIPTOR.message_types_by_name[ - "UpdateSubscriptionRequest" -] = _UPDATESUBSCRIPTIONREQUEST -DESCRIPTOR.message_types_by_name["ListSubscriptionsRequest"] = _LISTSUBSCRIPTIONSREQUEST -DESCRIPTOR.message_types_by_name[ - "ListSubscriptionsResponse" -] = _LISTSUBSCRIPTIONSRESPONSE -DESCRIPTOR.message_types_by_name[ - "DeleteSubscriptionRequest" -] = _DELETESUBSCRIPTIONREQUEST -DESCRIPTOR.message_types_by_name["ModifyPushConfigRequest"] = _MODIFYPUSHCONFIGREQUEST -DESCRIPTOR.message_types_by_name["PullRequest"] = _PULLREQUEST -DESCRIPTOR.message_types_by_name["PullResponse"] = _PULLRESPONSE -DESCRIPTOR.message_types_by_name["ModifyAckDeadlineRequest"] = _MODIFYACKDEADLINEREQUEST -DESCRIPTOR.message_types_by_name["AcknowledgeRequest"] = _ACKNOWLEDGEREQUEST -DESCRIPTOR.message_types_by_name["StreamingPullRequest"] = _STREAMINGPULLREQUEST -DESCRIPTOR.message_types_by_name["StreamingPullResponse"] = _STREAMINGPULLRESPONSE -DESCRIPTOR.message_types_by_name["CreateSnapshotRequest"] = _CREATESNAPSHOTREQUEST -DESCRIPTOR.message_types_by_name["UpdateSnapshotRequest"] = _UPDATESNAPSHOTREQUEST -DESCRIPTOR.message_types_by_name["Snapshot"] = _SNAPSHOT -DESCRIPTOR.message_types_by_name["GetSnapshotRequest"] = _GETSNAPSHOTREQUEST -DESCRIPTOR.message_types_by_name["ListSnapshotsRequest"] = _LISTSNAPSHOTSREQUEST -DESCRIPTOR.message_types_by_name["ListSnapshotsResponse"] = _LISTSNAPSHOTSRESPONSE -DESCRIPTOR.message_types_by_name["DeleteSnapshotRequest"] = _DELETESNAPSHOTREQUEST -DESCRIPTOR.message_types_by_name["SeekRequest"] = _SEEKREQUEST -DESCRIPTOR.message_types_by_name["SeekResponse"] = _SEEKRESPONSE 
-_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -MessageStoragePolicy = _reflection.GeneratedProtocolMessageType( - "MessageStoragePolicy", - (_message.Message,), - { - "DESCRIPTOR": _MESSAGESTORAGEPOLICY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """A policy constraining the storage of messages published to the topic. - - - Attributes: - allowed_persistence_regions: - A list of IDs of GCP regions where messages that are published - to the topic may be persisted in storage. Messages published - by publishers running in non-allowed GCP regions (or running - outside of GCP altogether) will be routed for storage in one - of the allowed regions. An empty list means that no regions - are allowed, and is not a valid configuration. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.MessageStoragePolicy) - }, -) -_sym_db.RegisterMessage(MessageStoragePolicy) - -Topic = _reflection.GeneratedProtocolMessageType( - "Topic", - (_message.Message,), - { - "LabelsEntry": _reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - { - "DESCRIPTOR": _TOPIC_LABELSENTRY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2" - # @@protoc_insertion_point(class_scope:google.pubsub.v1.Topic.LabelsEntry) - }, - ), - "DESCRIPTOR": _TOPIC, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """A topic resource. - - - Attributes: - name: - Required. The name of the topic. It must have the format - ``"projects/{project}/topics/{topic}"``. ``{topic}`` must - start with a letter, and contain only letters (``[A-Za-z]``), - numbers (``[0-9]``), dashes (``-``), underscores (``_``), - periods (``.``), tildes (``~``), plus (``+``) or percent signs - (``%``). It must be between 3 and 255 characters in length, - and it must not start with ``"goog"``. - labels: - See Creating and managing labels. 
- message_storage_policy: - Policy constraining the set of Google Cloud Platform regions - where messages published to the topic may be stored. If not - present, then no constraints are in effect. - kms_key_name: - The resource name of the Cloud KMS CryptoKey to be used to - protect access to messages published on this topic. The - expected format is - ``projects/*/locations/*/keyRings/*/cryptoKeys/*``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.Topic) - }, -) -_sym_db.RegisterMessage(Topic) -_sym_db.RegisterMessage(Topic.LabelsEntry) - -PubsubMessage = _reflection.GeneratedProtocolMessageType( - "PubsubMessage", - (_message.Message,), - { - "AttributesEntry": _reflection.GeneratedProtocolMessageType( - "AttributesEntry", - (_message.Message,), - { - "DESCRIPTOR": _PUBSUBMESSAGE_ATTRIBUTESENTRY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2" - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PubsubMessage.AttributesEntry) - }, - ), - "DESCRIPTOR": _PUBSUBMESSAGE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """A message that is published by publishers and consumed by subscribers. - The message must contain either a non-empty data field or at least one - attribute. Note that client libraries represent this object - differently depending on the language. See the corresponding client - library documentation for more information. See Quotas and limits for - more information about message limits. - - - Attributes: - data: - The message data field. If this field is empty, the message - must contain at least one attribute. - attributes: - Attributes for this message. If this field is empty, the - message must contain non-empty data. This can be used to - filter messages on the subscription. - message_id: - ID of this message, assigned by the server when the message is - published. Guaranteed to be unique within the topic. 
This - value may be read by a subscriber that receives a - ``PubsubMessage`` via a ``Pull`` call or a push delivery. It - must not be populated by the publisher in a ``Publish`` call. - publish_time: - The time at which the message was published, populated by the - server when it receives the ``Publish`` call. It must not be - populated by the publisher in a ``Publish`` call. - ordering_key: - If non-empty, identifies related messages for which publish - order should be respected. If a ``Subscription`` has - ``enable_message_ordering`` set to ``true``, messages - published with the same non-empty ``ordering_key`` value will - be delivered to subscribers in the order in which they are - received by the Pub/Sub system. All ``PubsubMessage``\ s - published in a given ``PublishRequest`` must specify the same - ``ordering_key`` value. EXPERIMENTAL: This feature is part of - a closed alpha release. This API might be changed in backward- - incompatible ways and is not recommended for production use. - It is not subject to any SLA or deprecation policy. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PubsubMessage) - }, -) -_sym_db.RegisterMessage(PubsubMessage) -_sym_db.RegisterMessage(PubsubMessage.AttributesEntry) - -GetTopicRequest = _reflection.GeneratedProtocolMessageType( - "GetTopicRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETTOPICREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the GetTopic method. - - - Attributes: - topic: - Required. The name of the topic to get. Format is - ``projects/{project}/topics/{topic}``. 
- """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetTopicRequest) - }, -) -_sym_db.RegisterMessage(GetTopicRequest) - -UpdateTopicRequest = _reflection.GeneratedProtocolMessageType( - "UpdateTopicRequest", - (_message.Message,), - { - "DESCRIPTOR": _UPDATETOPICREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the UpdateTopic method. - - - Attributes: - topic: - Required. The updated topic object. - update_mask: - Required. Indicates which fields in the provided topic to - update. Must be specified and non-empty. Note that if - ``update_mask`` contains “message_storage_policy” but the - ``message_storage_policy`` is not set in the ``topic`` - provided above, then the updated value is determined by the - policy configured at the project or organization level. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateTopicRequest) - }, -) -_sym_db.RegisterMessage(UpdateTopicRequest) - -PublishRequest = _reflection.GeneratedProtocolMessageType( - "PublishRequest", - (_message.Message,), - { - "DESCRIPTOR": _PUBLISHREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the Publish method. - - - Attributes: - topic: - Required. The messages in the request will be published on - this topic. Format is ``projects/{project}/topics/{topic}``. - messages: - Required. The messages to publish. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PublishRequest) - }, -) -_sym_db.RegisterMessage(PublishRequest) - -PublishResponse = _reflection.GeneratedProtocolMessageType( - "PublishResponse", - (_message.Message,), - { - "DESCRIPTOR": _PUBLISHRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``Publish`` method. - - - Attributes: - message_ids: - The server-assigned ID of each published message, in the same - order as the messages in the request. IDs are guaranteed to be - unique within the topic. 
- """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PublishResponse) - }, -) -_sym_db.RegisterMessage(PublishResponse) - -ListTopicsRequest = _reflection.GeneratedProtocolMessageType( - "ListTopicsRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTTOPICSREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``ListTopics`` method. - - - Attributes: - project: - Required. The name of the project in which to list topics. - Format is ``projects/{project-id}``. - page_size: - Maximum number of topics to return. - page_token: - The value returned by the last ``ListTopicsResponse``; - indicates that this is a continuation of a prior - ``ListTopics`` call, and that the system should return the - next page of data. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicsRequest) - }, -) -_sym_db.RegisterMessage(ListTopicsRequest) - -ListTopicsResponse = _reflection.GeneratedProtocolMessageType( - "ListTopicsResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTTOPICSRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``ListTopics`` method. - - - Attributes: - topics: - The resulting topics. - next_page_token: - If not empty, indicates that there may be more topics that - match the request; this value should be passed in a new - ``ListTopicsRequest``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicsResponse) - }, -) -_sym_db.RegisterMessage(ListTopicsResponse) - -ListTopicSubscriptionsRequest = _reflection.GeneratedProtocolMessageType( - "ListTopicSubscriptionsRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTTOPICSUBSCRIPTIONSREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``ListTopicSubscriptions`` method. - - - Attributes: - topic: - Required. The name of the topic that subscriptions are - attached to. Format is ``projects/{project}/topics/{topic}``. 
- page_size: - Maximum number of subscription names to return. - page_token: - The value returned by the last - ``ListTopicSubscriptionsResponse``; indicates that this is a - continuation of a prior ``ListTopicSubscriptions`` call, and - that the system should return the next page of data. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSubscriptionsRequest) - }, -) -_sym_db.RegisterMessage(ListTopicSubscriptionsRequest) - -ListTopicSubscriptionsResponse = _reflection.GeneratedProtocolMessageType( - "ListTopicSubscriptionsResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTTOPICSUBSCRIPTIONSRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``ListTopicSubscriptions`` method. - - - Attributes: - subscriptions: - The names of subscriptions attached to the topic specified in - the request. - next_page_token: - If not empty, indicates that there may be more subscriptions - that match the request; this value should be passed in a new - ``ListTopicSubscriptionsRequest`` to get more subscriptions. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSubscriptionsResponse) - }, -) -_sym_db.RegisterMessage(ListTopicSubscriptionsResponse) - -ListTopicSnapshotsRequest = _reflection.GeneratedProtocolMessageType( - "ListTopicSnapshotsRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTTOPICSNAPSHOTSREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``ListTopicSnapshots`` method. - - - Attributes: - topic: - Required. The name of the topic that snapshots are attached - to. Format is ``projects/{project}/topics/{topic}``. - page_size: - Maximum number of snapshot names to return. - page_token: - The value returned by the last ``ListTopicSnapshotsResponse``; - indicates that this is a continuation of a prior - ``ListTopicSnapshots`` call, and that the system should return - the next page of data. 
- """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSnapshotsRequest) - }, -) -_sym_db.RegisterMessage(ListTopicSnapshotsRequest) - -ListTopicSnapshotsResponse = _reflection.GeneratedProtocolMessageType( - "ListTopicSnapshotsResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTTOPICSNAPSHOTSRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``ListTopicSnapshots`` method. - - - Attributes: - snapshots: - The names of the snapshots that match the request. - next_page_token: - If not empty, indicates that there may be more snapshots that - match the request; this value should be passed in a new - ``ListTopicSnapshotsRequest`` to get more snapshots. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSnapshotsResponse) - }, -) -_sym_db.RegisterMessage(ListTopicSnapshotsResponse) - -DeleteTopicRequest = _reflection.GeneratedProtocolMessageType( - "DeleteTopicRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETETOPICREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``DeleteTopic`` method. - - - Attributes: - topic: - Required. Name of the topic to delete. Format is - ``projects/{project}/topics/{topic}``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteTopicRequest) - }, -) -_sym_db.RegisterMessage(DeleteTopicRequest) - -DetachSubscriptionRequest = _reflection.GeneratedProtocolMessageType( - "DetachSubscriptionRequest", - (_message.Message,), - { - "DESCRIPTOR": _DETACHSUBSCRIPTIONREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the DetachSubscription method. - - - Attributes: - subscription: - Required. The subscription to detach. Format is - ``projects/{project}/subscriptions/{subscription}``. 
- """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.DetachSubscriptionRequest) - }, -) -_sym_db.RegisterMessage(DetachSubscriptionRequest) - -DetachSubscriptionResponse = _reflection.GeneratedProtocolMessageType( - "DetachSubscriptionResponse", - (_message.Message,), - { - "DESCRIPTOR": _DETACHSUBSCRIPTIONRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the DetachSubscription method. Reserved for future use.""", - # @@protoc_insertion_point(class_scope:google.pubsub.v1.DetachSubscriptionResponse) - }, -) -_sym_db.RegisterMessage(DetachSubscriptionResponse) - -Subscription = _reflection.GeneratedProtocolMessageType( - "Subscription", - (_message.Message,), - { - "LabelsEntry": _reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - { - "DESCRIPTOR": _SUBSCRIPTION_LABELSENTRY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2" - # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription.LabelsEntry) - }, - ), - "DESCRIPTOR": _SUBSCRIPTION, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """A subscription resource. - - - Attributes: - name: - Required. The name of the subscription. It must have the - format ``"projects/{project}/subscriptions/{subscription}"``. - ``{subscription}`` must start with a letter, and contain only - letters (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), - underscores (``_``), periods (``.``), tildes (``~``), plus - (``+``) or percent signs (``%``). It must be between 3 and 255 - characters in length, and it must not start with ``"goog"``. - topic: - Required. The name of the topic from which this subscription - is receiving messages. Format is - ``projects/{project}/topics/{topic}``. The value of this field - will be ``_deleted-topic_`` if the topic has been deleted. - push_config: - If push delivery is used with this subscription, this field is - used to configure it. 
An empty ``pushConfig`` signifies that - the subscriber will pull and ack messages using API methods. - ack_deadline_seconds: - The approximate amount of time (on a best-effort basis) - Pub/Sub waits for the subscriber to acknowledge receipt before - resending the message. In the interval after the message is - delivered and before it is acknowledged, it is considered to - be outstanding. During that time period, the message will not - be redelivered (on a best-effort basis). For pull - subscriptions, this value is used as the initial value for the - ack deadline. To override this value for a given message, call - ``ModifyAckDeadline`` with the corresponding ``ack_id`` if - using non-streaming pull or send the ``ack_id`` in a - ``StreamingModifyAckDeadlineRequest`` if using streaming pull. - The minimum custom deadline you can specify is 10 seconds. The - maximum custom deadline you can specify is 600 seconds (10 - minutes). If this parameter is 0, a default value of 10 - seconds is used. For push delivery, this value is also used - to set the request timeout for the call to the push endpoint. - If the subscriber never acknowledges the message, the Pub/Sub - system will eventually redeliver the message. - retain_acked_messages: - Indicates whether to retain acknowledged messages. If true, - then messages are not expunged from the subscription’s - backlog, even if they are acknowledged, until they fall out of - the ``message_retention_duration`` window. This must be true - if you would like to Seek to a timestamp. - message_retention_duration: - How long to retain unacknowledged messages in the - subscription’s backlog, from the moment a message is - published. If ``retain_acked_messages`` is true, then this - also configures the retention of acknowledged messages, and - thus configures how far back in time a ``Seek`` can be done. - Defaults to 7 days. Cannot be more than 7 days or less than 10 - minutes. - labels: - See Creating and managing labels. 
- enable_message_ordering: - If true, messages published with the same ``ordering_key`` in - ``PubsubMessage`` will be delivered to the subscribers in the - order in which they are received by the Pub/Sub system. - Otherwise, they may be delivered in any order. EXPERIMENTAL: - This feature is part of a closed alpha release. This API might - be changed in backward-incompatible ways and is not - recommended for production use. It is not subject to any SLA - or deprecation policy. - expiration_policy: - A policy that specifies the conditions for this subscription’s - expiration. A subscription is considered active as long as any - connected subscriber is successfully consuming messages from - the subscription or is issuing operations on the subscription. - If ``expiration_policy`` is not set, a *default policy* with - ``ttl`` of 31 days will be used. The minimum allowed value for - ``expiration_policy.ttl`` is 1 day. - filter: - An expression written in the Pub/Sub `filter language - `__. If non- - empty, then only ``PubsubMessage``\ s whose ``attributes`` - field matches the filter are delivered on this subscription. - If empty, then no messages are filtered out. - dead_letter_policy: - A policy that specifies the conditions for dead lettering - messages in this subscription. If dead_letter_policy is not - set, dead lettering is disabled. The Cloud Pub/Sub service - account associated with this subscriptions’s parent project - (i.e., service-{project_number}@gcp-sa- - pubsub.iam.gserviceaccount.com) must have permission to - Acknowledge() messages on this subscription. - retry_policy: - A policy that specifies how Pub/Sub retries message delivery - for this subscription. If not set, the default retry policy - is applied. This generally implies that messages will be - retried as soon as possible for healthy subscribers. - RetryPolicy will be triggered on NACKs or acknowledgement - deadline exceeded events for a given message. 
- detached: - Indicates whether the subscription is detached from its topic. - Detached subscriptions don’t receive messages from their topic - and don’t retain any backlog. ``Pull`` and ``StreamingPull`` - requests will return FAILED_PRECONDITION. If the subscription - is a push subscription, pushes to the endpoint will not be - made. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription) - }, -) -_sym_db.RegisterMessage(Subscription) -_sym_db.RegisterMessage(Subscription.LabelsEntry) - -RetryPolicy = _reflection.GeneratedProtocolMessageType( - "RetryPolicy", - (_message.Message,), - { - "DESCRIPTOR": _RETRYPOLICY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """A policy that specifies how Cloud Pub/Sub retries message delivery. - Retry delay will be exponential based on provided minimum and maximum - backoffs. https://en.wikipedia.org/wiki/Exponential_backoff. - RetryPolicy will be triggered on NACKs or acknowledgement deadline - exceeded events for a given message. Retry Policy is implemented on a - best effort basis. At times, the delay between consecutive deliveries - may not match the configuration. That is, delay can be more or less - than configured backoff. - - - Attributes: - minimum_backoff: - The minimum delay between consecutive deliveries of a given - message. Value should be between 0 and 600 seconds. Defaults - to 10 seconds. - maximum_backoff: - The maximum delay between consecutive deliveries of a given - message. Value should be between 0 and 600 seconds. Defaults - to 600 seconds. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.RetryPolicy) - }, -) -_sym_db.RegisterMessage(RetryPolicy) - -DeadLetterPolicy = _reflection.GeneratedProtocolMessageType( - "DeadLetterPolicy", - (_message.Message,), - { - "DESCRIPTOR": _DEADLETTERPOLICY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Dead lettering is done on a best effort basis. 
The same message might - be dead lettered multiple times. If validation on any of the fields - fails at subscription creation/updation, the create/update - subscription request will fail. - - - Attributes: - dead_letter_topic: - The name of the topic to which dead letter messages should be - published. Format is ``projects/{project}/topics/{topic}``.The - Cloud Pub/Sub service account associated with the enclosing - subscription’s parent project (i.e., - service-{project_number}@gcp-sa- - pubsub.iam.gserviceaccount.com) must have permission to - Publish() to this topic. The operation will fail if the topic - does not exist. Users should ensure that there is a - subscription attached to this topic since messages published - to a topic with no subscriptions are lost. - max_delivery_attempts: - The maximum number of delivery attempts for any message. The - value must be between 5 and 100. The number of delivery - attempts is defined as 1 + (the sum of number of NACKs and - number of times the acknowledgement deadline has been exceeded - for the message). A NACK is any call to ModifyAckDeadline - with a 0 deadline. Note that client libraries may - automatically extend ack_deadlines. This field will be - honored on a best effort basis. If this parameter is 0, a - default value of 5 is used. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeadLetterPolicy) - }, -) -_sym_db.RegisterMessage(DeadLetterPolicy) - -ExpirationPolicy = _reflection.GeneratedProtocolMessageType( - "ExpirationPolicy", - (_message.Message,), - { - "DESCRIPTOR": _EXPIRATIONPOLICY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """A policy that specifies the conditions for resource expiration (i.e., - automatic resource deletion). - - - Attributes: - ttl: - Specifies the “time-to-live” duration for an associated - resource. The resource expires if it is not active for a - period of ``ttl``. 
The definition of “activity” depends on the - type of the associated resource. The minimum and maximum - allowed values for ``ttl`` depend on the type of the - associated resource, as well. If ``ttl`` is not set, the - associated resource never expires. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ExpirationPolicy) - }, -) -_sym_db.RegisterMessage(ExpirationPolicy) - -PushConfig = _reflection.GeneratedProtocolMessageType( - "PushConfig", - (_message.Message,), - { - "OidcToken": _reflection.GeneratedProtocolMessageType( - "OidcToken", - (_message.Message,), - { - "DESCRIPTOR": _PUSHCONFIG_OIDCTOKEN, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Contains information needed for generating an `OpenID Connect token - `__. - - - Attributes: - service_account_email: - \ `Service account email - `__ to be - used for generating the OIDC token. The caller (for - CreateSubscription, UpdateSubscription, and ModifyPushConfig - RPCs) must have the iam.serviceAccounts.actAs permission for - the service account. - audience: - Audience to be used when generating OIDC token. The audience - claim identifies the recipients that the JWT is intended for. - The audience value is a single case-sensitive string. Having - multiple values (array) for the audience field is not - supported. More info about the OIDC JWT token audience here: - https://tools.ietf.org/html/rfc7519#section-4.1.3 Note: if not - specified, the Push endpoint URL will be used. 
- """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig.OidcToken) - }, - ), - "AttributesEntry": _reflection.GeneratedProtocolMessageType( - "AttributesEntry", - (_message.Message,), - { - "DESCRIPTOR": _PUSHCONFIG_ATTRIBUTESENTRY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2" - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig.AttributesEntry) - }, - ), - "DESCRIPTOR": _PUSHCONFIG, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Configuration for a push delivery endpoint. - - - Attributes: - push_endpoint: - A URL locating the endpoint to which messages should be - pushed. For example, a Webhook endpoint might use - ``https://example.com/push``. - attributes: - Endpoint configuration attributes that can be used to control - different aspects of the message delivery. The only currently - supported attribute is ``x-goog-version``, which you can use - to change the format of the pushed message. This attribute - indicates the version of the data expected by the endpoint. - This controls the shape of the pushed message (i.e., its - fields and metadata). If not present during the - ``CreateSubscription`` call, it will default to the version of - the Pub/Sub API used to make such call. If not present in a - ``ModifyPushConfig`` call, its value will not be changed. - ``GetSubscription`` calls will always return a valid version, - even if the subscription was created without this attribute. - The only supported values for the ``x-goog-version`` attribute - are: - ``v1beta1``: uses the push format defined in the - v1beta1 Pub/Sub API. - ``v1`` or ``v1beta2``: uses the push - format defined in the v1 Pub/Sub API. For example: .. - raw:: html
attributes { "x-goog-version": "v1"
-          } 
- authentication_method: - An authentication method used by push endpoints to verify the - source of push requests. This can be used with push endpoints - that are private by default to allow requests only from the - Cloud Pub/Sub system, for example. This field is optional and - should be set only by users interested in authenticated push. - oidc_token: - If specified, Pub/Sub will generate and attach an OIDC JWT - token as an ``Authorization`` header in the HTTP request for - every pushed message. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig) - }, -) -_sym_db.RegisterMessage(PushConfig) -_sym_db.RegisterMessage(PushConfig.OidcToken) -_sym_db.RegisterMessage(PushConfig.AttributesEntry) - -ReceivedMessage = _reflection.GeneratedProtocolMessageType( - "ReceivedMessage", - (_message.Message,), - { - "DESCRIPTOR": _RECEIVEDMESSAGE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """A message and its corresponding acknowledgment ID. - - - Attributes: - ack_id: - This ID can be used to acknowledge the received message. - message: - The message. - delivery_attempt: - The approximate number of times that Cloud Pub/Sub has - attempted to deliver the associated message to a subscriber. - More precisely, this is 1 + (number of NACKs) + (number of - ack_deadline exceeds) for this message. A NACK is any call to - ModifyAckDeadline with a 0 deadline. An ack_deadline exceeds - event is whenever a message is not acknowledged within - ack_deadline. Note that ack_deadline is initially - Subscription.ackDeadlineSeconds, but may get extended - automatically by the client library. Upon the first delivery - of a given message, ``delivery_attempt`` will have a value of - 1. The value is calculated at best effort and is approximate. - If a DeadLetterPolicy is not set on the subscription, this - will be 0. 
- """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ReceivedMessage) - }, -) -_sym_db.RegisterMessage(ReceivedMessage) - -GetSubscriptionRequest = _reflection.GeneratedProtocolMessageType( - "GetSubscriptionRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETSUBSCRIPTIONREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the GetSubscription method. - - - Attributes: - subscription: - Required. The name of the subscription to get. Format is - ``projects/{project}/subscriptions/{sub}``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetSubscriptionRequest) - }, -) -_sym_db.RegisterMessage(GetSubscriptionRequest) - -UpdateSubscriptionRequest = _reflection.GeneratedProtocolMessageType( - "UpdateSubscriptionRequest", - (_message.Message,), - { - "DESCRIPTOR": _UPDATESUBSCRIPTIONREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the UpdateSubscription method. - - - Attributes: - subscription: - Required. The updated subscription object. - update_mask: - Required. Indicates which fields in the provided subscription - to update. Must be specified and non-empty. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateSubscriptionRequest) - }, -) -_sym_db.RegisterMessage(UpdateSubscriptionRequest) - -ListSubscriptionsRequest = _reflection.GeneratedProtocolMessageType( - "ListSubscriptionsRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTSUBSCRIPTIONSREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``ListSubscriptions`` method. - - - Attributes: - project: - Required. The name of the project in which to list - subscriptions. Format is ``projects/{project-id}``. - page_size: - Maximum number of subscriptions to return. 
- page_token: - The value returned by the last ``ListSubscriptionsResponse``; - indicates that this is a continuation of a prior - ``ListSubscriptions`` call, and that the system should return - the next page of data. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSubscriptionsRequest) - }, -) -_sym_db.RegisterMessage(ListSubscriptionsRequest) - -ListSubscriptionsResponse = _reflection.GeneratedProtocolMessageType( - "ListSubscriptionsResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTSUBSCRIPTIONSRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``ListSubscriptions`` method. - - - Attributes: - subscriptions: - The subscriptions that match the request. - next_page_token: - If not empty, indicates that there may be more subscriptions - that match the request; this value should be passed in a new - ``ListSubscriptionsRequest`` to get more subscriptions. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSubscriptionsResponse) - }, -) -_sym_db.RegisterMessage(ListSubscriptionsResponse) - -DeleteSubscriptionRequest = _reflection.GeneratedProtocolMessageType( - "DeleteSubscriptionRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETESUBSCRIPTIONREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the DeleteSubscription method. - - - Attributes: - subscription: - Required. The subscription to delete. Format is - ``projects/{project}/subscriptions/{sub}``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteSubscriptionRequest) - }, -) -_sym_db.RegisterMessage(DeleteSubscriptionRequest) - -ModifyPushConfigRequest = _reflection.GeneratedProtocolMessageType( - "ModifyPushConfigRequest", - (_message.Message,), - { - "DESCRIPTOR": _MODIFYPUSHCONFIGREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ModifyPushConfig method. 
- - - Attributes: - subscription: - Required. The name of the subscription. Format is - ``projects/{project}/subscriptions/{sub}``. - push_config: - Required. The push configuration for future deliveries. An - empty ``pushConfig`` indicates that the Pub/Sub system should - stop pushing messages from the given subscription and allow - messages to be pulled and acknowledged - effectively pausing - the subscription if ``Pull`` or ``StreamingPull`` is not - called. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyPushConfigRequest) - }, -) -_sym_db.RegisterMessage(ModifyPushConfigRequest) - -PullRequest = _reflection.GeneratedProtocolMessageType( - "PullRequest", - (_message.Message,), - { - "DESCRIPTOR": _PULLREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``Pull`` method. - - - Attributes: - subscription: - Required. The subscription from which messages should be - pulled. Format is ``projects/{project}/subscriptions/{sub}``. - return_immediately: - Optional. If this field set to true, the system will respond - immediately even if it there are no messages available to - return in the ``Pull`` response. Otherwise, the system may - wait (for a bounded amount of time) until at least one message - is available, rather than returning no messages. Warning: - setting this field to ``true`` is discouraged because it - adversely impacts the performance of ``Pull`` operations. We - recommend that users do not set this field. - max_messages: - Required. The maximum number of messages to return for this - request. Must be a positive integer. The Pub/Sub system may - return fewer than the number specified. 
- """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullRequest) - }, -) -_sym_db.RegisterMessage(PullRequest) - -PullResponse = _reflection.GeneratedProtocolMessageType( - "PullResponse", - (_message.Message,), - { - "DESCRIPTOR": _PULLRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``Pull`` method. - - - Attributes: - received_messages: - Received Pub/Sub messages. The list will be empty if there are - no more messages available in the backlog. For JSON, the - response can be entirely empty. The Pub/Sub system may return - fewer than the ``maxMessages`` requested even if there are - more messages available in the backlog. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullResponse) - }, -) -_sym_db.RegisterMessage(PullResponse) - -ModifyAckDeadlineRequest = _reflection.GeneratedProtocolMessageType( - "ModifyAckDeadlineRequest", - (_message.Message,), - { - "DESCRIPTOR": _MODIFYACKDEADLINEREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ModifyAckDeadline method. - - - Attributes: - subscription: - Required. The name of the subscription. Format is - ``projects/{project}/subscriptions/{sub}``. - ack_ids: - Required. List of acknowledgment IDs. - ack_deadline_seconds: - Required. The new ack deadline with respect to the time this - request was sent to the Pub/Sub system. For example, if the - value is 10, the new ack deadline will expire 10 seconds after - the ``ModifyAckDeadline`` call was made. Specifying zero might - immediately make the message available for delivery to another - subscriber client. This typically results in an increase in - the rate of message redeliveries (that is, duplicates). The - minimum deadline you can specify is 0 seconds. The maximum - deadline you can specify is 600 seconds (10 minutes). 
- """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyAckDeadlineRequest) - }, -) -_sym_db.RegisterMessage(ModifyAckDeadlineRequest) - -AcknowledgeRequest = _reflection.GeneratedProtocolMessageType( - "AcknowledgeRequest", - (_message.Message,), - { - "DESCRIPTOR": _ACKNOWLEDGEREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the Acknowledge method. - - - Attributes: - subscription: - Required. The subscription whose message is being - acknowledged. Format is - ``projects/{project}/subscriptions/{sub}``. - ack_ids: - Required. The acknowledgment ID for the messages being - acknowledged that was returned by the Pub/Sub system in the - ``Pull`` response. Must not be empty. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.AcknowledgeRequest) - }, -) -_sym_db.RegisterMessage(AcknowledgeRequest) - -StreamingPullRequest = _reflection.GeneratedProtocolMessageType( - "StreamingPullRequest", - (_message.Message,), - { - "DESCRIPTOR": _STREAMINGPULLREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``StreamingPull`` streaming RPC method. This request - is used to establish the initial stream as well as to stream - acknowledgements and ack deadline modifications from the client to the - server. - - - Attributes: - subscription: - Required. The subscription for which to initialize the new - stream. This must be provided in the first request on the - stream, and must not be set in subsequent requests from client - to server. Format is - ``projects/{project}/subscriptions/{sub}``. - ack_ids: - List of acknowledgement IDs for acknowledging previously - received messages (received on this stream or a different - stream). If an ack ID has expired, the corresponding message - may be redelivered later. Acknowledging a message more than - once will not result in an error. 
If the acknowledgement ID is - malformed, the stream will be aborted with status - ``INVALID_ARGUMENT``. - modify_deadline_seconds: - The list of new ack deadlines for the IDs listed in - ``modify_deadline_ack_ids``. The size of this list must be the - same as the size of ``modify_deadline_ack_ids``. If it differs - the stream will be aborted with ``INVALID_ARGUMENT``. Each - element in this list is applied to the element in the same - position in ``modify_deadline_ack_ids``. The new ack deadline - is with respect to the time this request was sent to the - Pub/Sub system. Must be >= 0. For example, if the value is 10, - the new ack deadline will expire 10 seconds after this request - is received. If the value is 0, the message is immediately - made available for another streaming or non-streaming pull - request. If the value is < 0 (an error), the stream will be - aborted with status ``INVALID_ARGUMENT``. - modify_deadline_ack_ids: - List of acknowledgement IDs whose deadline will be modified - based on the corresponding element in - ``modify_deadline_seconds``. This field can be used to - indicate that more time is needed to process a message by the - subscriber, or to make the message available for redelivery if - the processing was interrupted. - stream_ack_deadline_seconds: - Required. The ack deadline to use for the stream. This must be - provided in the first request on the stream, but it can also - be updated on subsequent requests from client to server. The - minimum deadline you can specify is 10 seconds. The maximum - deadline you can specify is 600 seconds (10 minutes). - client_id: - A unique identifier that is used to distinguish client - instances from each other. Only needs to be provided on the - initial request. When a stream disconnects and reconnects for - the same stream, the client_id should be set to the same value - so that state associated with the old stream can be - transferred to the new stream. 
The same client_id should not - be used for different client instances. - max_outstanding_messages: - Flow control settings for the maximum number of outstanding - messages. When there are ``max_outstanding_messages`` or more - currently sent to the streaming pull client that have not yet - been acked or nacked, the server stops sending more messages. - The sending of messages resumes once the number of outstanding - messages is less than this value. If the value is <= 0, there - is no limit to the number of outstanding messages. This - property can only be set on the initial StreamingPullRequest. - If it is set on a subsequent request, the stream will be - aborted with status ``INVALID_ARGUMENT``. - max_outstanding_bytes: - Flow control settings for the maximum number of outstanding - bytes. When there are ``max_outstanding_bytes`` or more worth - of messages currently sent to the streaming pull client that - have not yet been acked or nacked, the server will stop - sending more messages. The sending of messages resumes once - the number of outstanding bytes is less than this value. If - the value is <= 0, there is no limit to the number of - outstanding bytes. This property can only be set on the - initial StreamingPullRequest. If it is set on a subsequent - request, the stream will be aborted with status - ``INVALID_ARGUMENT``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.StreamingPullRequest) - }, -) -_sym_db.RegisterMessage(StreamingPullRequest) - -StreamingPullResponse = _reflection.GeneratedProtocolMessageType( - "StreamingPullResponse", - (_message.Message,), - { - "DESCRIPTOR": _STREAMINGPULLRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``StreamingPull`` method. This response is used to - stream messages from the server to the client. - - - Attributes: - received_messages: - Received Pub/Sub messages. This will not be empty. 
- """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.StreamingPullResponse) - }, -) -_sym_db.RegisterMessage(StreamingPullResponse) - -CreateSnapshotRequest = _reflection.GeneratedProtocolMessageType( - "CreateSnapshotRequest", - (_message.Message,), - { - "LabelsEntry": _reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - { - "DESCRIPTOR": _CREATESNAPSHOTREQUEST_LABELSENTRY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2" - # @@protoc_insertion_point(class_scope:google.pubsub.v1.CreateSnapshotRequest.LabelsEntry) - }, - ), - "DESCRIPTOR": _CREATESNAPSHOTREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``CreateSnapshot`` method. - - - Attributes: - name: - Required. User-provided name for this snapshot. If the name is - not provided in the request, the server will assign a random - name for this snapshot on the same project as the - subscription. Note that for REST API requests, you must - specify a name. See the resource name rules. Format is - ``projects/{project}/snapshots/{snap}``. - subscription: - Required. The subscription whose backlog the snapshot retains. - Specifically, the created snapshot is guaranteed to retain: - (a) The existing backlog on the subscription. More precisely, - this is defined as the messages in the subscription’s backlog - that are unacknowledged upon the successful completion of the - ``CreateSnapshot`` request; as well as: (b) Any messages - published to the subscription’s topic following the successful - completion of the CreateSnapshot request. Format is - ``projects/{project}/subscriptions/{sub}``. - labels: - See Creating and managing labels. 
- """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.CreateSnapshotRequest) - }, -) -_sym_db.RegisterMessage(CreateSnapshotRequest) -_sym_db.RegisterMessage(CreateSnapshotRequest.LabelsEntry) - -UpdateSnapshotRequest = _reflection.GeneratedProtocolMessageType( - "UpdateSnapshotRequest", - (_message.Message,), - { - "DESCRIPTOR": _UPDATESNAPSHOTREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the UpdateSnapshot method. - - - Attributes: - snapshot: - Required. The updated snapshot object. - update_mask: - Required. Indicates which fields in the provided snapshot to - update. Must be specified and non-empty. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateSnapshotRequest) - }, -) -_sym_db.RegisterMessage(UpdateSnapshotRequest) - -Snapshot = _reflection.GeneratedProtocolMessageType( - "Snapshot", - (_message.Message,), - { - "LabelsEntry": _reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - { - "DESCRIPTOR": _SNAPSHOT_LABELSENTRY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2" - # @@protoc_insertion_point(class_scope:google.pubsub.v1.Snapshot.LabelsEntry) - }, - ), - "DESCRIPTOR": _SNAPSHOT, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """A snapshot resource. Snapshots are used in Seek operations, which - allow you to manage message acknowledgments in bulk. That is, you can - set the acknowledgment state of messages in an existing subscription - to the state captured by a snapshot. - - - Attributes: - name: - The name of the snapshot. - topic: - The name of the topic from which this snapshot is retaining - messages. - expire_time: - The snapshot is guaranteed to exist up until this time. A - newly-created snapshot expires no later than 7 days from the - time of its creation. Its exact lifetime is determined at - creation by the existing backlog in the source subscription. 
- Specifically, the lifetime of the snapshot is ``7 days - (age - of oldest unacked message in the subscription)``. For example, - consider a subscription whose oldest unacked message is 3 days - old. If a snapshot is created from this subscription, the - snapshot – which will always capture this 3-day-old backlog as - long as the snapshot exists – will expire in 4 days. The - service will refuse to create a snapshot that would expire in - less than 1 hour after creation. - labels: - See Creating and managing labels. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.Snapshot) - }, -) -_sym_db.RegisterMessage(Snapshot) -_sym_db.RegisterMessage(Snapshot.LabelsEntry) - -GetSnapshotRequest = _reflection.GeneratedProtocolMessageType( - "GetSnapshotRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETSNAPSHOTREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the GetSnapshot method. - - - Attributes: - snapshot: - Required. The name of the snapshot to get. Format is - ``projects/{project}/snapshots/{snap}``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetSnapshotRequest) - }, -) -_sym_db.RegisterMessage(GetSnapshotRequest) - -ListSnapshotsRequest = _reflection.GeneratedProtocolMessageType( - "ListSnapshotsRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTSNAPSHOTSREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``ListSnapshots`` method. - - - Attributes: - project: - Required. The name of the project in which to list snapshots. - Format is ``projects/{project-id}``. - page_size: - Maximum number of snapshots to return. - page_token: - The value returned by the last ``ListSnapshotsResponse``; - indicates that this is a continuation of a prior - ``ListSnapshots`` call, and that the system should return the - next page of data. 
- """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSnapshotsRequest) - }, -) -_sym_db.RegisterMessage(ListSnapshotsRequest) - -ListSnapshotsResponse = _reflection.GeneratedProtocolMessageType( - "ListSnapshotsResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTSNAPSHOTSRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``ListSnapshots`` method. - - - Attributes: - snapshots: - The resulting snapshots. - next_page_token: - If not empty, indicates that there may be more snapshot that - match the request; this value should be passed in a new - ``ListSnapshotsRequest``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSnapshotsResponse) - }, -) -_sym_db.RegisterMessage(ListSnapshotsResponse) - -DeleteSnapshotRequest = _reflection.GeneratedProtocolMessageType( - "DeleteSnapshotRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETESNAPSHOTREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``DeleteSnapshot`` method. - - - Attributes: - snapshot: - Required. The name of the snapshot to delete. Format is - ``projects/{project}/snapshots/{snap}``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteSnapshotRequest) - }, -) -_sym_db.RegisterMessage(DeleteSnapshotRequest) - -SeekRequest = _reflection.GeneratedProtocolMessageType( - "SeekRequest", - (_message.Message,), - { - "DESCRIPTOR": _SEEKREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``Seek`` method. - - - Attributes: - subscription: - Required. The subscription to affect. - time: - The time to seek to. Messages retained in the subscription - that were published before this time are marked as - acknowledged, and messages retained in the subscription that - were published after this time are marked as unacknowledged. 
- Note that this operation affects only those messages retained - in the subscription (configured by the combination of - ``message_retention_duration`` and ``retain_acked_messages``). - For example, if ``time`` corresponds to a point before the - message retention window (or to a point before the system’s - notion of the subscription creation time), only retained - messages will be marked as unacknowledged, and already- - expunged messages will not be restored. - snapshot: - The snapshot to seek to. The snapshot’s topic must be the same - as that of the provided subscription. Format is - ``projects/{project}/snapshots/{snap}``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekRequest) - }, -) -_sym_db.RegisterMessage(SeekRequest) - -SeekResponse = _reflection.GeneratedProtocolMessageType( - "SeekResponse", - (_message.Message,), - { - "DESCRIPTOR": _SEEKRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``Seek`` method (this response is empty).""", - # @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekResponse) - }, -) -_sym_db.RegisterMessage(SeekResponse) - - -DESCRIPTOR._options = None -_TOPIC_LABELSENTRY._options = None -_TOPIC.fields_by_name["name"]._options = None -_TOPIC._options = None -_PUBSUBMESSAGE_ATTRIBUTESENTRY._options = None -_GETTOPICREQUEST.fields_by_name["topic"]._options = None -_UPDATETOPICREQUEST.fields_by_name["topic"]._options = None -_UPDATETOPICREQUEST.fields_by_name["update_mask"]._options = None -_PUBLISHREQUEST.fields_by_name["topic"]._options = None -_PUBLISHREQUEST.fields_by_name["messages"]._options = None -_LISTTOPICSREQUEST.fields_by_name["project"]._options = None -_LISTTOPICSUBSCRIPTIONSREQUEST.fields_by_name["topic"]._options = None -_LISTTOPICSUBSCRIPTIONSRESPONSE.fields_by_name["subscriptions"]._options = None -_LISTTOPICSNAPSHOTSREQUEST.fields_by_name["topic"]._options = None -_DELETETOPICREQUEST.fields_by_name["topic"]._options = None 
-_DETACHSUBSCRIPTIONREQUEST.fields_by_name["subscription"]._options = None -_SUBSCRIPTION_LABELSENTRY._options = None -_SUBSCRIPTION.fields_by_name["name"]._options = None -_SUBSCRIPTION.fields_by_name["topic"]._options = None -_SUBSCRIPTION._options = None -_PUSHCONFIG_ATTRIBUTESENTRY._options = None -_GETSUBSCRIPTIONREQUEST.fields_by_name["subscription"]._options = None -_UPDATESUBSCRIPTIONREQUEST.fields_by_name["subscription"]._options = None -_UPDATESUBSCRIPTIONREQUEST.fields_by_name["update_mask"]._options = None -_LISTSUBSCRIPTIONSREQUEST.fields_by_name["project"]._options = None -_DELETESUBSCRIPTIONREQUEST.fields_by_name["subscription"]._options = None -_MODIFYPUSHCONFIGREQUEST.fields_by_name["subscription"]._options = None -_MODIFYPUSHCONFIGREQUEST.fields_by_name["push_config"]._options = None -_PULLREQUEST.fields_by_name["subscription"]._options = None -_PULLREQUEST.fields_by_name["return_immediately"]._options = None -_PULLREQUEST.fields_by_name["max_messages"]._options = None -_MODIFYACKDEADLINEREQUEST.fields_by_name["subscription"]._options = None -_MODIFYACKDEADLINEREQUEST.fields_by_name["ack_ids"]._options = None -_MODIFYACKDEADLINEREQUEST.fields_by_name["ack_deadline_seconds"]._options = None -_ACKNOWLEDGEREQUEST.fields_by_name["subscription"]._options = None -_ACKNOWLEDGEREQUEST.fields_by_name["ack_ids"]._options = None -_STREAMINGPULLREQUEST.fields_by_name["subscription"]._options = None -_STREAMINGPULLREQUEST.fields_by_name["stream_ack_deadline_seconds"]._options = None -_CREATESNAPSHOTREQUEST_LABELSENTRY._options = None -_CREATESNAPSHOTREQUEST.fields_by_name["name"]._options = None -_CREATESNAPSHOTREQUEST.fields_by_name["subscription"]._options = None -_UPDATESNAPSHOTREQUEST.fields_by_name["snapshot"]._options = None -_UPDATESNAPSHOTREQUEST.fields_by_name["update_mask"]._options = None -_SNAPSHOT_LABELSENTRY._options = None -_SNAPSHOT.fields_by_name["topic"]._options = None -_SNAPSHOT._options = None 
-_GETSNAPSHOTREQUEST.fields_by_name["snapshot"]._options = None -_LISTSNAPSHOTSREQUEST.fields_by_name["project"]._options = None -_DELETESNAPSHOTREQUEST.fields_by_name["snapshot"]._options = None -_SEEKREQUEST.fields_by_name["subscription"]._options = None -_SEEKREQUEST.fields_by_name["snapshot"]._options = None - -_PUBLISHER = _descriptor.ServiceDescriptor( - name="Publisher", - full_name="google.pubsub.v1.Publisher", - file=DESCRIPTOR, - index=0, - serialized_options=b"\312A\025pubsub.googleapis.com\322AUhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub", - create_key=_descriptor._internal_create_key, - serialized_start=6417, - serialized_end=7860, - methods=[ - _descriptor.MethodDescriptor( - name="CreateTopic", - full_name="google.pubsub.v1.Publisher.CreateTopic", - index=0, - containing_service=None, - input_type=_TOPIC, - output_type=_TOPIC, - serialized_options=b"\202\323\344\223\002#\032\036/v1/{name=projects/*/topics/*}:\001*\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="UpdateTopic", - full_name="google.pubsub.v1.Publisher.UpdateTopic", - index=1, - containing_service=None, - input_type=_UPDATETOPICREQUEST, - output_type=_TOPIC, - serialized_options=b"\202\323\344\223\002)2$/v1/{topic.name=projects/*/topics/*}:\001*", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="Publish", - full_name="google.pubsub.v1.Publisher.Publish", - index=2, - containing_service=None, - input_type=_PUBLISHREQUEST, - output_type=_PUBLISHRESPONSE, - serialized_options=b"\202\323\344\223\002,\"'/v1/{topic=projects/*/topics/*}:publish:\001*\332A\016topic,messages", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetTopic", - full_name="google.pubsub.v1.Publisher.GetTopic", - index=3, - containing_service=None, - input_type=_GETTOPICREQUEST, - output_type=_TOPIC, - 
serialized_options=b"\202\323\344\223\002!\022\037/v1/{topic=projects/*/topics/*}\332A\005topic", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListTopics", - full_name="google.pubsub.v1.Publisher.ListTopics", - index=4, - containing_service=None, - input_type=_LISTTOPICSREQUEST, - output_type=_LISTTOPICSRESPONSE, - serialized_options=b"\202\323\344\223\002!\022\037/v1/{project=projects/*}/topics\332A\007project", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListTopicSubscriptions", - full_name="google.pubsub.v1.Publisher.ListTopicSubscriptions", - index=5, - containing_service=None, - input_type=_LISTTOPICSUBSCRIPTIONSREQUEST, - output_type=_LISTTOPICSUBSCRIPTIONSRESPONSE, - serialized_options=b"\202\323\344\223\002/\022-/v1/{topic=projects/*/topics/*}/subscriptions\332A\005topic", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListTopicSnapshots", - full_name="google.pubsub.v1.Publisher.ListTopicSnapshots", - index=6, - containing_service=None, - input_type=_LISTTOPICSNAPSHOTSREQUEST, - output_type=_LISTTOPICSNAPSHOTSRESPONSE, - serialized_options=b"\202\323\344\223\002+\022)/v1/{topic=projects/*/topics/*}/snapshots\332A\005topic", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DeleteTopic", - full_name="google.pubsub.v1.Publisher.DeleteTopic", - index=7, - containing_service=None, - input_type=_DELETETOPICREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b"\202\323\344\223\002!*\037/v1/{topic=projects/*/topics/*}\332A\005topic", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DetachSubscription", - full_name="google.pubsub.v1.Publisher.DetachSubscription", - index=8, - containing_service=None, - input_type=_DETACHSUBSCRIPTIONREQUEST, - output_type=_DETACHSUBSCRIPTIONRESPONSE, - 
serialized_options=b'\202\323\344\223\0026"4/v1/{subscription=projects/*/subscriptions/*}:detach', - create_key=_descriptor._internal_create_key, - ), - ], -) -_sym_db.RegisterServiceDescriptor(_PUBLISHER) - -DESCRIPTOR.services_by_name["Publisher"] = _PUBLISHER - - -_SUBSCRIBER = _descriptor.ServiceDescriptor( - name="Subscriber", - full_name="google.pubsub.v1.Subscriber", - file=DESCRIPTOR, - index=1, - serialized_options=b"\312A\025pubsub.googleapis.com\322AUhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub", - create_key=_descriptor._internal_create_key, - serialized_start=7863, - serialized_end=10554, - methods=[ - _descriptor.MethodDescriptor( - name="CreateSubscription", - full_name="google.pubsub.v1.Subscriber.CreateSubscription", - index=0, - containing_service=None, - input_type=_SUBSCRIPTION, - output_type=_SUBSCRIPTION, - serialized_options=b"\202\323\344\223\002*\032%/v1/{name=projects/*/subscriptions/*}:\001*\332A+name,topic,push_config,ack_deadline_seconds", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetSubscription", - full_name="google.pubsub.v1.Subscriber.GetSubscription", - index=1, - containing_service=None, - input_type=_GETSUBSCRIPTIONREQUEST, - output_type=_SUBSCRIPTION, - serialized_options=b"\202\323\344\223\002/\022-/v1/{subscription=projects/*/subscriptions/*}\332A\014subscription", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="UpdateSubscription", - full_name="google.pubsub.v1.Subscriber.UpdateSubscription", - index=2, - containing_service=None, - input_type=_UPDATESUBSCRIPTIONREQUEST, - output_type=_SUBSCRIPTION, - serialized_options=b"\202\323\344\223\002722/v1/{subscription.name=projects/*/subscriptions/*}:\001*", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListSubscriptions", - full_name="google.pubsub.v1.Subscriber.ListSubscriptions", - index=3, - 
containing_service=None, - input_type=_LISTSUBSCRIPTIONSREQUEST, - output_type=_LISTSUBSCRIPTIONSRESPONSE, - serialized_options=b"\202\323\344\223\002(\022&/v1/{project=projects/*}/subscriptions\332A\007project", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DeleteSubscription", - full_name="google.pubsub.v1.Subscriber.DeleteSubscription", - index=4, - containing_service=None, - input_type=_DELETESUBSCRIPTIONREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b"\202\323\344\223\002/*-/v1/{subscription=projects/*/subscriptions/*}\332A\014subscription", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ModifyAckDeadline", - full_name="google.pubsub.v1.Subscriber.ModifyAckDeadline", - index=5, - containing_service=None, - input_type=_MODIFYACKDEADLINEREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b'\202\323\344\223\002D"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\001*\332A)subscription,ack_ids,ack_deadline_seconds', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="Acknowledge", - full_name="google.pubsub.v1.Subscriber.Acknowledge", - index=6, - containing_service=None, - input_type=_ACKNOWLEDGEREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b'\202\323\344\223\002>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\001*\332A\024subscription,ack_ids', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="Pull", - full_name="google.pubsub.v1.Subscriber.Pull", - index=7, - containing_service=None, - input_type=_PULLREQUEST, - output_type=_PULLRESPONSE, - serialized_options=b'\202\323\344\223\0027"2/v1/{subscription=projects/*/subscriptions/*}:pull:\001*\332A,subscription,return_immediately,max_messages', - create_key=_descriptor._internal_create_key, - ), - 
_descriptor.MethodDescriptor( - name="StreamingPull", - full_name="google.pubsub.v1.Subscriber.StreamingPull", - index=8, - containing_service=None, - input_type=_STREAMINGPULLREQUEST, - output_type=_STREAMINGPULLRESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ModifyPushConfig", - full_name="google.pubsub.v1.Subscriber.ModifyPushConfig", - index=9, - containing_service=None, - input_type=_MODIFYPUSHCONFIGREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b'\202\323\344\223\002C">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\001*\332A\030subscription,push_config', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetSnapshot", - full_name="google.pubsub.v1.Subscriber.GetSnapshot", - index=10, - containing_service=None, - input_type=_GETSNAPSHOTREQUEST, - output_type=_SNAPSHOT, - serialized_options=b"\202\323\344\223\002'\022%/v1/{snapshot=projects/*/snapshots/*}\332A\010snapshot", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListSnapshots", - full_name="google.pubsub.v1.Subscriber.ListSnapshots", - index=11, - containing_service=None, - input_type=_LISTSNAPSHOTSREQUEST, - output_type=_LISTSNAPSHOTSRESPONSE, - serialized_options=b'\202\323\344\223\002$\022"/v1/{project=projects/*}/snapshots\332A\007project', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="CreateSnapshot", - full_name="google.pubsub.v1.Subscriber.CreateSnapshot", - index=12, - containing_service=None, - input_type=_CREATESNAPSHOTREQUEST, - output_type=_SNAPSHOT, - serialized_options=b"\202\323\344\223\002&\032!/v1/{name=projects/*/snapshots/*}:\001*\332A\021name,subscription", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="UpdateSnapshot", - 
full_name="google.pubsub.v1.Subscriber.UpdateSnapshot", - index=13, - containing_service=None, - input_type=_UPDATESNAPSHOTREQUEST, - output_type=_SNAPSHOT, - serialized_options=b"\202\323\344\223\002/2*/v1/{snapshot.name=projects/*/snapshots/*}:\001*", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DeleteSnapshot", - full_name="google.pubsub.v1.Subscriber.DeleteSnapshot", - index=14, - containing_service=None, - input_type=_DELETESNAPSHOTREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b"\202\323\344\223\002'*%/v1/{snapshot=projects/*/snapshots/*}\332A\010snapshot", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="Seek", - full_name="google.pubsub.v1.Subscriber.Seek", - index=15, - containing_service=None, - input_type=_SEEKREQUEST, - output_type=_SEEKRESPONSE, - serialized_options=b'\202\323\344\223\0027"2/v1/{subscription=projects/*/subscriptions/*}:seek:\001*', - create_key=_descriptor._internal_create_key, - ), - ], -) -_sym_db.RegisterServiceDescriptor(_SUBSCRIBER) - -DESCRIPTOR.services_by_name["Subscriber"] = _SUBSCRIBER - -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py b/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py deleted file mode 100644 index ca2cf7903..000000000 --- a/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py +++ /dev/null @@ -1,1284 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc - -from google.cloud.pubsub_v1.proto import ( - pubsub_pb2 as google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class PublisherStub(object): - """The service that an application uses to manipulate topics, and to send - messages to a topic. 
- """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.CreateTopic = channel.unary_unary( - "/google.pubsub.v1.Publisher/CreateTopic", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, - ) - self.UpdateTopic = channel.unary_unary( - "/google.pubsub.v1.Publisher/UpdateTopic", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateTopicRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, - ) - self.Publish = channel.unary_unary( - "/google.pubsub.v1.Publisher/Publish", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishResponse.FromString, - ) - self.GetTopic = channel.unary_unary( - "/google.pubsub.v1.Publisher/GetTopic", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetTopicRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, - ) - self.ListTopics = channel.unary_unary( - "/google.pubsub.v1.Publisher/ListTopics", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsResponse.FromString, - ) - self.ListTopicSubscriptions = channel.unary_unary( - "/google.pubsub.v1.Publisher/ListTopicSubscriptions", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsResponse.FromString, - ) - 
self.ListTopicSnapshots = channel.unary_unary( - "/google.pubsub.v1.Publisher/ListTopicSnapshots", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsResponse.FromString, - ) - self.DeleteTopic = channel.unary_unary( - "/google.pubsub.v1.Publisher/DeleteTopic", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteTopicRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.DetachSubscription = channel.unary_unary( - "/google.pubsub.v1.Publisher/DetachSubscription", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DetachSubscriptionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DetachSubscriptionResponse.FromString, - ) - - -class PublisherServicer(object): - """The service that an application uses to manipulate topics, and to send - messages to a topic. - """ - - def CreateTopic(self, request, context): - """Creates the given topic with the given name. See the - - resource name rules. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateTopic(self, request, context): - """Updates an existing topic. Note that certain properties of a - topic are not modifiable. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def Publish(self, request, context): - """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic - does not exist. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetTopic(self, request, context): - """Gets the configuration of a topic. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListTopics(self, request, context): - """Lists matching topics. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListTopicSubscriptions(self, request, context): - """Lists the names of the attached subscriptions on this topic. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListTopicSnapshots(self, request, context): - """Lists the names of the snapshots on this topic. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteTopic(self, request, context): - """Deletes the topic with the given name. Returns `NOT_FOUND` if the topic - does not exist. After a topic is deleted, a new topic may be created with - the same name; this is an entirely new topic with none of the old - configuration or subscriptions. Existing subscriptions to this topic are - not deleted, but their `topic` field is set to `_deleted-topic_`. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DetachSubscription(self, request, context): - """Detaches a subscription from this topic. All messages retained in the - subscription are dropped. Subsequent `Pull` and `StreamingPull` requests - will return FAILED_PRECONDITION. If the subscription is a push - subscription, pushes to the endpoint will stop. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_PublisherServicer_to_server(servicer, server): - rpc_method_handlers = { - "CreateTopic": grpc.unary_unary_rpc_method_handler( - servicer.CreateTopic, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, - ), - "UpdateTopic": grpc.unary_unary_rpc_method_handler( - servicer.UpdateTopic, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateTopicRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, - ), - "Publish": grpc.unary_unary_rpc_method_handler( - servicer.Publish, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishResponse.SerializeToString, - ), - "GetTopic": grpc.unary_unary_rpc_method_handler( - servicer.GetTopic, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetTopicRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, - ), - "ListTopics": grpc.unary_unary_rpc_method_handler( - servicer.ListTopics, - 
request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsResponse.SerializeToString, - ), - "ListTopicSubscriptions": grpc.unary_unary_rpc_method_handler( - servicer.ListTopicSubscriptions, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsResponse.SerializeToString, - ), - "ListTopicSnapshots": grpc.unary_unary_rpc_method_handler( - servicer.ListTopicSnapshots, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsResponse.SerializeToString, - ), - "DeleteTopic": grpc.unary_unary_rpc_method_handler( - servicer.DeleteTopic, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteTopicRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "DetachSubscription": grpc.unary_unary_rpc_method_handler( - servicer.DetachSubscription, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DetachSubscriptionRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DetachSubscriptionResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.pubsub.v1.Publisher", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) - - -# This class is part of an EXPERIMENTAL API. -class Publisher(object): - """The service that an application uses to manipulate topics, and to send - messages to a topic. 
- """ - - @staticmethod - def CreateTopic( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/CreateTopic", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def UpdateTopic( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/UpdateTopic", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateTopicRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def Publish( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/Publish", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetTopic( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): 
- return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/GetTopic", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetTopicRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListTopics( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/ListTopics", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListTopicSubscriptions( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/ListTopicSubscriptions", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListTopicSnapshots( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/ListTopicSnapshots", - 
google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DeleteTopic( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/DeleteTopic", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteTopicRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DetachSubscription( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/DetachSubscription", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DetachSubscriptionRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DetachSubscriptionResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - -class SubscriberStub(object): - """The service that an application uses to manipulate subscriptions and to - consume messages from a subscription via the `Pull` method or by - establishing a bi-directional stream using the `StreamingPull` method. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.CreateSubscription = channel.unary_unary( - "/google.pubsub.v1.Subscriber/CreateSubscription", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, - ) - self.GetSubscription = channel.unary_unary( - "/google.pubsub.v1.Subscriber/GetSubscription", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSubscriptionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, - ) - self.UpdateSubscription = channel.unary_unary( - "/google.pubsub.v1.Subscriber/UpdateSubscription", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSubscriptionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, - ) - self.ListSubscriptions = channel.unary_unary( - "/google.pubsub.v1.Subscriber/ListSubscriptions", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsResponse.FromString, - ) - self.DeleteSubscription = channel.unary_unary( - "/google.pubsub.v1.Subscriber/DeleteSubscription", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSubscriptionRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.ModifyAckDeadline = channel.unary_unary( - "/google.pubsub.v1.Subscriber/ModifyAckDeadline", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyAckDeadlineRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.Acknowledge = channel.unary_unary( - 
"/google.pubsub.v1.Subscriber/Acknowledge", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.AcknowledgeRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.Pull = channel.unary_unary( - "/google.pubsub.v1.Subscriber/Pull", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullResponse.FromString, - ) - self.StreamingPull = channel.stream_stream( - "/google.pubsub.v1.Subscriber/StreamingPull", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullResponse.FromString, - ) - self.ModifyPushConfig = channel.unary_unary( - "/google.pubsub.v1.Subscriber/ModifyPushConfig", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyPushConfigRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.GetSnapshot = channel.unary_unary( - "/google.pubsub.v1.Subscriber/GetSnapshot", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSnapshotRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, - ) - self.ListSnapshots = channel.unary_unary( - "/google.pubsub.v1.Subscriber/ListSnapshots", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsResponse.FromString, - ) - self.CreateSnapshot = channel.unary_unary( - "/google.pubsub.v1.Subscriber/CreateSnapshot", - 
request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.CreateSnapshotRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, - ) - self.UpdateSnapshot = channel.unary_unary( - "/google.pubsub.v1.Subscriber/UpdateSnapshot", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSnapshotRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, - ) - self.DeleteSnapshot = channel.unary_unary( - "/google.pubsub.v1.Subscriber/DeleteSnapshot", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSnapshotRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.Seek = channel.unary_unary( - "/google.pubsub.v1.Subscriber/Seek", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekResponse.FromString, - ) - - -class SubscriberServicer(object): - """The service that an application uses to manipulate subscriptions and to - consume messages from a subscription via the `Pull` method or by - establishing a bi-directional stream using the `StreamingPull` method. - """ - - def CreateSubscription(self, request, context): - """Creates a subscription to a given topic. See the - - resource name rules. - If the subscription already exists, returns `ALREADY_EXISTS`. - If the corresponding topic doesn't exist, returns `NOT_FOUND`. - - If the name is not provided in the request, the server will assign a random - name for this subscription on the same project as the topic, conforming - to the - [resource name - format](https://cloud.google.com/pubsub/docs/admin#resource_names). The - generated name is populated in the returned Subscription object. 
Note that - for REST API requests, you must specify a name in the request. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetSubscription(self, request, context): - """Gets the configuration details of a subscription. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateSubscription(self, request, context): - """Updates an existing subscription. Note that certain properties of a - subscription, such as its topic, are not modifiable. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListSubscriptions(self, request, context): - """Lists matching subscriptions. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteSubscription(self, request, context): - """Deletes an existing subscription. All messages retained in the subscription - are immediately dropped. Calls to `Pull` after deletion will return - `NOT_FOUND`. After a subscription is deleted, a new one may be created with - the same name, but the new one has no association with the old - subscription or its topic unless the same topic is specified. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ModifyAckDeadline(self, request, context): - """Modifies the ack deadline for a specific message. This method is useful - to indicate that more time is needed to process a message by the - subscriber, or to make the message available for redelivery if the - processing was interrupted. 
Note that this does not modify the - subscription-level `ackDeadlineSeconds` used for subsequent messages. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def Acknowledge(self, request, context): - """Acknowledges the messages associated with the `ack_ids` in the - `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages - from the subscription. - - Acknowledging a message whose ack deadline has expired may succeed, - but such a message may be redelivered later. Acknowledging a message more - than once will not result in an error. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def Pull(self, request, context): - """Pulls messages from the server. The server may return `UNAVAILABLE` if - there are too many concurrent pull requests pending for the given - subscription. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def StreamingPull(self, request_iterator, context): - """Establishes a stream with the server, which sends messages down to the - client. The client streams acknowledgements and ack deadline modifications - back to the server. The server will close the stream and return the status - on any error. The server may close the stream with status `UNAVAILABLE` to - reassign server-side resources, in which case, the client should - re-establish the stream. Flow control can be achieved by configuring the - underlying RPC channel. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ModifyPushConfig(self, request, context): - """Modifies the `PushConfig` for a specified subscription. 
- - This may be used to change a push subscription to a pull one (signified by - an empty `PushConfig`) or vice versa, or change the endpoint URL and other - attributes of a push subscription. Messages will accumulate for delivery - continuously through the call regardless of changes to the `PushConfig`. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetSnapshot(self, request, context): - """Gets the configuration details of a snapshot. Snapshots are used in - Seek - operations, which allow you to manage message acknowledgments in bulk. That - is, you can set the acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListSnapshots(self, request, context): - """Lists the existing snapshots. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateSnapshot(self, request, context): - """Creates a snapshot from the requested subscription. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. -

If the snapshot already exists, returns `ALREADY_EXISTS`. - If the requested subscription doesn't exist, returns `NOT_FOUND`. - If the backlog in the subscription is too old -- and the resulting snapshot - would expire in less than 1 hour -- then `FAILED_PRECONDITION` is returned. - See also the `Snapshot.expire_time` field. If the name is not provided in - the request, the server will assign a random - name for this snapshot on the same project as the subscription, conforming - to the - [resource name - format](https://cloud.google.com/pubsub/docs/admin#resource_names). The - generated name is populated in the returned Snapshot object. Note that for - REST API requests, you must specify a name in the request. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateSnapshot(self, request, context): - """Updates an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteSnapshot(self, request, context): - """Removes an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot.

- When the snapshot is deleted, all messages retained in the snapshot - are immediately dropped. After a snapshot is deleted, a new one may be - created with the same name, but the new one has no association with the old - snapshot or its subscription, unless the same subscription is specified. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def Seek(self, request, context): - """Seeks an existing subscription to a point in time or to a given snapshot, - whichever is provided in the request. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. Note that both the subscription and the snapshot - must be on the same topic. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_SubscriberServicer_to_server(servicer, server): - rpc_method_handlers = { - "CreateSubscription": grpc.unary_unary_rpc_method_handler( - servicer.CreateSubscription, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, - ), - "GetSubscription": grpc.unary_unary_rpc_method_handler( - servicer.GetSubscription, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSubscriptionRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, - ), - "UpdateSubscription": grpc.unary_unary_rpc_method_handler( - servicer.UpdateSubscription, - 
request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSubscriptionRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, - ), - "ListSubscriptions": grpc.unary_unary_rpc_method_handler( - servicer.ListSubscriptions, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsResponse.SerializeToString, - ), - "DeleteSubscription": grpc.unary_unary_rpc_method_handler( - servicer.DeleteSubscription, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSubscriptionRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "ModifyAckDeadline": grpc.unary_unary_rpc_method_handler( - servicer.ModifyAckDeadline, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyAckDeadlineRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "Acknowledge": grpc.unary_unary_rpc_method_handler( - servicer.Acknowledge, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.AcknowledgeRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "Pull": grpc.unary_unary_rpc_method_handler( - servicer.Pull, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullResponse.SerializeToString, - ), - "StreamingPull": grpc.stream_stream_rpc_method_handler( - servicer.StreamingPull, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullRequest.FromString, - 
response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullResponse.SerializeToString, - ), - "ModifyPushConfig": grpc.unary_unary_rpc_method_handler( - servicer.ModifyPushConfig, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyPushConfigRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "GetSnapshot": grpc.unary_unary_rpc_method_handler( - servicer.GetSnapshot, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSnapshotRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.SerializeToString, - ), - "ListSnapshots": grpc.unary_unary_rpc_method_handler( - servicer.ListSnapshots, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsResponse.SerializeToString, - ), - "CreateSnapshot": grpc.unary_unary_rpc_method_handler( - servicer.CreateSnapshot, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.CreateSnapshotRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.SerializeToString, - ), - "UpdateSnapshot": grpc.unary_unary_rpc_method_handler( - servicer.UpdateSnapshot, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSnapshotRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.SerializeToString, - ), - "DeleteSnapshot": grpc.unary_unary_rpc_method_handler( - servicer.DeleteSnapshot, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSnapshotRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "Seek": grpc.unary_unary_rpc_method_handler( - 
servicer.Seek, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.pubsub.v1.Subscriber", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) - - -# This class is part of an EXPERIMENTAL API. -class Subscriber(object): - """The service that an application uses to manipulate subscriptions and to - consume messages from a subscription via the `Pull` method or by - establishing a bi-directional stream using the `StreamingPull` method. - """ - - @staticmethod - def CreateSubscription( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/CreateSubscription", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetSubscription( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/GetSubscription", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSubscriptionRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def UpdateSubscription( - 
request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/UpdateSubscription", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSubscriptionRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListSubscriptions( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/ListSubscriptions", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DeleteSubscription( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/DeleteSubscription", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSubscriptionRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ModifyAckDeadline( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - 
wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/ModifyAckDeadline", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyAckDeadlineRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def Acknowledge( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/Acknowledge", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.AcknowledgeRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def Pull( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/Pull", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def StreamingPull( - request_iterator, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.stream_stream( - request_iterator, - target, - "/google.pubsub.v1.Subscriber/StreamingPull", - 
google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ModifyPushConfig( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/ModifyPushConfig", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyPushConfigRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetSnapshot( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/GetSnapshot", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSnapshotRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListSnapshots( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/ListSnapshots", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsRequest.SerializeToString, - 
google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def CreateSnapshot( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/CreateSnapshot", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.CreateSnapshotRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def UpdateSnapshot( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/UpdateSnapshot", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSnapshotRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DeleteSnapshot( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/DeleteSnapshot", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSnapshotRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - 
timeout, - metadata, - ) - - @staticmethod - def Seek( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/Seek", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) diff --git a/google/cloud/pubsub_v1/publisher/_batch/thread.py b/google/cloud/pubsub_v1/publisher/_batch/thread.py index 67c9f2de3..fc4e6ba6d 100644 --- a/google/cloud/pubsub_v1/publisher/_batch/thread.py +++ b/google/cloud/pubsub_v1/publisher/_batch/thread.py @@ -21,16 +21,19 @@ import six import google.api_core.exceptions -from google.cloud.pubsub_v1 import types +from google.api_core import gapic_v1 from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher import futures from google.cloud.pubsub_v1.publisher._batch import base +from google.pubsub_v1 import types as gapic_types _LOGGER = logging.getLogger(__name__) _CAN_COMMIT = (base.BatchStatus.ACCEPTING_MESSAGES, base.BatchStatus.STARTING) _SERVER_PUBLISH_MAX_BYTES = 10 * 1000 * 1000 # max accepted size of PublishRequest +_raw_proto_pubbsub_message = gapic_types.PubsubMessage.pb() + class Batch(base.Batch): """A batch of messages. @@ -69,10 +72,19 @@ class Batch(base.Batch): at a lower level. commit_when_full (bool): Whether to commit the batch when the batch is full. + commit_retry (Optional[google.api_core.retry.Retry]): Designation of what + errors, if any, should be retried when committing the batch. If not + provided, a default retry is used. 
""" def __init__( - self, client, topic, settings, batch_done_callback=None, commit_when_full=True + self, + client, + topic, + settings, + batch_done_callback=None, + commit_when_full=True, + commit_retry=gapic_v1.method.DEFAULT, ): self._client = client self._topic = topic @@ -92,9 +104,11 @@ def __init__( # The initial size is not zero, we need to account for the size overhead # of the PublishRequest message itself. - self._base_request_size = types.PublishRequest(topic=topic).ByteSize() + self._base_request_size = gapic_types.PublishRequest(topic=topic)._pb.ByteSize() self._size = self._base_request_size + self._commit_retry = commit_retry + @staticmethod def make_lock(): """Return a threading lock. @@ -245,9 +259,10 @@ def _commit(self): batch_transport_succeeded = True try: - # Performs retries for errors defined in retry_codes.publish in the - # publisher_client_config.py file. - response = self._client.api.publish(self._topic, self._messages) + # Performs retries for errors defined by the retry configuration. + response = self._client.api.publish( + topic=self._topic, messages=self._messages, retry=self._commit_retry + ) except google.api_core.exceptions.GoogleAPIError as exc: # We failed to publish, even after retries, so set the exception on # all futures and exit. @@ -323,8 +338,12 @@ def publish(self, message): """ # Coerce the type, just in case. - if not isinstance(message, types.PubsubMessage): - message = types.PubsubMessage(**message) + if not isinstance(message, gapic_types.PubsubMessage): + # For performance reasons, the message should be constructed by directly + # using the raw protobuf class, and only then wrapping it into the + # higher-level PubsubMessage class. 
+ vanilla_pb = _raw_proto_pubbsub_message(**message) + message = gapic_types.PubsubMessage.wrap(vanilla_pb) future = None @@ -336,7 +355,9 @@ def publish(self, message): if self.status != base.BatchStatus.ACCEPTING_MESSAGES: return - size_increase = types.PublishRequest(messages=[message]).ByteSize() + size_increase = gapic_types.PublishRequest( + messages=[message] + )._pb.ByteSize() if (self._base_request_size + size_increase) > _SERVER_PUBLISH_MAX_BYTES: err_msg = ( diff --git a/google/cloud/pubsub_v1/publisher/_sequencer/base.py b/google/cloud/pubsub_v1/publisher/_sequencer/base.py index fda5c1ee9..3cfa809f7 100644 --- a/google/cloud/pubsub_v1/publisher/_sequencer/base.py +++ b/google/cloud/pubsub_v1/publisher/_sequencer/base.py @@ -48,11 +48,14 @@ def unpause(self, message): @staticmethod @abc.abstractmethod - def publish(self, message): + def publish(self, message, retry=None): """ Publish message for this ordering key. Args: - message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. + message (~.pubsub_v1.types.PubsubMessage): + The Pub/Sub message. + retry (Optional[google.api_core.retry.Retry]): + The retry settings to apply when publishing the message. 
Returns: A class instance that conforms to Python Standard library's diff --git a/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py b/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py index d8ddb3f8f..f7c0be084 100644 --- a/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py +++ b/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py @@ -17,6 +17,7 @@ import concurrent.futures as futures import threading +from google.api_core import gapic_v1 from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher._sequencer import base as sequencer_base from google.cloud.pubsub_v1.publisher._batch import base as batch_base @@ -225,9 +226,13 @@ def unpause(self): raise RuntimeError("Ordering key is not paused.") self._state = _OrderedSequencerStatus.ACCEPTING_MESSAGES - def _create_batch(self): + def _create_batch(self, commit_retry=gapic_v1.method.DEFAULT): """ Create a new batch using the client's batch class and other stored settings. + + Args: + commit_retry (Optional[google.api_core.retry.Retry]): + The retry settings to apply when publishing the batch. """ return self._client._batch_class( client=self._client, @@ -235,13 +240,17 @@ def _create_batch(self): settings=self._client.batch_settings, batch_done_callback=self._batch_done_callback, commit_when_full=False, + commit_retry=commit_retry, ) - def publish(self, message): + def publish(self, message, retry=gapic_v1.method.DEFAULT): """ Publish message for this ordering key. Args: - message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. + message (~.pubsub_v1.types.PubsubMessage): + The Pub/Sub message. + retry (Optional[google.api_core.retry.Retry]): + The retry settings to apply when publishing the message. Returns: A class instance that conforms to Python Standard library's @@ -278,13 +287,13 @@ def publish(self, message): ), "Publish is only allowed in accepting-messages state." 
if not self._ordered_batches: - new_batch = self._create_batch() + new_batch = self._create_batch(commit_retry=retry) self._ordered_batches.append(new_batch) batch = self._ordered_batches[-1] future = batch.publish(message) while future is None: - batch = self._create_batch() + batch = self._create_batch(commit_retry=retry) self._ordered_batches.append(batch) future = batch.publish(message) diff --git a/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py b/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py index dff114630..d343ed945 100644 --- a/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py +++ b/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +from google.api_core import gapic_v1 + from google.cloud.pubsub_v1.publisher._sequencer import base @@ -75,9 +77,13 @@ def unpause(self): """ Not relevant for this class. """ raise NotImplementedError - def _create_batch(self): + def _create_batch(self, commit_retry=gapic_v1.method.DEFAULT): """ Create a new batch using the client's batch class and other stored settings. + + Args: + commit_retry (Optional[google.api_core.retry.Retry]): + The retry settings to apply when publishing the batch. """ return self._client._batch_class( client=self._client, @@ -85,13 +91,17 @@ def _create_batch(self): settings=self._client.batch_settings, batch_done_callback=None, commit_when_full=True, + commit_retry=commit_retry, ) - def publish(self, message): + def publish(self, message, retry=gapic_v1.method.DEFAULT): """ Batch message into existing or new batch. Args: - message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. + message (~.pubsub_v1.types.PubsubMessage): + The Pub/Sub message. + retry (Optional[google.api_core.retry.Retry]): + The retry settings to apply when publishing the message. 
Returns: ~google.api_core.future.Future: An object conforming to @@ -109,7 +119,7 @@ def publish(self, message): raise RuntimeError("Unordered sequencer already stopped.") if not self._current_batch: - newbatch = self._create_batch() + newbatch = self._create_batch(commit_retry=retry) self._current_batch = newbatch batch = self._current_batch @@ -119,7 +129,7 @@ def publish(self, message): future = batch.publish(message) # batch is full, triggering commit_when_full if future is None: - batch = self._create_batch() + batch = self._create_batch(commit_retry=retry) # At this point, we lose track of the old batch, but we don't # care since it's already committed (because it was full.) self._current_batch = batch diff --git a/google/cloud/pubsub_v1/publisher/client.py b/google/cloud/pubsub_v1/publisher/client.py index 8dbbea634..ea371190c 100644 --- a/google/cloud/pubsub_v1/publisher/client.py +++ b/google/cloud/pubsub_v1/publisher/client.py @@ -24,19 +24,23 @@ import grpc import six +from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.oauth2 import service_account from google.cloud.pubsub_v1 import _gapic from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.gapic import publisher_client -from google.cloud.pubsub_v1.gapic.transports import publisher_grpc_transport from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher import futures from google.cloud.pubsub_v1.publisher._batch import thread from google.cloud.pubsub_v1.publisher._sequencer import ordered_sequencer from google.cloud.pubsub_v1.publisher._sequencer import unordered_sequencer from google.cloud.pubsub_v1.publisher.flow_controller import FlowController +from google.pubsub_v1 import types as gapic_types +from google.pubsub_v1.services.publisher import client as publisher_client +from google.pubsub_v1.services.publisher.transports import ( + grpc as publisher_grpc_transport, +) __version__ = 
pkg_resources.get_distribution("google-cloud-pubsub").version @@ -48,6 +52,8 @@ "from_service_account_json", ) +_raw_proto_pubbsub_message = gapic_types.PubsubMessage.pb() + def _set_nested_value(container, value, keys): current = container @@ -75,11 +81,8 @@ class Client(object): arguments to the underlying :class:`~google.cloud.pubsub_v1.gapic.publisher_client.PublisherClient`. Generally you should not need to set additional keyword - arguments. Optionally, publish retry settings can be set via - ``client_config`` where user-provided retry configurations are - applied to default retry settings. And regional endpoints can be - set via ``client_options`` that takes a single key-value pair that - defines the endpoint. + arguments. Regional endpoints can be set via ``client_options`` that + takes a single key-value pair that defines the endpoint. Example: @@ -103,19 +106,6 @@ class Client(object): ), ), - # Optional - client_config = { - "interfaces": { - "google.pubsub.v1.Publisher": { - "retry_params": { - "messaging": { - 'total_timeout_millis': 650000, # default: 600000 - } - } - } - } - }, - # Optional client_options = { "api_endpoint": REGIONAL_ENDPOINT @@ -173,22 +163,6 @@ def __init__(self, batch_settings=(), publisher_options=(), **kwargs): # For a transient failure, retry publishing the message infinitely. self.publisher_options = types.PublisherOptions(*publisher_options) self._enable_message_ordering = self.publisher_options[0] - if self._enable_message_ordering: - # Set retry timeout to "infinite" when message ordering is enabled. - # Note that this then also impacts messages added with an empty ordering - # key. - client_config = _set_nested_value( - kwargs.pop("client_config", {}), - 2 ** 32, - [ - "interfaces", - "google.pubsub.v1.Publisher", - "retry_params", - "messaging", - "total_timeout_millis", - ], - ) - kwargs["client_config"] = client_config # Add the metrics headers, and instantiate the underlying GAPIC # client. 
@@ -292,7 +266,9 @@ def resume_publish(self, topic, ordering_key): else: sequencer.unpause() - def publish(self, topic, data, ordering_key="", **attrs): + def publish( + self, topic, data, ordering_key="", retry=gapic_v1.method.DEFAULT, **attrs + ): """Publish a single message. .. note:: @@ -327,6 +303,9 @@ def publish(self, topic, data, ordering_key="", **attrs): enabled for this client to use this feature. EXPERIMENTAL: This feature is currently available in a closed alpha. Please contact the Cloud Pub/Sub team to use it. + retry (Optional[google.api_core.retry.Retry]): Designation of what + errors, if any, should be retried. If `ordering_key` is specified, + the total retry deadline will be changed to "infinity". attrs (Mapping[str, str]): A dictionary of attributes to be sent as metadata. (These may be text strings or byte strings.) @@ -369,10 +348,13 @@ def publish(self, topic, data, ordering_key="", **attrs): "be sent as text strings." ) - # Create the Pub/Sub message object. - message = types.PubsubMessage( + # Create the Pub/Sub message object. For performance reasons, the message + # should be constructed by directly using the raw protobuf class, and only + # then wrapping it into the higher-level PubsubMessage class. + vanilla_pb = _raw_proto_pubbsub_message( data=data, ordering_key=ordering_key, attributes=attrs ) + message = gapic_types.PubsubMessage.wrap(vanilla_pb) # Messages should go through flow control to prevent excessive # queuing on the client side (depending on the settings). @@ -390,10 +372,19 @@ def on_publish_done(future): if self._is_stopped: raise RuntimeError("Cannot publish on a stopped publisher.") - sequencer = self._get_or_create_sequencer(topic, ordering_key) + # Set retry timeout to "infinite" when message ordering is enabled. + # Note that this then also impacts messages added with an empty + # ordering key. 
+ if self._enable_message_ordering: + if retry is gapic_v1.method.DEFAULT: + # use the default retry for the publish GRPC method as a base + transport = self.api._transport + retry = transport._wrapped_methods[transport.publish]._retry + retry = retry.with_deadline(2.0 ** 32) # Delegate the publishing to the sequencer. - future = sequencer.publish(message) + sequencer = self._get_or_create_sequencer(topic, ordering_key) + future = sequencer.publish(message, retry=retry) future.add_done_callback(on_publish_done) # Create a timer thread if necessary to enforce the batching diff --git a/google/cloud/pubsub_v1/publisher/flow_controller.py b/google/cloud/pubsub_v1/publisher/flow_controller.py index c10fadcef..300e273aa 100644 --- a/google/cloud/pubsub_v1/publisher/flow_controller.py +++ b/google/cloud/pubsub_v1/publisher/flow_controller.py @@ -88,7 +88,7 @@ def add(self, message): with self._operational_lock: if not self._would_overflow(message): self._message_count += 1 - self._total_bytes += message.ByteSize() + self._total_bytes += message._pb.ByteSize() return # Adding a message would overflow, react. @@ -101,7 +101,7 @@ def add(self, message): # load if we accepted the message. load_info = self._load_info( message_count=self._message_count + 1, - total_bytes=self._total_bytes + message.ByteSize(), + total_bytes=self._total_bytes + message._pb.ByteSize(), ) error_msg = "Flow control limits would be exceeded - {}.".format( load_info @@ -116,11 +116,11 @@ def add(self, message): # Sanity check - if a message exceeds total flow control limits all # by itself, it would block forever, thus raise error. 
if ( - message.ByteSize() > self._settings.byte_limit + message._pb.ByteSize() > self._settings.byte_limit or self._settings.message_limit < 1 ): load_info = self._load_info( - message_count=1, total_bytes=message.ByteSize() + message_count=1, total_bytes=message._pb.ByteSize() ) error_msg = ( "Total flow control limits too low for the message, " @@ -134,7 +134,7 @@ def add(self, message): if current_thread not in self._byte_reservations: self._waiting.append(current_thread) self._byte_reservations[current_thread] = _QuantityReservation( - reserved=0, needed=message.ByteSize() + reserved=0, needed=message._pb.ByteSize() ) _LOGGER.debug( @@ -151,7 +151,7 @@ def add(self, message): # Message accepted, increase the load and remove thread stats. self._message_count += 1 - self._total_bytes += message.ByteSize() + self._total_bytes += message._pb.ByteSize() self._reserved_bytes -= self._byte_reservations[current_thread].reserved del self._byte_reservations[current_thread] self._waiting.remove(current_thread) @@ -169,7 +169,7 @@ def release(self, message): with self._operational_lock: # Releasing a message decreases the load. 
self._message_count -= 1 - self._total_bytes -= message.ByteSize() + self._total_bytes -= message._pb.ByteSize() if self._message_count < 0 or self._total_bytes < 0: warnings.warn( @@ -252,7 +252,7 @@ def _would_overflow(self, message): else: enough_reserved = False - bytes_taken = self._total_bytes + self._reserved_bytes + message.ByteSize() + bytes_taken = self._total_bytes + self._reserved_bytes + message._pb.ByteSize() size_overflow = bytes_taken > self._settings.byte_limit and not enough_reserved msg_count_overflow = self._message_count + 1 > self._settings.message_limit diff --git a/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py b/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py index dd324fe21..7a8950844 100644 --- a/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py +++ b/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py @@ -21,9 +21,9 @@ import math import threading -from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber._protocol import helper_threads from google.cloud.pubsub_v1.subscriber._protocol import requests +from google.pubsub_v1 import types as gapic_types _LOGGER = logging.getLogger(__name__) @@ -140,7 +140,7 @@ def ack(self, items): total_chunks = int(math.ceil(len(items) / _ACK_IDS_BATCH_SIZE)) for _ in range(total_chunks): - request = types.StreamingPullRequest( + request = gapic_types.StreamingPullRequest( ack_ids=itertools.islice(ack_ids, _ACK_IDS_BATCH_SIZE) ) self._manager.send(request) @@ -181,7 +181,7 @@ def modify_ack_deadline(self, items): total_chunks = int(math.ceil(len(items) / _ACK_IDS_BATCH_SIZE)) for _ in range(total_chunks): - request = types.StreamingPullRequest( + request = gapic_types.StreamingPullRequest( modify_deadline_ack_ids=itertools.islice(ack_ids, _ACK_IDS_BATCH_SIZE), modify_deadline_seconds=itertools.islice(seconds, _ACK_IDS_BATCH_SIZE), ) diff --git a/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py 
b/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 4e3f24933..7476e887b 100644 --- a/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -34,6 +34,7 @@ from google.cloud.pubsub_v1.subscriber._protocol import requests import google.cloud.pubsub_v1.subscriber.message import google.cloud.pubsub_v1.subscriber.scheduler +from google.pubsub_v1 import types as gapic_types _LOGGER = logging.getLogger(__name__) _RPC_ERROR_THREAD_NAME = "Thread-OnRpcTerminated" @@ -369,7 +370,7 @@ def _send_unary_request(self, request): stream. Args: - request (types.StreamingPullRequest): The stream request to be + request (gapic_types.StreamingPullRequest): The stream request to be mapped into unary requests. """ if request.ack_ids: @@ -430,7 +431,7 @@ def heartbeat(self): ``self._UNARY_REQUESTS`` is set or not. """ if self._rpc is not None and self._rpc.is_active: - self._rpc.send(types.StreamingPullRequest()) + self._rpc.send(gapic_types.StreamingPullRequest()) def open(self, callback, on_callback_error): """Begin consuming messages. @@ -555,7 +556,7 @@ def _get_initial_request(self, stream_ack_deadline_seconds): The default message acknowledge deadline for the stream. Returns: - google.cloud.pubsub_v1.types.StreamingPullRequest: A request + google.pubsub_v1.types.StreamingPullRequest: A request suitable for being the first request on the stream (and not suitable for any other purpose). """ @@ -569,7 +570,7 @@ def _get_initial_request(self, stream_ack_deadline_seconds): lease_ids = [] # Put the request together. 
- request = types.StreamingPullRequest( + request = gapic_types.StreamingPullRequest( modify_deadline_ack_ids=list(lease_ids), modify_deadline_seconds=[self.ack_deadline] * len(lease_ids), stream_ack_deadline_seconds=stream_ack_deadline_seconds, @@ -601,9 +602,13 @@ def _on_response(self, response): ) return + # IMPORTANT: Circumvent the wrapper class and operate on the raw underlying + # protobuf message to significantly gain on attribute access performance. + received_messages = response._pb.received_messages + _LOGGER.debug( "Processing %s received message(s), currently on hold %s (bytes %s).", - len(response.received_messages), + len(received_messages), self._messages_on_hold.size, self._on_hold_bytes, ) @@ -613,12 +618,12 @@ def _on_response(self, response): # received them. items = [ requests.ModAckRequest(message.ack_id, self._ack_histogram.percentile(99)) - for message in response.received_messages + for message in received_messages ] self._dispatcher.modify_ack_deadline(items) with self._pause_resume_lock: - for received_message in response.received_messages: + for received_message in received_messages: message = google.cloud.pubsub_v1.subscriber.message.Message( received_message.message, received_message.ack_id, diff --git a/google/cloud/pubsub_v1/subscriber/client.py b/google/cloud/pubsub_v1/subscriber/client.py index 00c8f2498..98d6d75c7 100644 --- a/google/cloud/pubsub_v1/subscriber/client.py +++ b/google/cloud/pubsub_v1/subscriber/client.py @@ -25,10 +25,12 @@ from google.cloud.pubsub_v1 import _gapic from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.gapic import subscriber_client -from google.cloud.pubsub_v1.gapic.transports import subscriber_grpc_transport from google.cloud.pubsub_v1.subscriber import futures from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager +from google.pubsub_v1.services.subscriber import client as subscriber_client +from google.pubsub_v1.services.subscriber.transports import ( + 
grpc as subscriber_grpc_transport, +) __version__ = pkg_resources.get_distribution("google-cloud-pubsub").version @@ -250,7 +252,7 @@ def close(self): This method is idempotent. """ - self.api.transport.channel.close() + self.api._transport.grpc_channel.close() def __enter__(self): return self diff --git a/google/cloud/pubsub_v1/subscriber/message.py b/google/cloud/pubsub_v1/subscriber/message.py index 864d697e0..c08e0a605 100644 --- a/google/cloud/pubsub_v1/subscriber/message.py +++ b/google/cloud/pubsub_v1/subscriber/message.py @@ -14,12 +14,12 @@ from __future__ import absolute_import -import datetime +import datetime as dt import json import math +import pytz import time -from google.api_core import datetime_helpers from google.cloud.pubsub_v1.subscriber._protocol import requests @@ -81,7 +81,9 @@ def __init__(self, message, ack_id, delivery_attempt, request_queue): Args: message (~.pubsub_v1.types.PubsubMessage): The message received - from Pub/Sub. + from Pub/Sub. For performance reasons it should be the raw + protobuf message wrapped by the ``PubsubMessage`` class, obtained + through the message's ``.pb()`` method. ack_id (str): The ack_id received from Pub/Sub. delivery_attempt (int): The delivery attempt counter received from Pub/Sub if a DeadLetterPolicy is set on the subscription, @@ -101,6 +103,18 @@ def __init__(self, message, ack_id, delivery_attempt, request_queue): # the default lease deadline. self._received_timestamp = time.time() + # Store the message attributes directly to speed up attribute access, i.e. + # to avoid two lookups if self._message.<attribute> pattern was used in + # properties. + self._attributes = message.attributes + self._data = message.data + self._publish_time = dt.datetime.fromtimestamp( + message.publish_time.seconds + message.publish_time.nanos / 1e9, + tz=pytz.UTC, + ) + self._ordering_key = message.ordering_key + self._size = message.ByteSize() + def __repr__(self): # Get an abbreviated version of the data.
abbv_data = self._message.data @@ -132,7 +146,7 @@ def attributes(self): .ScalarMapContainer: The message's attributes. This is a ``dict``-like object provided by ``google.protobuf``. """ - return self._message.attributes + return self._attributes @property def data(self): @@ -142,7 +156,7 @@ def data(self): bytes: The message data. This is always a bytestring; if you want a text string, call :meth:`bytes.decode`. """ - return self._message.data + return self._data @property def publish_time(self): @@ -151,21 +165,17 @@ def publish_time(self): Returns: datetime: The date and time that the message was published. """ - timestamp = self._message.publish_time - delta = datetime.timedelta( - seconds=timestamp.seconds, microseconds=timestamp.nanos // 1000 - ) - return datetime_helpers._UTC_EPOCH + delta + return self._publish_time @property def ordering_key(self): """str: the ordering key used to publish the message.""" - return self._message.ordering_key + return self._ordering_key @property def size(self): """Return the size of the underlying message, in bytes.""" - return self._message.ByteSize() + return self._size @property def ack_id(self): diff --git a/google/cloud/pubsub_v1/types.py b/google/cloud/pubsub_v1/types.py index b52b3ea60..b875f3cd2 100644 --- a/google/cloud/pubsub_v1/types.py +++ b/google/cloud/pubsub_v1/types.py @@ -16,8 +16,11 @@ import collections import enum +import inspect import sys +import proto + from google.api import http_pb2 from google.iam.v1 import iam_policy_pb2 from google.iam.v1 import policy_pb2 @@ -29,7 +32,8 @@ from google.protobuf import timestamp_pb2 from google.api_core.protobuf_helpers import get_messages -from google.cloud.pubsub_v1.proto import pubsub_pb2 + +from google.pubsub_v1.types import pubsub as pubsub_gapic_types # Define the default values for batching. 
@@ -45,23 +49,21 @@ 0.01, # max_latency: 10 ms 100, # max_messages: 100 ) - -if sys.version_info >= (3, 5): - BatchSettings.__doc__ = "The settings for batch publishing the messages." - BatchSettings.max_bytes.__doc__ = ( - "The maximum total size of the messages to collect before automatically " - "publishing the batch, including any byte size overhead of the publish " - "request itself. The maximum value is bound by the server-side limit of " - "10_000_000 bytes." - ) - BatchSettings.max_latency.__doc__ = ( - "The maximum number of seconds to wait for additional messages before " - "automatically publishing the batch." - ) - BatchSettings.max_messages.__doc__ = ( - "The maximum number of messages to collect before automatically " - "publishing the batch." - ) +BatchSettings.__doc__ = "The settings for batch publishing the messages." +BatchSettings.max_bytes.__doc__ = ( + "The maximum total size of the messages to collect before automatically " + "publishing the batch, including any byte size overhead of the publish " + "request itself. The maximum value is bound by the server-side limit of " + "10_000_000 bytes." +) +BatchSettings.max_latency.__doc__ = ( + "The maximum number of seconds to wait for additional messages before " + "automatically publishing the batch." +) +BatchSettings.max_messages.__doc__ = ( + "The maximum number of messages to collect before automatically " + "publishing the batch." +) class LimitExceededBehavior(str, enum.Enum): @@ -80,20 +82,16 @@ class LimitExceededBehavior(str, enum.Enum): 10 * BatchSettings.__new__.__defaults__[0], # byte limit LimitExceededBehavior.IGNORE, # desired behavior ) - -if sys.version_info >= (3, 5): - PublishFlowControl.__doc__ = ( - "The client flow control settings for message publishing." - ) - PublishFlowControl.message_limit.__doc__ = ( - "The maximum number of messages awaiting to be published." - ) - PublishFlowControl.byte_limit.__doc__ = ( - "The maximum total size of messages awaiting to be published." 
- ) - PublishFlowControl.limit_exceeded_behavior.__doc__ = ( - "The action to take when publish flow control limits are exceeded." - ) +PublishFlowControl.__doc__ = "The client flow control settings for message publishing." +PublishFlowControl.message_limit.__doc__ = ( + "The maximum number of messages awaiting to be published." +) +PublishFlowControl.byte_limit.__doc__ = ( + "The maximum total size of messages awaiting to be published." +) +PublishFlowControl.limit_exceeded_behavior.__doc__ = ( + "The action to take when publish flow control limits are exceeded." +) # Define the default publisher options. # @@ -106,20 +104,17 @@ class LimitExceededBehavior(str, enum.Enum): False, # enable_message_ordering: False PublishFlowControl(), # default flow control settings ) - -if sys.version_info >= (3, 5): - PublisherOptions.__doc__ = "The options for the publisher client." - PublisherOptions.enable_message_ordering.__doc__ = ( - "Whether to order messages in a batch by a supplied ordering key." - "EXPERIMENTAL: Message ordering is an alpha feature that requires " - "special permissions to use. Please contact the Cloud Pub/Sub team for " - "more information." - ) - PublisherOptions.flow_control.__doc__ = ( - "Flow control settings for message publishing by the client. By default " - "the publisher client does not do any throttling." - ) - +PublisherOptions.__doc__ = "The options for the publisher client." +PublisherOptions.enable_message_ordering.__doc__ = ( + "Whether to order messages in a batch by a supplied ordering key. " + "EXPERIMENTAL: Message ordering is an alpha feature that requires " + "special permissions to use. Please contact the Cloud Pub/Sub team for " + "more information." +) +PublisherOptions.flow_control.__doc__ = ( + "Flow control settings for message publishing by the client. By default " + "the publisher client does not do any throttling." +) # Define the type class and default values for flow control settings.
# @@ -141,29 +136,50 @@ class LimitExceededBehavior(str, enum.Enum): 1 * 60 * 60, # max_lease_duration: 1 hour. 0, # max_duration_per_lease_extension: disabled ) +FlowControl.__doc__ = ( + "The settings for controlling the rate at which messages are pulled " + "with an asynchronous subscription." +) +FlowControl.max_bytes.__doc__ = ( + "The maximum total size of received - but not yet processed - messages " + "before pausing the message stream." +) +FlowControl.max_messages.__doc__ = ( + "The maximum number of received - but not yet processed - messages before " + "pausing the message stream." +) +FlowControl.max_lease_duration.__doc__ = ( + "The maximum amount of time in seconds to hold a lease on a message " + "before dropping it from the lease management." +) +FlowControl.max_duration_per_lease_extension.__doc__ = ( + "The max amount of time in seconds for a single lease extension attempt. " + "Bounds the delay before a message redelivery if the subscriber " + "fails to extend the deadline." +) + + +# The current api core helper does not find new proto messages of type proto.Message, +# thus we need our own helper. Adjusted from +# https://github.com/googleapis/python-api-core/blob/8595f620e7d8295b6a379d6fd7979af3bef717e2/google/api_core/protobuf_helpers.py#L101-L118 +def _get_protobuf_messages(module): + """Discover all protobuf Message classes in a given import module. -if sys.version_info >= (3, 5): - FlowControl.__doc__ = ( - "The settings for controlling the rate at which messages are pulled " - "with an asynchronous subscription." - ) - FlowControl.max_bytes.__doc__ = ( - "The maximum total size of received - but not yet processed - messages " - "before pausing the message stream." - ) - FlowControl.max_messages.__doc__ = ( - "The maximum number of received - but not yet processed - messages before " - "pausing the message stream." 
- ) - FlowControl.max_lease_duration.__doc__ = ( - "The maximum amount of time in seconds to hold a lease on a message " - "before dropping it from the lease management." - ) - FlowControl.max_duration_per_lease_extension.__doc__ = ( - "The max amount of time in seconds for a single lease extension attempt. " - "Bounds the delay before a message redelivery if the subscriber " - "fails to extend the deadline." - ) + Args: + module (module): A Python module; :func:`dir` will be run against this + module to find Message subclasses. + + Returns: + dict[str, proto.Message]: A dictionary with the + Message class names as keys, and the Message subclasses themselves + as values. + """ + answer = collections.OrderedDict() + for name in dir(module): + candidate = getattr(module, name) + if inspect.isclass(candidate) and issubclass(candidate, proto.Message): + answer[name] = candidate + return answer _shared_modules = [ @@ -178,11 +194,15 @@ class LimitExceededBehavior(str, enum.Enum): timestamp_pb2, ] -_local_modules = [pubsub_pb2] - - -names = ["BatchSettings", "FlowControl"] +_local_modules = [pubsub_gapic_types] +names = [ + "BatchSettings", + "LimitExceededBehavior", + "PublishFlowControl", + "PublisherOptions", + "FlowControl", +] for module in _shared_modules: for name, message in get_messages(module).items(): @@ -190,7 +210,7 @@ class LimitExceededBehavior(str, enum.Enum): names.append(name) for module in _local_modules: - for name, message in get_messages(module).items(): + for name, message in _get_protobuf_messages(module).items(): message.__module__ = "google.cloud.pubsub_v1.types" setattr(sys.modules[__name__], name, message) names.append(name) diff --git a/google/pubsub/__init__.py b/google/pubsub/__init__.py new file mode 100644 index 000000000..f441543cc --- /dev/null +++ b/google/pubsub/__init__.py @@ -0,0 +1,114 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.pubsub_v1.services.publisher.async_client import PublisherAsyncClient +from google.pubsub_v1.services.publisher.client import PublisherClient +from google.pubsub_v1.services.subscriber.async_client import SubscriberAsyncClient +from google.pubsub_v1.services.subscriber.client import SubscriberClient +from google.pubsub_v1.types.pubsub import AcknowledgeRequest +from google.pubsub_v1.types.pubsub import CreateSnapshotRequest +from google.pubsub_v1.types.pubsub import DeadLetterPolicy +from google.pubsub_v1.types.pubsub import DeleteSnapshotRequest +from google.pubsub_v1.types.pubsub import DeleteSubscriptionRequest +from google.pubsub_v1.types.pubsub import DeleteTopicRequest +from google.pubsub_v1.types.pubsub import DetachSubscriptionRequest +from google.pubsub_v1.types.pubsub import DetachSubscriptionResponse +from google.pubsub_v1.types.pubsub import ExpirationPolicy +from google.pubsub_v1.types.pubsub import GetSnapshotRequest +from google.pubsub_v1.types.pubsub import GetSubscriptionRequest +from google.pubsub_v1.types.pubsub import GetTopicRequest +from google.pubsub_v1.types.pubsub import ListSnapshotsRequest +from google.pubsub_v1.types.pubsub import ListSnapshotsResponse +from google.pubsub_v1.types.pubsub import ListSubscriptionsRequest +from google.pubsub_v1.types.pubsub import ListSubscriptionsResponse +from google.pubsub_v1.types.pubsub import ListTopicSnapshotsRequest +from google.pubsub_v1.types.pubsub import ListTopicSnapshotsResponse +from google.pubsub_v1.types.pubsub 
import ListTopicSubscriptionsRequest +from google.pubsub_v1.types.pubsub import ListTopicSubscriptionsResponse +from google.pubsub_v1.types.pubsub import ListTopicsRequest +from google.pubsub_v1.types.pubsub import ListTopicsResponse +from google.pubsub_v1.types.pubsub import MessageStoragePolicy +from google.pubsub_v1.types.pubsub import ModifyAckDeadlineRequest +from google.pubsub_v1.types.pubsub import ModifyPushConfigRequest +from google.pubsub_v1.types.pubsub import PublishRequest +from google.pubsub_v1.types.pubsub import PublishResponse +from google.pubsub_v1.types.pubsub import PubsubMessage +from google.pubsub_v1.types.pubsub import PullRequest +from google.pubsub_v1.types.pubsub import PullResponse +from google.pubsub_v1.types.pubsub import PushConfig +from google.pubsub_v1.types.pubsub import ReceivedMessage +from google.pubsub_v1.types.pubsub import RetryPolicy +from google.pubsub_v1.types.pubsub import SeekRequest +from google.pubsub_v1.types.pubsub import SeekResponse +from google.pubsub_v1.types.pubsub import Snapshot +from google.pubsub_v1.types.pubsub import StreamingPullRequest +from google.pubsub_v1.types.pubsub import StreamingPullResponse +from google.pubsub_v1.types.pubsub import Subscription +from google.pubsub_v1.types.pubsub import Topic +from google.pubsub_v1.types.pubsub import UpdateSnapshotRequest +from google.pubsub_v1.types.pubsub import UpdateSubscriptionRequest +from google.pubsub_v1.types.pubsub import UpdateTopicRequest + +__all__ = ( + "AcknowledgeRequest", + "CreateSnapshotRequest", + "DeadLetterPolicy", + "DeleteSnapshotRequest", + "DeleteSubscriptionRequest", + "DeleteTopicRequest", + "DetachSubscriptionRequest", + "DetachSubscriptionResponse", + "ExpirationPolicy", + "GetSnapshotRequest", + "GetSubscriptionRequest", + "GetTopicRequest", + "ListSnapshotsRequest", + "ListSnapshotsResponse", + "ListSubscriptionsRequest", + "ListSubscriptionsResponse", + "ListTopicSnapshotsRequest", + "ListTopicSnapshotsResponse", + 
"ListTopicSubscriptionsRequest", + "ListTopicSubscriptionsResponse", + "ListTopicsRequest", + "ListTopicsResponse", + "MessageStoragePolicy", + "ModifyAckDeadlineRequest", + "ModifyPushConfigRequest", + "PublishRequest", + "PublishResponse", + "PublisherAsyncClient", + "PublisherClient", + "PubsubMessage", + "PullRequest", + "PullResponse", + "PushConfig", + "ReceivedMessage", + "RetryPolicy", + "SeekRequest", + "SeekResponse", + "Snapshot", + "StreamingPullRequest", + "StreamingPullResponse", + "SubscriberAsyncClient", + "SubscriberClient", + "Subscription", + "Topic", + "UpdateSnapshotRequest", + "UpdateSubscriptionRequest", + "UpdateTopicRequest", +) diff --git a/google/pubsub/py.typed b/google/pubsub/py.typed new file mode 100644 index 000000000..9b0e37433 --- /dev/null +++ b/google/pubsub/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-pubsub package uses inline types. diff --git a/google/pubsub_v1/__init__.py b/google/pubsub_v1/__init__.py new file mode 100644 index 000000000..5e7a6cc72 --- /dev/null +++ b/google/pubsub_v1/__init__.py @@ -0,0 +1,111 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from .services.publisher import PublisherClient +from .services.subscriber import SubscriberClient +from .types.pubsub import AcknowledgeRequest +from .types.pubsub import CreateSnapshotRequest +from .types.pubsub import DeadLetterPolicy +from .types.pubsub import DeleteSnapshotRequest +from .types.pubsub import DeleteSubscriptionRequest +from .types.pubsub import DeleteTopicRequest +from .types.pubsub import DetachSubscriptionRequest +from .types.pubsub import DetachSubscriptionResponse +from .types.pubsub import ExpirationPolicy +from .types.pubsub import GetSnapshotRequest +from .types.pubsub import GetSubscriptionRequest +from .types.pubsub import GetTopicRequest +from .types.pubsub import ListSnapshotsRequest +from .types.pubsub import ListSnapshotsResponse +from .types.pubsub import ListSubscriptionsRequest +from .types.pubsub import ListSubscriptionsResponse +from .types.pubsub import ListTopicSnapshotsRequest +from .types.pubsub import ListTopicSnapshotsResponse +from .types.pubsub import ListTopicSubscriptionsRequest +from .types.pubsub import ListTopicSubscriptionsResponse +from .types.pubsub import ListTopicsRequest +from .types.pubsub import ListTopicsResponse +from .types.pubsub import MessageStoragePolicy +from .types.pubsub import ModifyAckDeadlineRequest +from .types.pubsub import ModifyPushConfigRequest +from .types.pubsub import PublishRequest +from .types.pubsub import PublishResponse +from .types.pubsub import PubsubMessage +from .types.pubsub import PullRequest +from .types.pubsub import PullResponse +from .types.pubsub import PushConfig +from .types.pubsub import ReceivedMessage +from .types.pubsub import RetryPolicy +from .types.pubsub import SeekRequest +from .types.pubsub import SeekResponse +from .types.pubsub import Snapshot +from .types.pubsub import StreamingPullRequest +from .types.pubsub import StreamingPullResponse +from .types.pubsub import Subscription +from .types.pubsub import Topic +from .types.pubsub import 
UpdateSnapshotRequest +from .types.pubsub import UpdateSubscriptionRequest +from .types.pubsub import UpdateTopicRequest + + +__all__ = ( + "AcknowledgeRequest", + "CreateSnapshotRequest", + "DeadLetterPolicy", + "DeleteSnapshotRequest", + "DeleteSubscriptionRequest", + "DeleteTopicRequest", + "DetachSubscriptionRequest", + "DetachSubscriptionResponse", + "ExpirationPolicy", + "GetSnapshotRequest", + "GetSubscriptionRequest", + "GetTopicRequest", + "ListSnapshotsRequest", + "ListSnapshotsResponse", + "ListSubscriptionsRequest", + "ListSubscriptionsResponse", + "ListTopicSnapshotsRequest", + "ListTopicSnapshotsResponse", + "ListTopicSubscriptionsRequest", + "ListTopicSubscriptionsResponse", + "ListTopicsRequest", + "ListTopicsResponse", + "MessageStoragePolicy", + "ModifyAckDeadlineRequest", + "ModifyPushConfigRequest", + "PublishRequest", + "PublishResponse", + "PubsubMessage", + "PullRequest", + "PullResponse", + "PushConfig", + "ReceivedMessage", + "RetryPolicy", + "SeekRequest", + "SeekResponse", + "Snapshot", + "StreamingPullRequest", + "StreamingPullResponse", + "SubscriberClient", + "Subscription", + "Topic", + "UpdateSnapshotRequest", + "UpdateSubscriptionRequest", + "UpdateTopicRequest", + "PublisherClient", +) diff --git a/google/pubsub_v1/py.typed b/google/pubsub_v1/py.typed new file mode 100644 index 000000000..9b0e37433 --- /dev/null +++ b/google/pubsub_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-pubsub package uses inline types. diff --git a/google/pubsub_v1/services/__init__.py b/google/pubsub_v1/services/__init__.py new file mode 100644 index 000000000..42ffdf2bc --- /dev/null +++ b/google/pubsub_v1/services/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/system/gapic/v1/test_system_publisher_v1.py b/google/pubsub_v1/services/publisher/__init__.py similarity index 57% rename from tests/system/gapic/v1/test_system_publisher_v1.py rename to google/pubsub_v1/services/publisher/__init__.py index 2ccebf07f..970a1a3b4 100644 --- a/tests/system/gapic/v1/test_system_publisher_v1.py +++ b/google/pubsub_v1/services/publisher/__init__.py @@ -1,30 +1,24 @@ # -*- coding: utf-8 -*- -# + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+# -import os -import time - -from google.cloud import pubsub_v1 -from google.cloud.pubsub_v1.proto import pubsub_pb2 - - -class TestSystemPublisher(object): - def test_list_topics(self): - project_id = os.environ["PROJECT_ID"] +from .client import PublisherClient +from .async_client import PublisherAsyncClient - client = pubsub_v1.PublisherClient() - project = client.project_path(project_id) - response = client.list_topics(project) +__all__ = ( + "PublisherClient", + "PublisherAsyncClient", +) diff --git a/google/pubsub_v1/services/publisher/async_client.py b/google/pubsub_v1/services/publisher/async_client.py new file mode 100644 index 000000000..6338887a7 --- /dev/null +++ b/google/pubsub_v1/services/publisher/async_client.py @@ -0,0 +1,1099 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.pubsub_v1.services.publisher import pagers +from google.pubsub_v1.types import pubsub + +from .transports.base import PublisherTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import PublisherGrpcAsyncIOTransport +from .client import PublisherClient + + +class PublisherAsyncClient: + """The service that an application uses to manipulate topics, + and to send messages to a topic. + """ + + _client: PublisherClient + + DEFAULT_ENDPOINT = PublisherClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = PublisherClient.DEFAULT_MTLS_ENDPOINT + + topic_path = staticmethod(PublisherClient.topic_path) + + from_service_account_file = PublisherClient.from_service_account_file + from_service_account_json = from_service_account_file + + get_transport_class = functools.partial( + type(PublisherClient).get_transport_class, type(PublisherClient) + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, PublisherTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the publisher client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.PublisherTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint, this is the default value for + the environment variable) and "auto" (auto switch to the default + mTLS endpoint if client SSL credentials is present). However, + the ``api_endpoint`` property takes precedence if provided. + (2) The ``client_cert_source`` property is used to provide client + SSL credentials for mutual TLS transport. If not provided, the + default SSL credentials will be used if present. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + + self._client = PublisherClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_topic( + self, + request: pubsub.Topic = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Topic: + r"""Creates the given topic with the given name. See the `resource + name + rules `__. + + Args: + request (:class:`~.pubsub.Topic`): + The request object. A topic resource. + name (:class:`str`): + Required. The name of the topic. It must have the format + ``"projects/{project}/topics/{topic}"``. 
``{topic}`` + must start with a letter, and contain only letters + (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), + underscores (``_``), periods (``.``), tildes (``~``), + plus (``+``) or percent signs (``%``). It must be + between 3 and 255 characters in length, and it must not + start with ``"goog"``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Topic: + A topic resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.Topic(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_topic, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def update_topic( + self, + request: pubsub.UpdateTopicRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Topic: + r"""Updates an existing topic. Note that certain + properties of a topic are not modifiable. + + Args: + request (:class:`~.pubsub.UpdateTopicRequest`): + The request object. Request for the UpdateTopic method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Topic: + A topic resource. + """ + # Create or coerce a protobuf request object. + + request = pubsub.UpdateTopicRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_topic, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("topic.name", request.topic.name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def publish( + self, + request: pubsub.PublishRequest = None, + *, + topic: str = None, + messages: Sequence[pubsub.PubsubMessage] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.PublishResponse: + r"""Adds one or more messages to the topic. Returns ``NOT_FOUND`` if + the topic does not exist. + + Args: + request (:class:`~.pubsub.PublishRequest`): + The request object. Request for the Publish method. + topic (:class:`str`): + Required. The messages in the request will be published + on this topic. Format is + ``projects/{project}/topics/{topic}``. + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + messages (:class:`Sequence[~.pubsub.PubsubMessage]`): + Required. The messages to publish. + This corresponds to the ``messages`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.PublishResponse: + Response for the ``Publish`` method. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([topic, messages]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.PublishRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if topic is not None: + request.topic = topic + if messages is not None: + request.messages = messages + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.publish, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.Cancelled, + exceptions.DeadlineExceeded, + exceptions.ResourceExhausted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + exceptions.InternalServerError, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_topic( + self, + request: pubsub.GetTopicRequest = None, + *, + topic: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Topic: + r"""Gets the configuration of a topic. + + Args: + request (:class:`~.pubsub.GetTopicRequest`): + The request object. Request for the GetTopic method. + topic (:class:`str`): + Required. The name of the topic to get. Format is + ``projects/{project}/topics/{topic}``. + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Topic: + A topic resource. 
+ """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([topic]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.GetTopicRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_topic, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_topics( + self, + request: pubsub.ListTopicsRequest = None, + *, + project: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTopicsAsyncPager: + r"""Lists matching topics. + + Args: + request (:class:`~.pubsub.ListTopicsRequest`): + The request object. Request for the `ListTopics` method. + project (:class:`str`): + Required. The name of the project in which to list + topics. Format is ``projects/{project-id}``. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListTopicsAsyncPager: + Response for the ``ListTopics`` method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([project]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.ListTopicsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_topics, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListTopicsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_topic_subscriptions( + self, + request: pubsub.ListTopicSubscriptionsRequest = None, + *, + topic: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTopicSubscriptionsAsyncPager: + r"""Lists the names of the attached subscriptions on this + topic. + + Args: + request (:class:`~.pubsub.ListTopicSubscriptionsRequest`): + The request object. Request for the + `ListTopicSubscriptions` method. + topic (:class:`str`): + Required. The name of the topic that subscriptions are + attached to. Format is + ``projects/{project}/topics/{topic}``. + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListTopicSubscriptionsAsyncPager: + Response for the ``ListTopicSubscriptions`` method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([topic]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.ListTopicSubscriptionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_topic_subscriptions, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTopicSubscriptionsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_topic_snapshots( + self, + request: pubsub.ListTopicSnapshotsRequest = None, + *, + topic: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTopicSnapshotsAsyncPager: + r"""Lists the names of the snapshots on this topic. Snapshots are + used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + + Args: + request (:class:`~.pubsub.ListTopicSnapshotsRequest`): + The request object. Request for the `ListTopicSnapshots` + method. + topic (:class:`str`): + Required. The name of the topic that snapshots are + attached to. Format is + ``projects/{project}/topics/{topic}``. 
+ This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListTopicSnapshotsAsyncPager: + Response for the ``ListTopicSnapshots`` method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([topic]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.ListTopicSnapshotsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_topic_snapshots, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Send the request. 
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTopicSnapshotsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_topic( + self, + request: pubsub.DeleteTopicRequest = None, + *, + topic: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the topic with the given name. Returns ``NOT_FOUND`` if + the topic does not exist. After a topic is deleted, a new topic + may be created with the same name; this is an entirely new topic + with none of the old configuration or subscriptions. Existing + subscriptions to this topic are not deleted, but their ``topic`` + field is set to ``_deleted-topic_``. + + Args: + request (:class:`~.pubsub.DeleteTopicRequest`): + The request object. Request for the `DeleteTopic` + method. + topic (:class:`str`): + Required. Name of the topic to delete. Format is + ``projects/{project}/topics/{topic}``. + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([topic]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = pubsub.DeleteTopicRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_topic, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def detach_subscription( + self, + request: pubsub.DetachSubscriptionRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.DetachSubscriptionResponse: + r"""Detaches a subscription from this topic. All messages retained + in the subscription are dropped. Subsequent ``Pull`` and + ``StreamingPull`` requests will return FAILED_PRECONDITION. If + the subscription is a push subscription, pushes to the endpoint + will stop. + + Args: + request (:class:`~.pubsub.DetachSubscriptionRequest`): + The request object. Request for the DetachSubscription + method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.DetachSubscriptionResponse: + Response for the DetachSubscription + method. Reserved for future use. + + """ + # Create or coerce a protobuf request object. 
+ + request = pubsub.DetachSubscriptionRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.detach_subscription, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def set_iam_policy( + self, + request: iam_policy.SetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Args: + request (:class:`~.iam_policy.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). 
+ A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_iam_policy( + self, + request: iam_policy.GetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Args: + request (:class:`~.iam_policy.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def test_iam_permissions( + self, + request: iam_policy.TestIamPermissionsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy.TestIamPermissionsResponse: + r"""Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Args: + request (:class:`~.iam_policy.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-pubsub",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("PublisherAsyncClient",) diff --git a/google/pubsub_v1/services/publisher/client.py b/google/pubsub_v1/services/publisher/client.py new file mode 100644 index 000000000..22225b83f --- /dev/null +++ b/google/pubsub_v1/services/publisher/client.py @@ -0,0 +1,1205 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +import os +import re +from typing import Callable, Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.pubsub_v1.services.publisher import pagers +from google.pubsub_v1.types import pubsub + +from .transports.base import PublisherTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import PublisherGrpcTransport +from .transports.grpc_asyncio import PublisherGrpcAsyncIOTransport + + +class PublisherClientMeta(type): + """Metaclass for the Publisher client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[PublisherTransport]] + _transport_registry["grpc"] = PublisherGrpcTransport + _transport_registry["grpc_asyncio"] = PublisherGrpcAsyncIOTransport + + def get_transport_class(cls, label: str = None,) -> Type[PublisherTransport]: + """Return an appropriate transport class. + + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. 
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class PublisherClient(metaclass=PublisherClientMeta):
+    """The service that an application uses to manipulate topics,
+    and to send messages to a topic.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Convert api endpoint to mTLS endpoint.
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # The scopes needed to make gRPC calls to all of the methods defined in
+    # this service
+    _DEFAULT_SCOPES = (
+        "https://www.googleapis.com/auth/cloud-platform",
+        "https://www.googleapis.com/auth/pubsub",
+    )
+
+    SERVICE_ADDRESS = "pubsub.googleapis.com:443"
+    """The default address of the service."""
+
+    DEFAULT_ENDPOINT = "pubsub.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        file.
+
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            {@api.name}: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @staticmethod
+    def topic_path(project: str, topic: str,) -> str:
+        """Return a fully-qualified topic string."""
+        return "projects/{project}/topics/{topic}".format(project=project, topic=topic,)
+
+    @staticmethod
+    def parse_topic_path(path: str) -> Dict[str, str]:
+        """Parse a topic path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/topics/(?P<topic>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    def __init__(
+        self,
+        *,
+        credentials: credentials.Credentials = None,
+        transport: Union[str, PublisherTransport] = None,
+        client_options: ClientOptions = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the publisher client.
+
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.PublisherTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client.
GOOGLE_API_USE_MTLS + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint, this is the default value for + the environment variable) and "auto" (auto switch to the default + mTLS endpoint if client SSL credentials is present). However, + the ``api_endpoint`` property takes precedence if provided. + (2) The ``client_cert_source`` property is used to provide client + SSL credentials for mutual TLS transport. If not provided, the + default SSL credentials will be used if present. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = ClientOptions.from_dict(client_options) + if client_options is None: + client_options = ClientOptions.ClientOptions() + + if client_options.api_endpoint is None: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never") + if use_mtls_env == "never": + client_options.api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + has_client_cert_source = ( + client_options.client_cert_source is not None + or mtls.has_default_client_cert_source() + ) + client_options.api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT + if has_client_cert_source + else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always" + ) + + # Save or instantiate the transport. 
+ # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, PublisherTransport): + # transport is a PublisherTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=client_options.api_endpoint, + scopes=client_options.scopes, + api_mtls_endpoint=client_options.api_endpoint, + client_cert_source=client_options.client_cert_source, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def create_topic( + self, + request: pubsub.Topic = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Topic: + r"""Creates the given topic with the given name. See the `resource + name + rules `__. + + + Args: + request (:class:`~.pubsub.Topic`): + The request object. A topic resource. + name (:class:`str`): + Required. The name of the topic. It must have the format + ``"projects/{project}/topics/{topic}"``. ``{topic}`` + must start with a letter, and contain only letters + (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), + underscores (``_``), periods (``.``), tildes (``~``), + plus (``+``) or percent signs (``%``). It must be + between 3 and 255 characters in length, and it must not + start with ``"goog"``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Topic: + A topic resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.Topic. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.Topic): + request = pubsub.Topic(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_topic] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def update_topic( + self, + request: pubsub.UpdateTopicRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Topic: + r"""Updates an existing topic. Note that certain + properties of a topic are not modifiable. 
+ + + Args: + request (:class:`~.pubsub.UpdateTopicRequest`): + The request object. Request for the UpdateTopic method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Topic: + A topic resource. + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.UpdateTopicRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.UpdateTopicRequest): + request = pubsub.UpdateTopicRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_topic] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("topic.name", request.topic.name),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def publish( + self, + request: pubsub.PublishRequest = None, + *, + topic: str = None, + messages: Sequence[pubsub.PubsubMessage] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.PublishResponse: + r"""Adds one or more messages to the topic. Returns ``NOT_FOUND`` if + the topic does not exist. + + + Args: + request (:class:`~.pubsub.PublishRequest`): + The request object. Request for the Publish method. + topic (:class:`str`): + Required. The messages in the request will be published + on this topic. Format is + ``projects/{project}/topics/{topic}``. 
+ This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + messages (:class:`Sequence[~.pubsub.PubsubMessage]`): + Required. The messages to publish. + This corresponds to the ``messages`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.PublishResponse: + Response for the ``Publish`` method. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([topic, messages]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.PublishRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.PublishRequest): + request = pubsub.PublishRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if topic is not None: + request.topic = topic + if messages is not None: + request.messages = messages + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.publish] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_topic( + self, + request: pubsub.GetTopicRequest = None, + *, + topic: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Topic: + r"""Gets the configuration of a topic. + + + Args: + request (:class:`~.pubsub.GetTopicRequest`): + The request object. Request for the GetTopic method. + topic (:class:`str`): + Required. The name of the topic to get. Format is + ``projects/{project}/topics/{topic}``. + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Topic: + A topic resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([topic]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.GetTopicRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.GetTopicRequest): + request = pubsub.GetTopicRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_topic] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_topics( + self, + request: pubsub.ListTopicsRequest = None, + *, + project: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTopicsPager: + r"""Lists matching topics. + + + Args: + request (:class:`~.pubsub.ListTopicsRequest`): + The request object. Request for the `ListTopics` method. + project (:class:`str`): + Required. The name of the project in which to list + topics. Format is ``projects/{project-id}``. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListTopicsPager: + Response for the ``ListTopics`` method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.ListTopicsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.ListTopicsRequest): + request = pubsub.ListTopicsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_topics] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTopicsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def list_topic_subscriptions( + self, + request: pubsub.ListTopicSubscriptionsRequest = None, + *, + topic: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTopicSubscriptionsPager: + r"""Lists the names of the attached subscriptions on this + topic. + + + Args: + request (:class:`~.pubsub.ListTopicSubscriptionsRequest`): + The request object. Request for the + `ListTopicSubscriptions` method. + topic (:class:`str`): + Required. 
The name of the topic that subscriptions are + attached to. Format is + ``projects/{project}/topics/{topic}``. + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListTopicSubscriptionsPager: + Response for the ``ListTopicSubscriptions`` method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([topic]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.ListTopicSubscriptionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.ListTopicSubscriptionsRequest): + request = pubsub.ListTopicSubscriptionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_topic_subscriptions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTopicSubscriptionsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def list_topic_snapshots( + self, + request: pubsub.ListTopicSnapshotsRequest = None, + *, + topic: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTopicSnapshotsPager: + r"""Lists the names of the snapshots on this topic. Snapshots are + used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + + + Args: + request (:class:`~.pubsub.ListTopicSnapshotsRequest`): + The request object. Request for the `ListTopicSnapshots` + method. + topic (:class:`str`): + Required. The name of the topic that snapshots are + attached to. Format is + ``projects/{project}/topics/{topic}``. + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListTopicSnapshotsPager: + Response for the ``ListTopicSnapshots`` method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([topic]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.ListTopicSnapshotsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.ListTopicSnapshotsRequest): + request = pubsub.ListTopicSnapshotsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_topic_snapshots] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTopicSnapshotsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_topic( + self, + request: pubsub.DeleteTopicRequest = None, + *, + topic: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the topic with the given name. Returns ``NOT_FOUND`` if + the topic does not exist. After a topic is deleted, a new topic + may be created with the same name; this is an entirely new topic + with none of the old configuration or subscriptions. 
Existing + subscriptions to this topic are not deleted, but their ``topic`` + field is set to ``_deleted-topic_``. + + + Args: + request (:class:`~.pubsub.DeleteTopicRequest`): + The request object. Request for the `DeleteTopic` + method. + topic (:class:`str`): + Required. Name of the topic to delete. Format is + ``projects/{project}/topics/{topic}``. + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([topic]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.DeleteTopicRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.DeleteTopicRequest): + request = pubsub.DeleteTopicRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_topic] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Send the request. 
+ rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def detach_subscription( + self, + request: pubsub.DetachSubscriptionRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.DetachSubscriptionResponse: + r"""Detaches a subscription from this topic. All messages retained + in the subscription are dropped. Subsequent ``Pull`` and + ``StreamingPull`` requests will return FAILED_PRECONDITION. If + the subscription is a push subscription, pushes to the endpoint + will stop. + + + Args: + request (:class:`~.pubsub.DetachSubscriptionRequest`): + The request object. Request for the DetachSubscription + method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.DetachSubscriptionResponse: + Response for the DetachSubscription + method. Reserved for future use. + + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.DetachSubscriptionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.DetachSubscriptionRequest): + request = pubsub.DetachSubscriptionRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.detach_subscription] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def set_iam_policy( + self, + request: iam_policy.SetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Sets the IAM access control policy on the specified + function. Replaces any existing policy. + + Args: + request (:class:`~.iam_policy.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example**:: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example**:: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def get_iam_policy( + self, + request: iam_policy.GetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + + Args: + request (:class:`~.iam_policy.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example**:: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example**:: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def test_iam_permissions( + self, + request: iam_policy.TestIamPermissionsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy.TestIamPermissionsResponse: + r"""Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-pubsub",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("PublisherClient",) diff --git a/google/pubsub_v1/services/publisher/pagers.py b/google/pubsub_v1/services/publisher/pagers.py new file mode 100644 index 000000000..52242ff17 --- /dev/null +++ b/google/pubsub_v1/services/publisher/pagers.py @@ -0,0 +1,404 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple + +from google.pubsub_v1.types import pubsub + + +class ListTopicsPager: + """A pager for iterating through ``list_topics`` requests. + + This class thinly wraps an initial + :class:`~.pubsub.ListTopicsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``topics`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTopics`` requests and continue to iterate + through the ``topics`` field on the + corresponding responses. + + All the usual :class:`~.pubsub.ListTopicsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., pubsub.ListTopicsResponse], + request: pubsub.ListTopicsRequest, + response: pubsub.ListTopicsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.pubsub.ListTopicsRequest`): + The initial request object. + response (:class:`~.pubsub.ListTopicsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = pubsub.ListTopicsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[pubsub.ListTopicsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[pubsub.Topic]: + for page in self.pages: + yield from page.topics + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTopicsAsyncPager: + """A pager for iterating through ``list_topics`` requests. + + This class thinly wraps an initial + :class:`~.pubsub.ListTopicsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``topics`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTopics`` requests and continue to iterate + through the ``topics`` field on the + corresponding responses. + + All the usual :class:`~.pubsub.ListTopicsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[pubsub.ListTopicsResponse]], + request: pubsub.ListTopicsRequest, + response: pubsub.ListTopicsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.pubsub.ListTopicsRequest`): + The initial request object. + response (:class:`~.pubsub.ListTopicsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = pubsub.ListTopicsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[pubsub.ListTopicsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[pubsub.Topic]: + async def async_generator(): + async for page in self.pages: + for response in page.topics: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTopicSubscriptionsPager: + """A pager for iterating through ``list_topic_subscriptions`` requests. + + This class thinly wraps an initial + :class:`~.pubsub.ListTopicSubscriptionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``subscriptions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTopicSubscriptions`` requests and continue to iterate + through the ``subscriptions`` field on the + corresponding responses. 
+ + All the usual :class:`~.pubsub.ListTopicSubscriptionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., pubsub.ListTopicSubscriptionsResponse], + request: pubsub.ListTopicSubscriptionsRequest, + response: pubsub.ListTopicSubscriptionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.pubsub.ListTopicSubscriptionsRequest`): + The initial request object. + response (:class:`~.pubsub.ListTopicSubscriptionsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = pubsub.ListTopicSubscriptionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[pubsub.ListTopicSubscriptionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[str]: + for page in self.pages: + yield from page.subscriptions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTopicSubscriptionsAsyncPager: + """A pager for iterating through ``list_topic_subscriptions`` requests. + + This class thinly wraps an initial + :class:`~.pubsub.ListTopicSubscriptionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``subscriptions`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListTopicSubscriptions`` requests and continue to iterate + through the ``subscriptions`` field on the + corresponding responses. + + All the usual :class:`~.pubsub.ListTopicSubscriptionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[pubsub.ListTopicSubscriptionsResponse]], + request: pubsub.ListTopicSubscriptionsRequest, + response: pubsub.ListTopicSubscriptionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.pubsub.ListTopicSubscriptionsRequest`): + The initial request object. + response (:class:`~.pubsub.ListTopicSubscriptionsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = pubsub.ListTopicSubscriptionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[pubsub.ListTopicSubscriptionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[str]: + async def async_generator(): + async for page in self.pages: + for response in page.subscriptions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTopicSnapshotsPager: + """A pager for iterating through ``list_topic_snapshots`` requests. + + This class thinly wraps an initial + :class:`~.pubsub.ListTopicSnapshotsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``snapshots`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTopicSnapshots`` requests and continue to iterate + through the ``snapshots`` field on the + corresponding responses. + + All the usual :class:`~.pubsub.ListTopicSnapshotsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., pubsub.ListTopicSnapshotsResponse], + request: pubsub.ListTopicSnapshotsRequest, + response: pubsub.ListTopicSnapshotsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (:class:`~.pubsub.ListTopicSnapshotsRequest`): + The initial request object. + response (:class:`~.pubsub.ListTopicSnapshotsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = pubsub.ListTopicSnapshotsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[pubsub.ListTopicSnapshotsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[str]: + for page in self.pages: + yield from page.snapshots + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTopicSnapshotsAsyncPager: + """A pager for iterating through ``list_topic_snapshots`` requests. + + This class thinly wraps an initial + :class:`~.pubsub.ListTopicSnapshotsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``snapshots`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTopicSnapshots`` requests and continue to iterate + through the ``snapshots`` field on the + corresponding responses. + + All the usual :class:`~.pubsub.ListTopicSnapshotsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[pubsub.ListTopicSnapshotsResponse]], + request: pubsub.ListTopicSnapshotsRequest, + response: pubsub.ListTopicSnapshotsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.pubsub.ListTopicSnapshotsRequest`): + The initial request object. + response (:class:`~.pubsub.ListTopicSnapshotsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = pubsub.ListTopicSnapshotsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[pubsub.ListTopicSnapshotsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[str]: + async def async_generator(): + async for page in self.pages: + for response in page.snapshots: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/pubsub_v1/services/publisher/transports/__init__.py b/google/pubsub_v1/services/publisher/transports/__init__.py new file mode 100644 index 000000000..bf7dc8f2b --- /dev/null +++ b/google/pubsub_v1/services/publisher/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +from typing import Dict, Type + +from .base import PublisherTransport +from .grpc import PublisherGrpcTransport +from .grpc_asyncio import PublisherGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[PublisherTransport]] +_transport_registry["grpc"] = PublisherGrpcTransport +_transport_registry["grpc_asyncio"] = PublisherGrpcAsyncIOTransport + + +__all__ = ( + "PublisherTransport", + "PublisherGrpcTransport", + "PublisherGrpcAsyncIOTransport", +) diff --git a/google/pubsub_v1/services/publisher/transports/base.py b/google/pubsub_v1/services/publisher/transports/base.py new file mode 100644 index 000000000..9a99a899e --- /dev/null +++ b/google/pubsub_v1/services/publisher/transports/base.py @@ -0,0 +1,360 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import abc +import typing +import pkg_resources + +from google import auth # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore + +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore +from google.pubsub_v1.types import pubsub + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-pubsub",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +class PublisherTransport(abc.ABC): + """Abstract transport class for Publisher.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ) + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: typing.Optional[str] = None, + scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, + quota_project_id: typing.Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ """
+ # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+ if ":" not in host:
+ host += ":443"
+ self._host = host
+
+ # If no credentials are provided, then determine the appropriate
+ # defaults.
+ if credentials and credentials_file:
+ raise exceptions.DuplicateCredentialArgs(
+ "'credentials_file' and 'credentials' are mutually exclusive"
+ )
+
+ if credentials_file is not None:
+ credentials, _ = auth.load_credentials_from_file(
+ credentials_file, scopes=scopes, quota_project_id=quota_project_id
+ )
+
+ elif credentials is None:
+ credentials, _ = auth.default(
+ scopes=scopes, quota_project_id=quota_project_id
+ )
+
+ # Save the credentials.
+ self._credentials = credentials
+
+ # Lifted into its own function so it can be stubbed out during tests.
+ self._prep_wrapped_messages(client_info)
+
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = { + self.create_topic: gapic_v1.method.wrap_method( + self.create_topic, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_topic: gapic_v1.method.wrap_method( + self.update_topic, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.publish: gapic_v1.method.wrap_method( + self.publish, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.Cancelled, + exceptions.DeadlineExceeded, + exceptions.ResourceExhausted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + exceptions.InternalServerError, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_topic: gapic_v1.method.wrap_method( + self.get_topic, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_topics: gapic_v1.method.wrap_method( + self.list_topics, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_topic_subscriptions: gapic_v1.method.wrap_method( + self.list_topic_subscriptions, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + 
default_timeout=60.0, + client_info=client_info, + ), + self.list_topic_snapshots: gapic_v1.method.wrap_method( + self.list_topic_snapshots, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_topic: gapic_v1.method.wrap_method( + self.delete_topic, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.detach_subscription: gapic_v1.method.wrap_method( + self.detach_subscription, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + @property + def create_topic( + self, + ) -> typing.Callable[ + [pubsub.Topic], typing.Union[pubsub.Topic, typing.Awaitable[pubsub.Topic]] + ]: + raise NotImplementedError() + + @property + def update_topic( + self, + ) -> typing.Callable[ + [pubsub.UpdateTopicRequest], + typing.Union[pubsub.Topic, typing.Awaitable[pubsub.Topic]], + ]: + raise NotImplementedError() + + @property + def publish( + self, + ) -> typing.Callable[ + [pubsub.PublishRequest], + typing.Union[pubsub.PublishResponse, typing.Awaitable[pubsub.PublishResponse]], + ]: + raise NotImplementedError() + + @property + def get_topic( + self, + ) -> typing.Callable[ + [pubsub.GetTopicRequest], + typing.Union[pubsub.Topic, typing.Awaitable[pubsub.Topic]], + ]: + raise NotImplementedError() + + @property + def list_topics( + self, + ) -> typing.Callable[ + [pubsub.ListTopicsRequest], + typing.Union[ + pubsub.ListTopicsResponse, typing.Awaitable[pubsub.ListTopicsResponse] + ], + ]: + raise NotImplementedError() + + @property + def 
list_topic_subscriptions( + self, + ) -> typing.Callable[ + [pubsub.ListTopicSubscriptionsRequest], + typing.Union[ + pubsub.ListTopicSubscriptionsResponse, + typing.Awaitable[pubsub.ListTopicSubscriptionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_topic_snapshots( + self, + ) -> typing.Callable[ + [pubsub.ListTopicSnapshotsRequest], + typing.Union[ + pubsub.ListTopicSnapshotsResponse, + typing.Awaitable[pubsub.ListTopicSnapshotsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_topic( + self, + ) -> typing.Callable[ + [pubsub.DeleteTopicRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def detach_subscription( + self, + ) -> typing.Callable[ + [pubsub.DetachSubscriptionRequest], + typing.Union[ + pubsub.DetachSubscriptionResponse, + typing.Awaitable[pubsub.DetachSubscriptionResponse], + ], + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> typing.Callable[ + [iam_policy.SetIamPolicyRequest], + typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> typing.Callable[ + [iam_policy.GetIamPolicyRequest], + typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> typing.Callable[ + [iam_policy.TestIamPermissionsRequest], + typing.Union[ + iam_policy.TestIamPermissionsResponse, + typing.Awaitable[iam_policy.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + +__all__ = ("PublisherTransport",) diff --git a/google/pubsub_v1/services/publisher/transports/grpc.py b/google/pubsub_v1/services/publisher/transports/grpc.py new file mode 100644 index 000000000..ad5f95684 --- /dev/null +++ b/google/pubsub_v1/services/publisher/transports/grpc.py @@ -0,0 +1,547 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 
Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + + +import grpc # type: ignore + +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore +from google.pubsub_v1.types import pubsub + +from .base import PublisherTransport, DEFAULT_CLIENT_INFO + + +class PublisherGrpcTransport(PublisherTransport): + """gRPC backend transport for Publisher. + + The service that an application uses to manipulate topics, + and to send messages to a topic. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If + provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A + callback to provide client SSL certificate bytes and private key + bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` + is None. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + self._stubs = {} # type: Dict[str, Callable] + + # Run the base constructor. 
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ )
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "pubsub.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: str = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> grpc.Channel:
+ """Create and return a gRPC channel object.
+ Args:
+ address (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ grpc.Channel: A gRPC channel object.
+
+ Raises:
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ scopes = scopes or cls.AUTH_SCOPES
+ return grpc_helpers.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ **kwargs,
+ )
+
+ @property
+ def grpc_channel(self) -> grpc.Channel:
+ """Create the channel designed to connect to this service.
+ + This property caches on the instance; repeated calls return + the same channel. + """ + # Sanity check: Only create a new channel if we do not already + # have one. + if not hasattr(self, "_grpc_channel"): + self._grpc_channel = self.create_channel( + self._host, credentials=self._credentials, + ) + + # Return the channel from cache. + return self._grpc_channel + + @property + def create_topic(self) -> Callable[[pubsub.Topic], pubsub.Topic]: + r"""Return a callable for the create topic method over gRPC. + + Creates the given topic with the given name. See the `resource + name + rules `__. + + Returns: + Callable[[~.Topic], + ~.Topic]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_topic" not in self._stubs: + self._stubs["create_topic"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/CreateTopic", + request_serializer=pubsub.Topic.serialize, + response_deserializer=pubsub.Topic.deserialize, + ) + return self._stubs["create_topic"] + + @property + def update_topic(self) -> Callable[[pubsub.UpdateTopicRequest], pubsub.Topic]: + r"""Return a callable for the update topic method over gRPC. + + Updates an existing topic. Note that certain + properties of a topic are not modifiable. + + Returns: + Callable[[~.UpdateTopicRequest], + ~.Topic]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_topic" not in self._stubs: + self._stubs["update_topic"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/UpdateTopic", + request_serializer=pubsub.UpdateTopicRequest.serialize, + response_deserializer=pubsub.Topic.deserialize, + ) + return self._stubs["update_topic"] + + @property + def publish(self) -> Callable[[pubsub.PublishRequest], pubsub.PublishResponse]: + r"""Return a callable for the publish method over gRPC. + + Adds one or more messages to the topic. Returns ``NOT_FOUND`` if + the topic does not exist. + + Returns: + Callable[[~.PublishRequest], + ~.PublishResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "publish" not in self._stubs: + self._stubs["publish"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/Publish", + request_serializer=pubsub.PublishRequest.serialize, + response_deserializer=pubsub.PublishResponse.deserialize, + ) + return self._stubs["publish"] + + @property + def get_topic(self) -> Callable[[pubsub.GetTopicRequest], pubsub.Topic]: + r"""Return a callable for the get topic method over gRPC. + + Gets the configuration of a topic. + + Returns: + Callable[[~.GetTopicRequest], + ~.Topic]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_topic" not in self._stubs: + self._stubs["get_topic"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/GetTopic", + request_serializer=pubsub.GetTopicRequest.serialize, + response_deserializer=pubsub.Topic.deserialize, + ) + return self._stubs["get_topic"] + + @property + def list_topics( + self, + ) -> Callable[[pubsub.ListTopicsRequest], pubsub.ListTopicsResponse]: + r"""Return a callable for the list topics method over gRPC. + + Lists matching topics. + + Returns: + Callable[[~.ListTopicsRequest], + ~.ListTopicsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_topics" not in self._stubs: + self._stubs["list_topics"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/ListTopics", + request_serializer=pubsub.ListTopicsRequest.serialize, + response_deserializer=pubsub.ListTopicsResponse.deserialize, + ) + return self._stubs["list_topics"] + + @property + def list_topic_subscriptions( + self, + ) -> Callable[ + [pubsub.ListTopicSubscriptionsRequest], pubsub.ListTopicSubscriptionsResponse + ]: + r"""Return a callable for the list topic subscriptions method over gRPC. + + Lists the names of the attached subscriptions on this + topic. + + Returns: + Callable[[~.ListTopicSubscriptionsRequest], + ~.ListTopicSubscriptionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_topic_subscriptions" not in self._stubs: + self._stubs["list_topic_subscriptions"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/ListTopicSubscriptions", + request_serializer=pubsub.ListTopicSubscriptionsRequest.serialize, + response_deserializer=pubsub.ListTopicSubscriptionsResponse.deserialize, + ) + return self._stubs["list_topic_subscriptions"] + + @property + def list_topic_snapshots( + self, + ) -> Callable[ + [pubsub.ListTopicSnapshotsRequest], pubsub.ListTopicSnapshotsResponse + ]: + r"""Return a callable for the list topic snapshots method over gRPC. + + Lists the names of the snapshots on this topic. Snapshots are + used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + + Returns: + Callable[[~.ListTopicSnapshotsRequest], + ~.ListTopicSnapshotsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_topic_snapshots" not in self._stubs: + self._stubs["list_topic_snapshots"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/ListTopicSnapshots", + request_serializer=pubsub.ListTopicSnapshotsRequest.serialize, + response_deserializer=pubsub.ListTopicSnapshotsResponse.deserialize, + ) + return self._stubs["list_topic_snapshots"] + + @property + def delete_topic(self) -> Callable[[pubsub.DeleteTopicRequest], empty.Empty]: + r"""Return a callable for the delete topic method over gRPC. + + Deletes the topic with the given name. Returns ``NOT_FOUND`` if + the topic does not exist. 
After a topic is deleted, a new topic + may be created with the same name; this is an entirely new topic + with none of the old configuration or subscriptions. Existing + subscriptions to this topic are not deleted, but their ``topic`` + field is set to ``_deleted-topic_``. + + Returns: + Callable[[~.DeleteTopicRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_topic" not in self._stubs: + self._stubs["delete_topic"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/DeleteTopic", + request_serializer=pubsub.DeleteTopicRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_topic"] + + @property + def detach_subscription( + self, + ) -> Callable[ + [pubsub.DetachSubscriptionRequest], pubsub.DetachSubscriptionResponse + ]: + r"""Return a callable for the detach subscription method over gRPC. + + Detaches a subscription from this topic. All messages retained + in the subscription are dropped. Subsequent ``Pull`` and + ``StreamingPull`` requests will return FAILED_PRECONDITION. If + the subscription is a push subscription, pushes to the endpoint + will stop. + + Returns: + Callable[[~.DetachSubscriptionRequest], + ~.DetachSubscriptionResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "detach_subscription" not in self._stubs: + self._stubs["detach_subscription"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/DetachSubscription", + request_serializer=pubsub.DetachSubscriptionRequest.serialize, + response_deserializer=pubsub.DetachSubscriptionResponse.deserialize, + ) + return self._stubs["detach_subscription"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy.SetIamPolicyRequest], policy.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy.GetIamPolicyRequest], policy.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy.TestIamPermissionsRequest], iam_policy.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("PublisherGrpcTransport",) diff --git a/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py new file mode 100644 index 000000000..5d70e3d62 --- /dev/null +++ b/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -0,0 +1,548 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore +from google.pubsub_v1.types import pubsub + +from .base import PublisherTransport, DEFAULT_CLIENT_INFO +from .grpc import PublisherGrpcTransport + + +class PublisherGrpcAsyncIOTransport(PublisherTransport): + """gRPC AsyncIO backend transport for Publisher. + + The service that an application uses to manipulate topics, + and to send messages to a topic. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "pubsub.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + address (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If + provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A + callback to provide client SSL certificate bytes and private key + bytes, both in PEM format. 
It is ignored if ``api_mtls_endpoint`` + is None. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + self._stubs = {} + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. 
+ + This property caches on the instance; repeated calls return + the same channel. + """ + # Sanity check: Only create a new channel if we do not already + # have one. + if not hasattr(self, "_grpc_channel"): + self._grpc_channel = self.create_channel( + self._host, credentials=self._credentials, + ) + + # Return the channel from cache. + return self._grpc_channel + + @property + def create_topic(self) -> Callable[[pubsub.Topic], Awaitable[pubsub.Topic]]: + r"""Return a callable for the create topic method over gRPC. + + Creates the given topic with the given name. See the `resource + name + rules `__. + + Returns: + Callable[[~.Topic], + Awaitable[~.Topic]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_topic" not in self._stubs: + self._stubs["create_topic"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/CreateTopic", + request_serializer=pubsub.Topic.serialize, + response_deserializer=pubsub.Topic.deserialize, + ) + return self._stubs["create_topic"] + + @property + def update_topic( + self, + ) -> Callable[[pubsub.UpdateTopicRequest], Awaitable[pubsub.Topic]]: + r"""Return a callable for the update topic method over gRPC. + + Updates an existing topic. Note that certain + properties of a topic are not modifiable. + + Returns: + Callable[[~.UpdateTopicRequest], + Awaitable[~.Topic]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_topic" not in self._stubs: + self._stubs["update_topic"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/UpdateTopic", + request_serializer=pubsub.UpdateTopicRequest.serialize, + response_deserializer=pubsub.Topic.deserialize, + ) + return self._stubs["update_topic"] + + @property + def publish( + self, + ) -> Callable[[pubsub.PublishRequest], Awaitable[pubsub.PublishResponse]]: + r"""Return a callable for the publish method over gRPC. + + Adds one or more messages to the topic. Returns ``NOT_FOUND`` if + the topic does not exist. + + Returns: + Callable[[~.PublishRequest], + Awaitable[~.PublishResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "publish" not in self._stubs: + self._stubs["publish"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/Publish", + request_serializer=pubsub.PublishRequest.serialize, + response_deserializer=pubsub.PublishResponse.deserialize, + ) + return self._stubs["publish"] + + @property + def get_topic(self) -> Callable[[pubsub.GetTopicRequest], Awaitable[pubsub.Topic]]: + r"""Return a callable for the get topic method over gRPC. + + Gets the configuration of a topic. + + Returns: + Callable[[~.GetTopicRequest], + Awaitable[~.Topic]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_topic" not in self._stubs: + self._stubs["get_topic"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/GetTopic", + request_serializer=pubsub.GetTopicRequest.serialize, + response_deserializer=pubsub.Topic.deserialize, + ) + return self._stubs["get_topic"] + + @property + def list_topics( + self, + ) -> Callable[[pubsub.ListTopicsRequest], Awaitable[pubsub.ListTopicsResponse]]: + r"""Return a callable for the list topics method over gRPC. + + Lists matching topics. + + Returns: + Callable[[~.ListTopicsRequest], + Awaitable[~.ListTopicsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_topics" not in self._stubs: + self._stubs["list_topics"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/ListTopics", + request_serializer=pubsub.ListTopicsRequest.serialize, + response_deserializer=pubsub.ListTopicsResponse.deserialize, + ) + return self._stubs["list_topics"] + + @property + def list_topic_subscriptions( + self, + ) -> Callable[ + [pubsub.ListTopicSubscriptionsRequest], + Awaitable[pubsub.ListTopicSubscriptionsResponse], + ]: + r"""Return a callable for the list topic subscriptions method over gRPC. + + Lists the names of the attached subscriptions on this + topic. + + Returns: + Callable[[~.ListTopicSubscriptionsRequest], + Awaitable[~.ListTopicSubscriptionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_topic_subscriptions" not in self._stubs: + self._stubs["list_topic_subscriptions"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/ListTopicSubscriptions", + request_serializer=pubsub.ListTopicSubscriptionsRequest.serialize, + response_deserializer=pubsub.ListTopicSubscriptionsResponse.deserialize, + ) + return self._stubs["list_topic_subscriptions"] + + @property + def list_topic_snapshots( + self, + ) -> Callable[ + [pubsub.ListTopicSnapshotsRequest], Awaitable[pubsub.ListTopicSnapshotsResponse] + ]: + r"""Return a callable for the list topic snapshots method over gRPC. + + Lists the names of the snapshots on this topic. Snapshots are + used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + + Returns: + Callable[[~.ListTopicSnapshotsRequest], + Awaitable[~.ListTopicSnapshotsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_topic_snapshots" not in self._stubs: + self._stubs["list_topic_snapshots"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/ListTopicSnapshots", + request_serializer=pubsub.ListTopicSnapshotsRequest.serialize, + response_deserializer=pubsub.ListTopicSnapshotsResponse.deserialize, + ) + return self._stubs["list_topic_snapshots"] + + @property + def delete_topic( + self, + ) -> Callable[[pubsub.DeleteTopicRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete topic method over gRPC. + + Deletes the topic with the given name. Returns ``NOT_FOUND`` if + the topic does not exist. 
After a topic is deleted, a new topic + may be created with the same name; this is an entirely new topic + with none of the old configuration or subscriptions. Existing + subscriptions to this topic are not deleted, but their ``topic`` + field is set to ``_deleted-topic_``. + + Returns: + Callable[[~.DeleteTopicRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_topic" not in self._stubs: + self._stubs["delete_topic"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/DeleteTopic", + request_serializer=pubsub.DeleteTopicRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_topic"] + + @property + def detach_subscription( + self, + ) -> Callable[ + [pubsub.DetachSubscriptionRequest], Awaitable[pubsub.DetachSubscriptionResponse] + ]: + r"""Return a callable for the detach subscription method over gRPC. + + Detaches a subscription from this topic. All messages retained + in the subscription are dropped. Subsequent ``Pull`` and + ``StreamingPull`` requests will return FAILED_PRECONDITION. If + the subscription is a push subscription, pushes to the endpoint + will stop. + + Returns: + Callable[[~.DetachSubscriptionRequest], + Awaitable[~.DetachSubscriptionResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "detach_subscription" not in self._stubs: + self._stubs["detach_subscription"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/DetachSubscription", + request_serializer=pubsub.DetachSubscriptionRequest.serialize, + response_deserializer=pubsub.DetachSubscriptionResponse.deserialize, + ) + return self._stubs["detach_subscription"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[policy.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[policy.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy.TestIamPermissionsRequest], + Awaitable[iam_policy.TestIamPermissionsResponse], + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("PublisherGrpcAsyncIOTransport",) diff --git a/google/pubsub_v1/services/subscriber/__init__.py b/google/pubsub_v1/services/subscriber/__init__.py new file mode 100644 index 000000000..361085a5e --- /dev/null +++ b/google/pubsub_v1/services/subscriber/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .client import SubscriberClient +from .async_client import SubscriberAsyncClient + +__all__ = ( + "SubscriberClient", + "SubscriberAsyncClient", +) diff --git a/google/pubsub_v1/services/subscriber/async_client.py b/google/pubsub_v1/services/subscriber/async_client.py new file mode 100644 index 000000000..a169e3780 --- /dev/null +++ b/google/pubsub_v1/services/subscriber/async_client.py @@ -0,0 +1,1826 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +import functools +import re +from typing import Dict, AsyncIterable, AsyncIterator, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.pubsub_v1.services.subscriber import pagers +from google.pubsub_v1.types import pubsub + +from .transports.base import SubscriberTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import SubscriberGrpcAsyncIOTransport +from .client import SubscriberClient + + +class SubscriberAsyncClient: + """The service that an application uses to manipulate subscriptions and + to consume messages from a subscription via the ``Pull`` method or + by establishing a bi-directional stream using the ``StreamingPull`` + method. 
+ """ + + _client: SubscriberClient + + DEFAULT_ENDPOINT = SubscriberClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = SubscriberClient.DEFAULT_MTLS_ENDPOINT + + snapshot_path = staticmethod(SubscriberClient.snapshot_path) + + subscription_path = staticmethod(SubscriberClient.subscription_path) + + from_service_account_file = SubscriberClient.from_service_account_file + from_service_account_json = from_service_account_file + + get_transport_class = functools.partial( + type(SubscriberClient).get_transport_class, type(SubscriberClient) + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, SubscriberTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the subscriber client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.SubscriberTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint, this is the default value for + the environment variable) and "auto" (auto switch to the default + mTLS endpoint if client SSL credentials is present). However, + the ``api_endpoint`` property takes precedence if provided. 
+ (2) The ``client_cert_source`` property is used to provide client + SSL credentials for mutual TLS transport. If not provided, the + default SSL credentials will be used if present. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + + self._client = SubscriberClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_subscription( + self, + request: pubsub.Subscription = None, + *, + name: str = None, + topic: str = None, + push_config: pubsub.PushConfig = None, + ack_deadline_seconds: int = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Subscription: + r"""Creates a subscription to a given topic. See the [resource name + rules] + (https://cloud.google.com/pubsub/docs/admin#resource_names). If + the subscription already exists, returns ``ALREADY_EXISTS``. If + the corresponding topic doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will + assign a random name for this subscription on the same project + as the topic, conforming to the [resource name format] + (https://cloud.google.com/pubsub/docs/admin#resource_names). The + generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the + request. + + Args: + request (:class:`~.pubsub.Subscription`): + The request object. A subscription resource. + name (:class:`str`): + Required. The name of the subscription. It must have the + format + ``"projects/{project}/subscriptions/{subscription}"``. + ``{subscription}`` must start with a letter, and contain + only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes + (``-``), underscores (``_``), periods (``.``), tildes + (``~``), plus (``+``) or percent signs (``%``). 
It must + be between 3 and 255 characters in length, and it must + not start with ``"goog"``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + topic (:class:`str`): + Required. The name of the topic from which this + subscription is receiving messages. Format is + ``projects/{project}/topics/{topic}``. The value of this + field will be ``_deleted-topic_`` if the topic has been + deleted. + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + push_config (:class:`~.pubsub.PushConfig`): + If push delivery is used with this subscription, this + field is used to configure it. An empty ``pushConfig`` + signifies that the subscriber will pull and ack messages + using API methods. + This corresponds to the ``push_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_deadline_seconds (:class:`int`): + The approximate amount of time (on a best-effort basis) + Pub/Sub waits for the subscriber to acknowledge receipt + before resending the message. In the interval after the + message is delivered and before it is acknowledged, it + is considered to be outstanding. During that time + period, the message will not be redelivered (on a + best-effort basis). + + For pull subscriptions, this value is used as the + initial value for the ack deadline. To override this + value for a given message, call ``ModifyAckDeadline`` + with the corresponding ``ack_id`` if using non-streaming + pull or send the ``ack_id`` in a + ``StreamingModifyAckDeadlineRequest`` if using streaming + pull. The minimum custom deadline you can specify is 10 + seconds. The maximum custom deadline you can specify is + 600 seconds (10 minutes). If this parameter is 0, a + default value of 10 seconds is used. 
+ + For push delivery, this value is also used to set the + request timeout for the call to the push endpoint. + + If the subscriber never acknowledges the message, the + Pub/Sub system will eventually redeliver the message. + This corresponds to the ``ack_deadline_seconds`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Subscription: + A subscription resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any( + [name, topic, push_config, ack_deadline_seconds] + ): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.Subscription(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if topic is not None: + request.topic = topic + if push_config is not None: + request.push_config = push_config + if ack_deadline_seconds is not None: + request.ack_deadline_seconds = ack_deadline_seconds + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_subscription, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_subscription( + self, + request: pubsub.GetSubscriptionRequest = None, + *, + subscription: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Subscription: + r"""Gets the configuration details of a subscription. + + Args: + request (:class:`~.pubsub.GetSubscriptionRequest`): + The request object. Request for the GetSubscription + method. + subscription (:class:`str`): + Required. The name of the subscription to get. Format is + ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Subscription: + A subscription resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ if request is not None and any([subscription]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.GetSubscriptionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if subscription is not None: + request.subscription = subscription + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_subscription, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def update_subscription( + self, + request: pubsub.UpdateSubscriptionRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Subscription: + r"""Updates an existing subscription. Note that certain + properties of a subscription, such as its topic, are not + modifiable. + + Args: + request (:class:`~.pubsub.UpdateSubscriptionRequest`): + The request object. Request for the UpdateSubscription + method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.pubsub.Subscription: + A subscription resource. + """ + # Create or coerce a protobuf request object. + + request = pubsub.UpdateSubscriptionRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_subscription, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription.name", request.subscription.name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_subscriptions( + self, + request: pubsub.ListSubscriptionsRequest = None, + *, + project: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSubscriptionsAsyncPager: + r"""Lists matching subscriptions. + + Args: + request (:class:`~.pubsub.ListSubscriptionsRequest`): + The request object. Request for the `ListSubscriptions` + method. + project (:class:`str`): + Required. The name of the project in which to list + subscriptions. Format is ``projects/{project-id}``. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.pagers.ListSubscriptionsAsyncPager: + Response for the ``ListSubscriptions`` method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([project]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.ListSubscriptionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_subscriptions, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSubscriptionsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_subscription( + self, + request: pubsub.DeleteSubscriptionRequest = None, + *, + subscription: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an existing subscription. All messages retained in the + subscription are immediately dropped. Calls to ``Pull`` after + deletion will return ``NOT_FOUND``. After a subscription is + deleted, a new one may be created with the same name, but the + new one has no association with the old subscription or its + topic unless the same topic is specified. + + Args: + request (:class:`~.pubsub.DeleteSubscriptionRequest`): + The request object. Request for the DeleteSubscription + method. + subscription (:class:`str`): + Required. The subscription to delete. Format is + ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([subscription]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.DeleteSubscriptionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if subscription is not None: + request.subscription = subscription + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_subscription, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def modify_ack_deadline( + self, + request: pubsub.ModifyAckDeadlineRequest = None, + *, + subscription: str = None, + ack_ids: Sequence[str] = None, + ack_deadline_seconds: int = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Modifies the ack deadline for a specific message. This method is + useful to indicate that more time is needed to process a message + by the subscriber, or to make the message available for + redelivery if the processing was interrupted. Note that this + does not modify the subscription-level ``ackDeadlineSeconds`` + used for subsequent messages. + + Args: + request (:class:`~.pubsub.ModifyAckDeadlineRequest`): + The request object. Request for the ModifyAckDeadline + method. + subscription (:class:`str`): + Required. The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_ids (:class:`Sequence[str]`): + Required. List of acknowledgment IDs. + This corresponds to the ``ack_ids`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_deadline_seconds (:class:`int`): + Required. 
The new ack deadline with respect to the time + this request was sent to the Pub/Sub system. For + example, if the value is 10, the new ack deadline will + expire 10 seconds after the ``ModifyAckDeadline`` call + was made. Specifying zero might immediately make the + message available for delivery to another subscriber + client. This typically results in an increase in the + rate of message redeliveries (that is, duplicates). The + minimum deadline you can specify is 0 seconds. The + maximum deadline you can specify is 600 seconds (10 + minutes). + This corresponds to the ``ack_deadline_seconds`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([subscription, ack_ids, ack_deadline_seconds]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.ModifyAckDeadlineRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if subscription is not None: + request.subscription = subscription + if ack_ids is not None: + request.ack_ids = ack_ids + if ack_deadline_seconds is not None: + request.ack_deadline_seconds = ack_deadline_seconds + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.modify_ack_deadline, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def acknowledge( + self, + request: pubsub.AcknowledgeRequest = None, + *, + subscription: str = None, + ack_ids: Sequence[str] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Acknowledges the messages associated with the ``ack_ids`` in the + ``AcknowledgeRequest``. The Pub/Sub system can remove the + relevant messages from the subscription. + + Acknowledging a message whose ack deadline has expired may + succeed, but such a message may be redelivered later. + Acknowledging a message more than once will not result in an + error. + + Args: + request (:class:`~.pubsub.AcknowledgeRequest`): + The request object. Request for the Acknowledge method. + subscription (:class:`str`): + Required. The subscription whose message is being + acknowledged. Format is + ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_ids (:class:`Sequence[str]`): + Required. The acknowledgment ID for the messages being + acknowledged that was returned by the Pub/Sub system in + the ``Pull`` response. Must not be empty. 
+ This corresponds to the ``ack_ids`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([subscription, ack_ids]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.AcknowledgeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if subscription is not None: + request.subscription = subscription + if ack_ids is not None: + request.ack_ids = ack_ids + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.acknowledge, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. 
+ await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def pull( + self, + request: pubsub.PullRequest = None, + *, + subscription: str = None, + return_immediately: bool = None, + max_messages: int = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.PullResponse: + r"""Pulls messages from the server. The server may return + ``UNAVAILABLE`` if there are too many concurrent pull requests + pending for the given subscription. + + Args: + request (:class:`~.pubsub.PullRequest`): + The request object. Request for the `Pull` method. + subscription (:class:`str`): + Required. The subscription from which messages should be + pulled. Format is + ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + return_immediately (:class:`bool`): + Optional. If this field set to true, the system will + respond immediately even if it there are no messages + available to return in the ``Pull`` response. Otherwise, + the system may wait (for a bounded amount of time) until + at least one message is available, rather than returning + no messages. Warning: setting this field to ``true`` is + discouraged because it adversely impacts the performance + of ``Pull`` operations. We recommend that users do not + set this field. + This corresponds to the ``return_immediately`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + max_messages (:class:`int`): + Required. The maximum number of + messages to return for this request. + Must be a positive integer. The Pub/Sub + system may return fewer than the number + specified. + This corresponds to the ``max_messages`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.PullResponse: + Response for the ``Pull`` method. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any( + [subscription, return_immediately, max_messages] + ): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.PullRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if subscription is not None: + request.subscription = subscription + if return_immediately is not None: + request.return_immediately = return_immediately + if max_messages is not None: + request.max_messages = max_messages + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.pull, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def streaming_pull( + self, + requests: AsyncIterator[pubsub.StreamingPullRequest] = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> AsyncIterable[pubsub.StreamingPullResponse]: + r"""Establishes a stream with the server, which sends messages down + to the client. The client streams acknowledgements and ack + deadline modifications back to the server. The server will close + the stream and return the status on any error. The server may + close the stream with status ``UNAVAILABLE`` to reassign + server-side resources, in which case, the client should + re-establish the stream. Flow control can be achieved by + configuring the underlying RPC channel. + + Args: + requests (AsyncIterator[`~.pubsub.StreamingPullRequest`]): + The request object AsyncIterator. Request for the `StreamingPull` + streaming RPC method. This request is used to establish + the initial stream as well as to stream acknowledgements + and ack deadline modifications from the client to the + server. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + AsyncIterable[~.pubsub.StreamingPullResponse]: + Response for the ``StreamingPull`` method. This response + is used to stream messages from the server to the + client. + + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.streaming_pull, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.DeadlineExceeded, + exceptions.ResourceExhausted, + exceptions.ServiceUnavailable, + exceptions.InternalServerError, + ), + ), + default_timeout=900.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = rpc(requests, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def modify_push_config( + self, + request: pubsub.ModifyPushConfigRequest = None, + *, + subscription: str = None, + push_config: pubsub.PushConfig = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Modifies the ``PushConfig`` for a specified subscription. + + This may be used to change a push subscription to a pull one + (signified by an empty ``PushConfig``) or vice versa, or change + the endpoint URL and other attributes of a push subscription. + Messages will accumulate for delivery continuously through the + call regardless of changes to the ``PushConfig``. + + Args: + request (:class:`~.pubsub.ModifyPushConfigRequest`): + The request object. Request for the ModifyPushConfig + method. + subscription (:class:`str`): + Required. The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + push_config (:class:`~.pubsub.PushConfig`): + Required. The push configuration for future deliveries. 
+ + An empty ``pushConfig`` indicates that the Pub/Sub + system should stop pushing messages from the given + subscription and allow messages to be pulled and + acknowledged - effectively pausing the subscription if + ``Pull`` or ``StreamingPull`` is not called. + This corresponds to the ``push_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([subscription, push_config]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.ModifyPushConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if subscription is not None: + request.subscription = subscription + if push_config is not None: + request.push_config = push_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.modify_push_config, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. 
+ await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def get_snapshot( + self, + request: pubsub.GetSnapshotRequest = None, + *, + snapshot: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Snapshot: + r"""Gets the configuration details of a snapshot. + Snapshots are used in Seek operations, which allow you to manage + message acknowledgments in bulk. That is, you can set + the acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + Args: + request (:class:`~.pubsub.GetSnapshotRequest`): + The request object. Request for the GetSnapshot method. + snapshot (:class:`str`): + Required. The name of the snapshot to get. Format is + ``projects/{project}/snapshots/{snap}``. + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Snapshot: + A snapshot resource. Snapshots are used in + `Seek `__ + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([snapshot]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = pubsub.GetSnapshotRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if snapshot is not None: + request.snapshot = snapshot + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_snapshot, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("snapshot", request.snapshot),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_snapshots( + self, + request: pubsub.ListSnapshotsRequest = None, + *, + project: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSnapshotsAsyncPager: + r"""Lists the existing snapshots. Snapshots are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + + Args: + request (:class:`~.pubsub.ListSnapshotsRequest`): + The request object. Request for the `ListSnapshots` + method. + project (:class:`str`): + Required. The name of the project in which to list + snapshots. Format is ``projects/{project-id}``. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListSnapshotsAsyncPager: + Response for the ``ListSnapshots`` method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([project]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.ListSnapshotsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_snapshots, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListSnapshotsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_snapshot( + self, + request: pubsub.CreateSnapshotRequest = None, + *, + name: str = None, + subscription: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Snapshot: + r"""Creates a snapshot from the requested subscription. Snapshots + are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + If the snapshot already exists, returns ``ALREADY_EXISTS``. If + the requested subscription doesn't exist, returns ``NOT_FOUND``. + If the backlog in the subscription is too old -- and the + resulting snapshot would expire in less than 1 hour -- then + ``FAILED_PRECONDITION`` is returned. See also the + ``Snapshot.expire_time`` field. If the name is not provided in + the request, the server will assign a random name for this + snapshot on the same project as the subscription, conforming to + the [resource name format] + (https://cloud.google.com/pubsub/docs/admin#resource_names). The + generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the + request. + + Args: + request (:class:`~.pubsub.CreateSnapshotRequest`): + The request object. Request for the `CreateSnapshot` + method. + name (:class:`str`): + Required. User-provided name for this snapshot. If the + name is not provided in the request, the server will + assign a random name for this snapshot on the same + project as the subscription. Note that for REST API + requests, you must specify a name. See the resource name + rules. Format is + ``projects/{project}/snapshots/{snap}``. 
+ This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + subscription (:class:`str`): + Required. The subscription whose backlog the snapshot + retains. Specifically, the created snapshot is + guaranteed to retain: (a) The existing backlog on the + subscription. More precisely, this is defined as the + messages in the subscription's backlog that are + unacknowledged upon the successful completion of the + ``CreateSnapshot`` request; as well as: (b) Any messages + published to the subscription's topic following the + successful completion of the CreateSnapshot request. + Format is ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Snapshot: + A snapshot resource. Snapshots are used in + `Seek `__ + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name, subscription]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.CreateSnapshotRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if name is not None: + request.name = name + if subscription is not None: + request.subscription = subscription + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_snapshot, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def update_snapshot( + self, + request: pubsub.UpdateSnapshotRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Snapshot: + r"""Updates an existing snapshot. Snapshots are used in + Seek operations, which allow + you to manage message acknowledgments in bulk. That is, + you can set the acknowledgment state of messages in an + existing subscription to the state captured by a + snapshot. + + Args: + request (:class:`~.pubsub.UpdateSnapshotRequest`): + The request object. Request for the UpdateSnapshot + method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Snapshot: + A snapshot resource. Snapshots are used in + `Seek `__ + operations, which allow you to manage message + acknowledgments in bulk. 
That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + """ + # Create or coerce a protobuf request object. + + request = pubsub.UpdateSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_snapshot, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("snapshot.name", request.snapshot.name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_snapshot( + self, + request: pubsub.DeleteSnapshotRequest = None, + *, + snapshot: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Removes an existing snapshot. Snapshots are used in [Seek] + (https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + When the snapshot is deleted, all messages retained in the + snapshot are immediately dropped. After a snapshot is deleted, a + new one may be created with the same name, but the new one has + no association with the old snapshot or its subscription, unless + the same subscription is specified. + + Args: + request (:class:`~.pubsub.DeleteSnapshotRequest`): + The request object. 
Request for the `DeleteSnapshot` + method. + snapshot (:class:`str`): + Required. The name of the snapshot to delete. Format is + ``projects/{project}/snapshots/{snap}``. + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([snapshot]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.DeleteSnapshotRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if snapshot is not None: + request.snapshot = snapshot + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_snapshot, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("snapshot", request.snapshot),)), + ) + + # Send the request. 
+ await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def seek( + self, + request: pubsub.SeekRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.SeekResponse: + r"""Seeks an existing subscription to a point in time or to a given + snapshot, whichever is provided in the request. Snapshots are + used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + Note that both the subscription and the snapshot must be on the + same topic. + + Args: + request (:class:`~.pubsub.SeekRequest`): + The request object. Request for the `Seek` method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.SeekResponse: + Response for the ``Seek`` method (this response is + empty). + + """ + # Create or coerce a protobuf request object. + + request = pubsub.SeekRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.seek, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. 
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def set_iam_policy( + self, + request: iam_policy.SetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Args: + request (:class:`~.iam_policy.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def get_iam_policy( + self, + request: iam_policy.GetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Args: + request (:class:`~.iam_policy.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def test_iam_permissions( + self, + request: iam_policy.TestIamPermissionsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy.TestIamPermissionsResponse: + r"""Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Args: + request (:class:`~.iam_policy.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-pubsub",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("SubscriberAsyncClient",) diff --git a/google/pubsub_v1/services/subscriber/client.py b/google/pubsub_v1/services/subscriber/client.py new file mode 100644 index 000000000..58a7cd1f9 --- /dev/null +++ b/google/pubsub_v1/services/subscriber/client.py @@ -0,0 +1,1910 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +import os +import re +from typing import Callable, Dict, Iterable, Iterator, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.pubsub_v1.services.subscriber import pagers +from google.pubsub_v1.types import pubsub + +from .transports.base import SubscriberTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import SubscriberGrpcTransport +from .transports.grpc_asyncio import SubscriberGrpcAsyncIOTransport + + +class SubscriberClientMeta(type): + """Metaclass for the Subscriber client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[SubscriberTransport]] + _transport_registry["grpc"] = SubscriberGrpcTransport + _transport_registry["grpc_asyncio"] = SubscriberGrpcAsyncIOTransport + + def get_transport_class(cls, label: str = None,) -> Type[SubscriberTransport]: + """Return an appropriate transport class. + + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. 
+ """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class SubscriberClient(metaclass=SubscriberClientMeta): + """The service that an application uses to manipulate subscriptions and + to consume messages from a subscription via the ``Pull`` method or + by establishing a bi-directional stream using the ``StreamingPull`` + method. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Convert api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _DEFAULT_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ) + + SERVICE_ADDRESS = "pubsub.googleapis.com:443" + """The default address of the service.""" + + DEFAULT_ENDPOINT = "pubsub.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + {@api.name}: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @staticmethod + def snapshot_path(project: str, snapshot: str,) -> str: + """Return a fully-qualified snapshot string.""" + return "projects/{project}/snapshots/{snapshot}".format( + project=project, snapshot=snapshot, + ) + + @staticmethod + def parse_snapshot_path(path: str) -> Dict[str, str]: + """Parse a snapshot path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/snapshots/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def subscription_path(project: str, subscription: str,) -> str: + """Return a fully-qualified subscription string.""" + return "projects/{project}/subscriptions/{subscription}".format( + project=project, subscription=subscription, + ) + + @staticmethod + def parse_subscription_path(path: str) -> Dict[str, str]: + """Parse a subscription path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/subscriptions/(?P.+?)$", path + ) + return m.groupdict() if m else {} + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, SubscriberTransport] = None, + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the subscriber client. + + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.SubscriberTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. 
It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint, this is the default value for + the environment variable) and "auto" (auto switch to the default + mTLS endpoint if client SSL credentials is present). However, + the ``api_endpoint`` property takes precedence if provided. + (2) The ``client_cert_source`` property is used to provide client + SSL credentials for mutual TLS transport. If not provided, the + default SSL credentials will be used if present. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = ClientOptions.from_dict(client_options) + if client_options is None: + client_options = ClientOptions.ClientOptions() + + if client_options.api_endpoint is None: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never") + if use_mtls_env == "never": + client_options.api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + has_client_cert_source = ( + client_options.client_cert_source is not None + or mtls.has_default_client_cert_source() + ) + client_options.api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT + if has_client_cert_source + else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS value. 
Accepted values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, SubscriberTransport): + # transport is a SubscriberTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=client_options.api_endpoint, + scopes=client_options.scopes, + api_mtls_endpoint=client_options.api_endpoint, + client_cert_source=client_options.client_cert_source, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def create_subscription( + self, + request: pubsub.Subscription = None, + *, + name: str = None, + topic: str = None, + push_config: pubsub.PushConfig = None, + ack_deadline_seconds: int = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Subscription: + r"""Creates a subscription to a given topic. See the [resource name + rules] + (https://cloud.google.com/pubsub/docs/admin#resource_names). If + the subscription already exists, returns ``ALREADY_EXISTS``. If + the corresponding topic doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will + assign a random name for this subscription on the same project + as the topic, conforming to the [resource name format] + (https://cloud.google.com/pubsub/docs/admin#resource_names). 
The + generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the + request. + + + Args: + request (:class:`~.pubsub.Subscription`): + The request object. A subscription resource. + name (:class:`str`): + Required. The name of the subscription. It must have the + format + ``"projects/{project}/subscriptions/{subscription}"``. + ``{subscription}`` must start with a letter, and contain + only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes + (``-``), underscores (``_``), periods (``.``), tildes + (``~``), plus (``+``) or percent signs (``%``). It must + be between 3 and 255 characters in length, and it must + not start with ``"goog"``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + topic (:class:`str`): + Required. The name of the topic from which this + subscription is receiving messages. Format is + ``projects/{project}/topics/{topic}``. The value of this + field will be ``_deleted-topic_`` if the topic has been + deleted. + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + push_config (:class:`~.pubsub.PushConfig`): + If push delivery is used with this subscription, this + field is used to configure it. An empty ``pushConfig`` + signifies that the subscriber will pull and ack messages + using API methods. + This corresponds to the ``push_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_deadline_seconds (:class:`int`): + The approximate amount of time (on a best-effort basis) + Pub/Sub waits for the subscriber to acknowledge receipt + before resending the message. In the interval after the + message is delivered and before it is acknowledged, it + is considered to be outstanding. During that time + period, the message will not be redelivered (on a + best-effort basis). 
+ + For pull subscriptions, this value is used as the + initial value for the ack deadline. To override this + value for a given message, call ``ModifyAckDeadline`` + with the corresponding ``ack_id`` if using non-streaming + pull or send the ``ack_id`` in a + ``StreamingModifyAckDeadlineRequest`` if using streaming + pull. The minimum custom deadline you can specify is 10 + seconds. The maximum custom deadline you can specify is + 600 seconds (10 minutes). If this parameter is 0, a + default value of 10 seconds is used. + + For push delivery, this value is also used to set the + request timeout for the call to the push endpoint. + + If the subscriber never acknowledges the message, the + Pub/Sub system will eventually redeliver the message. + This corresponds to the ``ack_deadline_seconds`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Subscription: + A subscription resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, topic, push_config, ack_deadline_seconds]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.Subscription. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, pubsub.Subscription): + request = pubsub.Subscription(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if topic is not None: + request.topic = topic + if push_config is not None: + request.push_config = push_config + if ack_deadline_seconds is not None: + request.ack_deadline_seconds = ack_deadline_seconds + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_subscription] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_subscription( + self, + request: pubsub.GetSubscriptionRequest = None, + *, + subscription: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Subscription: + r"""Gets the configuration details of a subscription. + + + Args: + request (:class:`~.pubsub.GetSubscriptionRequest`): + The request object. Request for the GetSubscription + method. + subscription (:class:`str`): + Required. The name of the subscription to get. Format is + ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.pubsub.Subscription: + A subscription resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([subscription]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.GetSubscriptionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.GetSubscriptionRequest): + request = pubsub.GetSubscriptionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if subscription is not None: + request.subscription = subscription + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_subscription] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def update_subscription( + self, + request: pubsub.UpdateSubscriptionRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Subscription: + r"""Updates an existing subscription. Note that certain + properties of a subscription, such as its topic, are not + modifiable. + + + Args: + request (:class:`~.pubsub.UpdateSubscriptionRequest`): + The request object. 
Request for the UpdateSubscription + method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Subscription: + A subscription resource. + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.UpdateSubscriptionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.UpdateSubscriptionRequest): + request = pubsub.UpdateSubscriptionRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_subscription] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription.name", request.subscription.name),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_subscriptions( + self, + request: pubsub.ListSubscriptionsRequest = None, + *, + project: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSubscriptionsPager: + r"""Lists matching subscriptions. + + + Args: + request (:class:`~.pubsub.ListSubscriptionsRequest`): + The request object. Request for the `ListSubscriptions` + method. + project (:class:`str`): + Required. The name of the project in which to list + subscriptions. Format is ``projects/{project-id}``. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListSubscriptionsPager: + Response for the ``ListSubscriptions`` method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.ListSubscriptionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.ListSubscriptionsRequest): + request = pubsub.ListSubscriptionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_subscriptions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSubscriptionsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_subscription( + self, + request: pubsub.DeleteSubscriptionRequest = None, + *, + subscription: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an existing subscription. All messages retained in the + subscription are immediately dropped. Calls to ``Pull`` after + deletion will return ``NOT_FOUND``. After a subscription is + deleted, a new one may be created with the same name, but the + new one has no association with the old subscription or its + topic unless the same topic is specified. + + + Args: + request (:class:`~.pubsub.DeleteSubscriptionRequest`): + The request object. Request for the DeleteSubscription + method. + subscription (:class:`str`): + Required. The subscription to delete. Format is + ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([subscription]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.DeleteSubscriptionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.DeleteSubscriptionRequest): + request = pubsub.DeleteSubscriptionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if subscription is not None: + request.subscription = subscription + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_subscription] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def modify_ack_deadline( + self, + request: pubsub.ModifyAckDeadlineRequest = None, + *, + subscription: str = None, + ack_ids: Sequence[str] = None, + ack_deadline_seconds: int = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Modifies the ack deadline for a specific message. This method is + useful to indicate that more time is needed to process a message + by the subscriber, or to make the message available for + redelivery if the processing was interrupted. Note that this + does not modify the subscription-level ``ackDeadlineSeconds`` + used for subsequent messages. + + + Args: + request (:class:`~.pubsub.ModifyAckDeadlineRequest`): + The request object. 
Request for the ModifyAckDeadline + method. + subscription (:class:`str`): + Required. The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_ids (:class:`Sequence[str]`): + Required. List of acknowledgment IDs. + This corresponds to the ``ack_ids`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_deadline_seconds (:class:`int`): + Required. The new ack deadline with respect to the time + this request was sent to the Pub/Sub system. For + example, if the value is 10, the new ack deadline will + expire 10 seconds after the ``ModifyAckDeadline`` call + was made. Specifying zero might immediately make the + message available for delivery to another subscriber + client. This typically results in an increase in the + rate of message redeliveries (that is, duplicates). The + minimum deadline you can specify is 0 seconds. The + maximum deadline you can specify is 600 seconds (10 + minutes). + This corresponds to the ``ack_deadline_seconds`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([subscription, ack_ids, ack_deadline_seconds]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.ModifyAckDeadlineRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.ModifyAckDeadlineRequest): + request = pubsub.ModifyAckDeadlineRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if subscription is not None: + request.subscription = subscription + if ack_ids is not None: + request.ack_ids = ack_ids + if ack_deadline_seconds is not None: + request.ack_deadline_seconds = ack_deadline_seconds + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.modify_ack_deadline] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def acknowledge( + self, + request: pubsub.AcknowledgeRequest = None, + *, + subscription: str = None, + ack_ids: Sequence[str] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Acknowledges the messages associated with the ``ack_ids`` in the + ``AcknowledgeRequest``. The Pub/Sub system can remove the + relevant messages from the subscription. + + Acknowledging a message whose ack deadline has expired may + succeed, but such a message may be redelivered later. + Acknowledging a message more than once will not result in an + error. + + + Args: + request (:class:`~.pubsub.AcknowledgeRequest`): + The request object. Request for the Acknowledge method. + subscription (:class:`str`): + Required. The subscription whose message is being + acknowledged. 
Format is + ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_ids (:class:`Sequence[str]`): + Required. The acknowledgment ID for the messages being + acknowledged that was returned by the Pub/Sub system in + the ``Pull`` response. Must not be empty. + This corresponds to the ``ack_ids`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([subscription, ack_ids]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.AcknowledgeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.AcknowledgeRequest): + request = pubsub.AcknowledgeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if subscription is not None: + request.subscription = subscription + if ack_ids is not None: + request.ack_ids = ack_ids + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.acknowledge] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def pull( + self, + request: pubsub.PullRequest = None, + *, + subscription: str = None, + return_immediately: bool = None, + max_messages: int = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.PullResponse: + r"""Pulls messages from the server. The server may return + ``UNAVAILABLE`` if there are too many concurrent pull requests + pending for the given subscription. + + + Args: + request (:class:`~.pubsub.PullRequest`): + The request object. Request for the `Pull` method. + subscription (:class:`str`): + Required. The subscription from which messages should be + pulled. Format is + ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + return_immediately (:class:`bool`): + Optional. If this field set to true, the system will + respond immediately even if it there are no messages + available to return in the ``Pull`` response. Otherwise, + the system may wait (for a bounded amount of time) until + at least one message is available, rather than returning + no messages. Warning: setting this field to ``true`` is + discouraged because it adversely impacts the performance + of ``Pull`` operations. We recommend that users do not + set this field. + This corresponds to the ``return_immediately`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + max_messages (:class:`int`): + Required. The maximum number of + messages to return for this request. + Must be a positive integer. The Pub/Sub + system may return fewer than the number + specified. 
+ This corresponds to the ``max_messages`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.PullResponse: + Response for the ``Pull`` method. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([subscription, return_immediately, max_messages]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.PullRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.PullRequest): + request = pubsub.PullRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if subscription is not None: + request.subscription = subscription + if return_immediately is not None: + request.return_immediately = return_immediately + if max_messages is not None: + request.max_messages = max_messages + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.pull] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def streaming_pull( + self, + requests: Iterator[pubsub.StreamingPullRequest] = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Iterable[pubsub.StreamingPullResponse]: + r"""Establishes a stream with the server, which sends messages down + to the client. The client streams acknowledgements and ack + deadline modifications back to the server. The server will close + the stream and return the status on any error. The server may + close the stream with status ``UNAVAILABLE`` to reassign + server-side resources, in which case, the client should + re-establish the stream. Flow control can be achieved by + configuring the underlying RPC channel. + + + Args: + requests (Iterator[`~.pubsub.StreamingPullRequest`]): + The request object iterator. Request for the `StreamingPull` + streaming RPC method. This request is used to establish + the initial stream as well as to stream acknowledgements + and ack deadline modifications from the client to the + server. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + Iterable[~.pubsub.StreamingPullResponse]: + Response for the ``StreamingPull`` method. This response + is used to stream messages from the server to the + client. + + """ + + # Wrappers in api-core should not automatically pre-fetch the first + # stream result, as this breaks the stream when re-opening it. 
+ # https://github.com/googleapis/python-pubsub/issues/93#issuecomment-630762257 + self._transport.streaming_pull._prefetch_first_result_ = False + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.streaming_pull] + + # Send the request. + response = rpc(requests, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def modify_push_config( + self, + request: pubsub.ModifyPushConfigRequest = None, + *, + subscription: str = None, + push_config: pubsub.PushConfig = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Modifies the ``PushConfig`` for a specified subscription. + + This may be used to change a push subscription to a pull one + (signified by an empty ``PushConfig``) or vice versa, or change + the endpoint URL and other attributes of a push subscription. + Messages will accumulate for delivery continuously through the + call regardless of changes to the ``PushConfig``. + + + Args: + request (:class:`~.pubsub.ModifyPushConfigRequest`): + The request object. Request for the ModifyPushConfig + method. + subscription (:class:`str`): + Required. The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + push_config (:class:`~.pubsub.PushConfig`): + Required. The push configuration for future deliveries. + + An empty ``pushConfig`` indicates that the Pub/Sub + system should stop pushing messages from the given + subscription and allow messages to be pulled and + acknowledged - effectively pausing the subscription if + ``Pull`` or ``StreamingPull`` is not called. 
+ This corresponds to the ``push_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([subscription, push_config]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.ModifyPushConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.ModifyPushConfigRequest): + request = pubsub.ModifyPushConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if subscription is not None: + request.subscription = subscription + if push_config is not None: + request.push_config = push_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.modify_push_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. 
+ rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def get_snapshot( + self, + request: pubsub.GetSnapshotRequest = None, + *, + snapshot: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Snapshot: + r"""Gets the configuration details of a snapshot. + Snapshots are used in Seek operations, which allow you to manage + message acknowledgments in bulk. That is, you can set + the acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + + Args: + request (:class:`~.pubsub.GetSnapshotRequest`): + The request object. Request for the GetSnapshot method. + snapshot (:class:`str`): + Required. The name of the snapshot to get. Format is + ``projects/{project}/snapshots/{snap}``. + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Snapshot: + A snapshot resource. Snapshots are used in + `Seek `__ + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([snapshot]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.GetSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.GetSnapshotRequest): + request = pubsub.GetSnapshotRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if snapshot is not None: + request.snapshot = snapshot + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("snapshot", request.snapshot),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_snapshots( + self, + request: pubsub.ListSnapshotsRequest = None, + *, + project: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSnapshotsPager: + r"""Lists the existing snapshots. Snapshots are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + + + Args: + request (:class:`~.pubsub.ListSnapshotsRequest`): + The request object. Request for the `ListSnapshots` + method. + project (:class:`str`): + Required. The name of the project in which to list + snapshots. Format is ``projects/{project-id}``. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListSnapshotsPager: + Response for the ``ListSnapshots`` method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.ListSnapshotsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.ListSnapshotsRequest): + request = pubsub.ListSnapshotsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_snapshots] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListSnapshotsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def create_snapshot( + self, + request: pubsub.CreateSnapshotRequest = None, + *, + name: str = None, + subscription: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Snapshot: + r"""Creates a snapshot from the requested subscription. Snapshots + are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + If the snapshot already exists, returns ``ALREADY_EXISTS``. If + the requested subscription doesn't exist, returns ``NOT_FOUND``. + If the backlog in the subscription is too old -- and the + resulting snapshot would expire in less than 1 hour -- then + ``FAILED_PRECONDITION`` is returned. See also the + ``Snapshot.expire_time`` field. If the name is not provided in + the request, the server will assign a random name for this + snapshot on the same project as the subscription, conforming to + the [resource name format] + (https://cloud.google.com/pubsub/docs/admin#resource_names). The + generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the + request. + + + Args: + request (:class:`~.pubsub.CreateSnapshotRequest`): + The request object. Request for the `CreateSnapshot` + method. + name (:class:`str`): + Required. User-provided name for this snapshot. If the + name is not provided in the request, the server will + assign a random name for this snapshot on the same + project as the subscription. Note that for REST API + requests, you must specify a name. See the resource name + rules. Format is + ``projects/{project}/snapshots/{snap}``. 
+ This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + subscription (:class:`str`): + Required. The subscription whose backlog the snapshot + retains. Specifically, the created snapshot is + guaranteed to retain: (a) The existing backlog on the + subscription. More precisely, this is defined as the + messages in the subscription's backlog that are + unacknowledged upon the successful completion of the + ``CreateSnapshot`` request; as well as: (b) Any messages + published to the subscription's topic following the + successful completion of the CreateSnapshot request. + Format is ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Snapshot: + A snapshot resource. Snapshots are used in + `Seek `__ + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, subscription]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.CreateSnapshotRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.CreateSnapshotRequest): + request = pubsub.CreateSnapshotRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if subscription is not None: + request.subscription = subscription + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def update_snapshot( + self, + request: pubsub.UpdateSnapshotRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Snapshot: + r"""Updates an existing snapshot. Snapshots are used in + Seek operations, which allow + you to manage message acknowledgments in bulk. That is, + you can set the acknowledgment state of messages in an + existing subscription to the state captured by a + snapshot. + + + Args: + request (:class:`~.pubsub.UpdateSnapshotRequest`): + The request object. Request for the UpdateSnapshot + method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Snapshot: + A snapshot resource. Snapshots are used in + `Seek `__ + operations, which allow you to manage message + acknowledgments in bulk. 
That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.UpdateSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.UpdateSnapshotRequest): + request = pubsub.UpdateSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("snapshot.name", request.snapshot.name),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_snapshot( + self, + request: pubsub.DeleteSnapshotRequest = None, + *, + snapshot: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Removes an existing snapshot. Snapshots are used in [Seek] + (https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + When the snapshot is deleted, all messages retained in the + snapshot are immediately dropped. After a snapshot is deleted, a + new one may be created with the same name, but the new one has + no association with the old snapshot or its subscription, unless + the same subscription is specified. + + + Args: + request (:class:`~.pubsub.DeleteSnapshotRequest`): + The request object. 
Request for the `DeleteSnapshot` + method. + snapshot (:class:`str`): + Required. The name of the snapshot to delete. Format is + ``projects/{project}/snapshots/{snap}``. + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([snapshot]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.DeleteSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.DeleteSnapshotRequest): + request = pubsub.DeleteSnapshotRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if snapshot is not None: + request.snapshot = snapshot + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("snapshot", request.snapshot),)), + ) + + # Send the request. 
+ rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def seek( + self, + request: pubsub.SeekRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.SeekResponse: + r"""Seeks an existing subscription to a point in time or to a given + snapshot, whichever is provided in the request. Snapshots are + used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + Note that both the subscription and the snapshot must be on the + same topic. + + + Args: + request (:class:`~.pubsub.SeekRequest`): + The request object. Request for the `Seek` method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.SeekResponse: + Response for the ``Seek`` method (this response is + empty). + + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.SeekRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.SeekRequest): + request = pubsub.SeekRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.seek] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def set_iam_policy( + self, + request: iam_policy.SetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Sets the IAM access control policy on the specified + function. Replaces any existing policy. + + Args: + request (:class:`~.iam_policy.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example**:: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example**:: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def get_iam_policy( + self, + request: iam_policy.GetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + + Args: + request (:class:`~.iam_policy.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example**:: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example**:: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def test_iam_permissions( + self, + request: iam_policy.TestIamPermissionsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy.TestIamPermissionsResponse: + r"""Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-pubsub",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("SubscriberClient",) diff --git a/google/pubsub_v1/services/subscriber/pagers.py b/google/pubsub_v1/services/subscriber/pagers.py new file mode 100644 index 000000000..713184d79 --- /dev/null +++ b/google/pubsub_v1/services/subscriber/pagers.py @@ -0,0 +1,276 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple + +from google.pubsub_v1.types import pubsub + + +class ListSubscriptionsPager: + """A pager for iterating through ``list_subscriptions`` requests. + + This class thinly wraps an initial + :class:`~.pubsub.ListSubscriptionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``subscriptions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSubscriptions`` requests and continue to iterate + through the ``subscriptions`` field on the + corresponding responses. + + All the usual :class:`~.pubsub.ListSubscriptionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., pubsub.ListSubscriptionsResponse], + request: pubsub.ListSubscriptionsRequest, + response: pubsub.ListSubscriptionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.pubsub.ListSubscriptionsRequest`): + The initial request object. + response (:class:`~.pubsub.ListSubscriptionsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = pubsub.ListSubscriptionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[pubsub.ListSubscriptionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[pubsub.Subscription]: + for page in self.pages: + yield from page.subscriptions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSubscriptionsAsyncPager: + """A pager for iterating through ``list_subscriptions`` requests. + + This class thinly wraps an initial + :class:`~.pubsub.ListSubscriptionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``subscriptions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSubscriptions`` requests and continue to iterate + through the ``subscriptions`` field on the + corresponding responses. + + All the usual :class:`~.pubsub.ListSubscriptionsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[pubsub.ListSubscriptionsResponse]], + request: pubsub.ListSubscriptionsRequest, + response: pubsub.ListSubscriptionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.pubsub.ListSubscriptionsRequest`): + The initial request object. + response (:class:`~.pubsub.ListSubscriptionsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = pubsub.ListSubscriptionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[pubsub.ListSubscriptionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[pubsub.Subscription]: + async def async_generator(): + async for page in self.pages: + for response in page.subscriptions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSnapshotsPager: + """A pager for iterating through ``list_snapshots`` requests. + + This class thinly wraps an initial + :class:`~.pubsub.ListSnapshotsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``snapshots`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListSnapshots`` requests and continue to iterate + through the ``snapshots`` field on the + corresponding responses. + + All the usual :class:`~.pubsub.ListSnapshotsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., pubsub.ListSnapshotsResponse], + request: pubsub.ListSnapshotsRequest, + response: pubsub.ListSnapshotsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.pubsub.ListSnapshotsRequest`): + The initial request object. + response (:class:`~.pubsub.ListSnapshotsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = pubsub.ListSnapshotsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[pubsub.ListSnapshotsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[pubsub.Snapshot]: + for page in self.pages: + yield from page.snapshots + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSnapshotsAsyncPager: + """A pager for iterating through ``list_snapshots`` requests. 
+ + This class thinly wraps an initial + :class:`~.pubsub.ListSnapshotsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``snapshots`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSnapshots`` requests and continue to iterate + through the ``snapshots`` field on the + corresponding responses. + + All the usual :class:`~.pubsub.ListSnapshotsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[pubsub.ListSnapshotsResponse]], + request: pubsub.ListSnapshotsRequest, + response: pubsub.ListSnapshotsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.pubsub.ListSnapshotsRequest`): + The initial request object. + response (:class:`~.pubsub.ListSnapshotsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = pubsub.ListSnapshotsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[pubsub.ListSnapshotsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[pubsub.Snapshot]: + async def async_generator(): + async for page in self.pages: + for response in page.snapshots: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/pubsub_v1/services/subscriber/transports/__init__.py b/google/pubsub_v1/services/subscriber/transports/__init__.py new file mode 100644 index 000000000..08282e11d --- /dev/null +++ b/google/pubsub_v1/services/subscriber/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +from typing import Dict, Type + +from .base import SubscriberTransport +from .grpc import SubscriberGrpcTransport +from .grpc_asyncio import SubscriberGrpcAsyncIOTransport + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[SubscriberTransport]] +_transport_registry["grpc"] = SubscriberGrpcTransport +_transport_registry["grpc_asyncio"] = SubscriberGrpcAsyncIOTransport + + +__all__ = ( + "SubscriberTransport", + "SubscriberGrpcTransport", + "SubscriberGrpcAsyncIOTransport", +) diff --git a/google/pubsub_v1/services/subscriber/transports/base.py b/google/pubsub_v1/services/subscriber/transports/base.py new file mode 100644 index 000000000..7d7dfc622 --- /dev/null +++ b/google/pubsub_v1/services/subscriber/transports/base.py @@ -0,0 +1,507 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import abc +import typing +import pkg_resources + +from google import auth # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore + +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore +from google.pubsub_v1.types import pubsub + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-pubsub",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +class SubscriberTransport(abc.ABC): + """Abstract transport class for Subscriber.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ) + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: typing.Optional[str] = None, + scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, + quota_project_id: typing.Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
scopes (Optional[Sequence[str]]): A list of scopes.
+ self._wrapped_methods = { + self.create_subscription: gapic_v1.method.wrap_method( + self.create_subscription, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_subscription: gapic_v1.method.wrap_method( + self.get_subscription, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_subscription: gapic_v1.method.wrap_method( + self.update_subscription, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_subscriptions: gapic_v1.method.wrap_method( + self.list_subscriptions, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_subscription: gapic_v1.method.wrap_method( + self.delete_subscription, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.modify_ack_deadline: gapic_v1.method.wrap_method( + self.modify_ack_deadline, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.acknowledge: 
gapic_v1.method.wrap_method( + self.acknowledge, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.pull: gapic_v1.method.wrap_method( + self.pull, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.streaming_pull: gapic_v1.method.wrap_method( + self.streaming_pull, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.DeadlineExceeded, + exceptions.ResourceExhausted, + exceptions.ServiceUnavailable, + exceptions.InternalServerError, + ), + ), + default_timeout=900.0, + client_info=client_info, + ), + self.modify_push_config: gapic_v1.method.wrap_method( + self.modify_push_config, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_snapshot: gapic_v1.method.wrap_method( + self.get_snapshot, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_snapshots: gapic_v1.method.wrap_method( + self.list_snapshots, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_snapshot: 
gapic_v1.method.wrap_method( + self.create_snapshot, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_snapshot: gapic_v1.method.wrap_method( + self.update_snapshot, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_snapshot: gapic_v1.method.wrap_method( + self.delete_snapshot, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.seek: gapic_v1.method.wrap_method( + self.seek, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + @property + def create_subscription( + self, + ) -> typing.Callable[ + [pubsub.Subscription], + typing.Union[pubsub.Subscription, typing.Awaitable[pubsub.Subscription]], + ]: + raise NotImplementedError() + + @property + def get_subscription( + self, + ) -> typing.Callable[ + [pubsub.GetSubscriptionRequest], + typing.Union[pubsub.Subscription, typing.Awaitable[pubsub.Subscription]], + ]: + raise NotImplementedError() + + @property + def update_subscription( + self, + ) -> typing.Callable[ + [pubsub.UpdateSubscriptionRequest], + typing.Union[pubsub.Subscription, typing.Awaitable[pubsub.Subscription]], + ]: + raise NotImplementedError() + + @property + def list_subscriptions( + self, + ) -> typing.Callable[ + [pubsub.ListSubscriptionsRequest], + typing.Union[ + pubsub.ListSubscriptionsResponse, + 
typing.Awaitable[pubsub.ListSubscriptionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_subscription( + self, + ) -> typing.Callable[ + [pubsub.DeleteSubscriptionRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def modify_ack_deadline( + self, + ) -> typing.Callable[ + [pubsub.ModifyAckDeadlineRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def acknowledge( + self, + ) -> typing.Callable[ + [pubsub.AcknowledgeRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def pull( + self, + ) -> typing.Callable[ + [pubsub.PullRequest], + typing.Union[pubsub.PullResponse, typing.Awaitable[pubsub.PullResponse]], + ]: + raise NotImplementedError() + + @property + def streaming_pull( + self, + ) -> typing.Callable[ + [pubsub.StreamingPullRequest], + typing.Union[ + pubsub.StreamingPullResponse, typing.Awaitable[pubsub.StreamingPullResponse] + ], + ]: + raise NotImplementedError() + + @property + def modify_push_config( + self, + ) -> typing.Callable[ + [pubsub.ModifyPushConfigRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def get_snapshot( + self, + ) -> typing.Callable[ + [pubsub.GetSnapshotRequest], + typing.Union[pubsub.Snapshot, typing.Awaitable[pubsub.Snapshot]], + ]: + raise NotImplementedError() + + @property + def list_snapshots( + self, + ) -> typing.Callable[ + [pubsub.ListSnapshotsRequest], + typing.Union[ + pubsub.ListSnapshotsResponse, typing.Awaitable[pubsub.ListSnapshotsResponse] + ], + ]: + raise NotImplementedError() + + @property + def create_snapshot( + self, + ) -> typing.Callable[ + [pubsub.CreateSnapshotRequest], + typing.Union[pubsub.Snapshot, typing.Awaitable[pubsub.Snapshot]], + ]: + raise NotImplementedError() + + @property + def 
update_snapshot( + self, + ) -> typing.Callable[ + [pubsub.UpdateSnapshotRequest], + typing.Union[pubsub.Snapshot, typing.Awaitable[pubsub.Snapshot]], + ]: + raise NotImplementedError() + + @property + def delete_snapshot( + self, + ) -> typing.Callable[ + [pubsub.DeleteSnapshotRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def seek( + self, + ) -> typing.Callable[ + [pubsub.SeekRequest], + typing.Union[pubsub.SeekResponse, typing.Awaitable[pubsub.SeekResponse]], + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> typing.Callable[ + [iam_policy.SetIamPolicyRequest], + typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> typing.Callable[ + [iam_policy.GetIamPolicyRequest], + typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> typing.Callable[ + [iam_policy.TestIamPermissionsRequest], + typing.Union[ + iam_policy.TestIamPermissionsResponse, + typing.Awaitable[iam_policy.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + +__all__ = ("SubscriberTransport",) diff --git a/google/pubsub_v1/services/subscriber/transports/grpc.py b/google/pubsub_v1/services/subscriber/transports/grpc.py new file mode 100644 index 000000000..721d31d36 --- /dev/null +++ b/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -0,0 +1,803 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + + +import grpc # type: ignore + +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore +from google.pubsub_v1.types import pubsub + +from .base import SubscriberTransport, DEFAULT_CLIENT_INFO + + +class SubscriberGrpcTransport(SubscriberTransport): + """gRPC backend transport for Subscriber. + + The service that an application uses to manipulate subscriptions and + to consume messages from a subscription via the ``Pull`` method or + by establishing a bi-directional stream using the ``StreamingPull`` + method. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
    provided, it overrides the ``host`` argument and tries to create
    a mutual TLS channel with client SSL credentials from
    ``client_cert_source`` or application default SSL credentials.
If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + self._stubs = {} # type: Dict[str, Callable] + + # Run the base constructor. 
address (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
    authorization credentials to attach to requests. These
    credentials identify this application to the service. If
    none are specified, the client will attempt to ascertain
    the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
    be loaded with :func:`google.auth.load_credentials_from_file`.
    This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
    service. These are only used when credentials are not specified and
    are passed to :func:`google.auth.default`.
+ + This property caches on the instance; repeated calls return + the same channel. + """ + # Sanity check: Only create a new channel if we do not already + # have one. + if not hasattr(self, "_grpc_channel"): + self._grpc_channel = self.create_channel( + self._host, credentials=self._credentials, + ) + + # Return the channel from cache. + return self._grpc_channel + + @property + def create_subscription( + self, + ) -> Callable[[pubsub.Subscription], pubsub.Subscription]: + r"""Return a callable for the create subscription method over gRPC. + + Creates a subscription to a given topic. See the [resource name + rules] + (https://cloud.google.com/pubsub/docs/admin#resource_names). If + the subscription already exists, returns ``ALREADY_EXISTS``. If + the corresponding topic doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will + assign a random name for this subscription on the same project + as the topic, conforming to the [resource name format] + (https://cloud.google.com/pubsub/docs/admin#resource_names). The + generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the + request. + + Returns: + Callable[[~.Subscription], + ~.Subscription]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_subscription" not in self._stubs: + self._stubs["create_subscription"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/CreateSubscription", + request_serializer=pubsub.Subscription.serialize, + response_deserializer=pubsub.Subscription.deserialize, + ) + return self._stubs["create_subscription"] + + @property + def get_subscription( + self, + ) -> Callable[[pubsub.GetSubscriptionRequest], pubsub.Subscription]: + r"""Return a callable for the get subscription method over gRPC. + + Gets the configuration details of a subscription. + + Returns: + Callable[[~.GetSubscriptionRequest], + ~.Subscription]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_subscription" not in self._stubs: + self._stubs["get_subscription"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/GetSubscription", + request_serializer=pubsub.GetSubscriptionRequest.serialize, + response_deserializer=pubsub.Subscription.deserialize, + ) + return self._stubs["get_subscription"] + + @property + def update_subscription( + self, + ) -> Callable[[pubsub.UpdateSubscriptionRequest], pubsub.Subscription]: + r"""Return a callable for the update subscription method over gRPC. + + Updates an existing subscription. Note that certain + properties of a subscription, such as its topic, are not + modifiable. + + Returns: + Callable[[~.UpdateSubscriptionRequest], + ~.Subscription]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_subscription" not in self._stubs: + self._stubs["update_subscription"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/UpdateSubscription", + request_serializer=pubsub.UpdateSubscriptionRequest.serialize, + response_deserializer=pubsub.Subscription.deserialize, + ) + return self._stubs["update_subscription"] + + @property + def list_subscriptions( + self, + ) -> Callable[[pubsub.ListSubscriptionsRequest], pubsub.ListSubscriptionsResponse]: + r"""Return a callable for the list subscriptions method over gRPC. + + Lists matching subscriptions. + + Returns: + Callable[[~.ListSubscriptionsRequest], + ~.ListSubscriptionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_subscriptions" not in self._stubs: + self._stubs["list_subscriptions"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/ListSubscriptions", + request_serializer=pubsub.ListSubscriptionsRequest.serialize, + response_deserializer=pubsub.ListSubscriptionsResponse.deserialize, + ) + return self._stubs["list_subscriptions"] + + @property + def delete_subscription( + self, + ) -> Callable[[pubsub.DeleteSubscriptionRequest], empty.Empty]: + r"""Return a callable for the delete subscription method over gRPC. + + Deletes an existing subscription. All messages retained in the + subscription are immediately dropped. Calls to ``Pull`` after + deletion will return ``NOT_FOUND``. After a subscription is + deleted, a new one may be created with the same name, but the + new one has no association with the old subscription or its + topic unless the same topic is specified. + + Returns: + Callable[[~.DeleteSubscriptionRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_subscription" not in self._stubs: + self._stubs["delete_subscription"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/DeleteSubscription", + request_serializer=pubsub.DeleteSubscriptionRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_subscription"] + + @property + def modify_ack_deadline( + self, + ) -> Callable[[pubsub.ModifyAckDeadlineRequest], empty.Empty]: + r"""Return a callable for the modify ack deadline method over gRPC. + + Modifies the ack deadline for a specific message. This method is + useful to indicate that more time is needed to process a message + by the subscriber, or to make the message available for + redelivery if the processing was interrupted. Note that this + does not modify the subscription-level ``ackDeadlineSeconds`` + used for subsequent messages. + + Returns: + Callable[[~.ModifyAckDeadlineRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "modify_ack_deadline" not in self._stubs: + self._stubs["modify_ack_deadline"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/ModifyAckDeadline", + request_serializer=pubsub.ModifyAckDeadlineRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["modify_ack_deadline"] + + @property + def acknowledge(self) -> Callable[[pubsub.AcknowledgeRequest], empty.Empty]: + r"""Return a callable for the acknowledge method over gRPC. + + Acknowledges the messages associated with the ``ack_ids`` in the + ``AcknowledgeRequest``. 
The Pub/Sub system can remove the + relevant messages from the subscription. + + Acknowledging a message whose ack deadline has expired may + succeed, but such a message may be redelivered later. + Acknowledging a message more than once will not result in an + error. + + Returns: + Callable[[~.AcknowledgeRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "acknowledge" not in self._stubs: + self._stubs["acknowledge"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/Acknowledge", + request_serializer=pubsub.AcknowledgeRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["acknowledge"] + + @property + def pull(self) -> Callable[[pubsub.PullRequest], pubsub.PullResponse]: + r"""Return a callable for the pull method over gRPC. + + Pulls messages from the server. The server may return + ``UNAVAILABLE`` if there are too many concurrent pull requests + pending for the given subscription. + + Returns: + Callable[[~.PullRequest], + ~.PullResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "pull" not in self._stubs: + self._stubs["pull"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/Pull", + request_serializer=pubsub.PullRequest.serialize, + response_deserializer=pubsub.PullResponse.deserialize, + ) + return self._stubs["pull"] + + @property + def streaming_pull( + self, + ) -> Callable[[pubsub.StreamingPullRequest], pubsub.StreamingPullResponse]: + r"""Return a callable for the streaming pull method over gRPC. 
+ + Establishes a stream with the server, which sends messages down + to the client. The client streams acknowledgements and ack + deadline modifications back to the server. The server will close + the stream and return the status on any error. The server may + close the stream with status ``UNAVAILABLE`` to reassign + server-side resources, in which case, the client should + re-establish the stream. Flow control can be achieved by + configuring the underlying RPC channel. + + Returns: + Callable[[~.StreamingPullRequest], + ~.StreamingPullResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "streaming_pull" not in self._stubs: + self._stubs["streaming_pull"] = self.grpc_channel.stream_stream( + "/google.pubsub.v1.Subscriber/StreamingPull", + request_serializer=pubsub.StreamingPullRequest.serialize, + response_deserializer=pubsub.StreamingPullResponse.deserialize, + ) + return self._stubs["streaming_pull"] + + @property + def modify_push_config( + self, + ) -> Callable[[pubsub.ModifyPushConfigRequest], empty.Empty]: + r"""Return a callable for the modify push config method over gRPC. + + Modifies the ``PushConfig`` for a specified subscription. + + This may be used to change a push subscription to a pull one + (signified by an empty ``PushConfig``) or vice versa, or change + the endpoint URL and other attributes of a push subscription. + Messages will accumulate for delivery continuously through the + call regardless of changes to the ``PushConfig``. + + Returns: + Callable[[~.ModifyPushConfigRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "modify_push_config" not in self._stubs: + self._stubs["modify_push_config"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/ModifyPushConfig", + request_serializer=pubsub.ModifyPushConfigRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["modify_push_config"] + + @property + def get_snapshot(self) -> Callable[[pubsub.GetSnapshotRequest], pubsub.Snapshot]: + r"""Return a callable for the get snapshot method over gRPC. + + Gets the configuration details of a snapshot. + Snapshots are used in Seek operations, which allow you to manage + message acknowledgments in bulk. That is, you can set + the acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + Returns: + Callable[[~.GetSnapshotRequest], + ~.Snapshot]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_snapshot" not in self._stubs: + self._stubs["get_snapshot"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/GetSnapshot", + request_serializer=pubsub.GetSnapshotRequest.serialize, + response_deserializer=pubsub.Snapshot.deserialize, + ) + return self._stubs["get_snapshot"] + + @property + def list_snapshots( + self, + ) -> Callable[[pubsub.ListSnapshotsRequest], pubsub.ListSnapshotsResponse]: + r"""Return a callable for the list snapshots method over gRPC. + + Lists the existing snapshots. Snapshots are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. 
+ + Returns: + Callable[[~.ListSnapshotsRequest], + ~.ListSnapshotsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_snapshots" not in self._stubs: + self._stubs["list_snapshots"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/ListSnapshots", + request_serializer=pubsub.ListSnapshotsRequest.serialize, + response_deserializer=pubsub.ListSnapshotsResponse.deserialize, + ) + return self._stubs["list_snapshots"] + + @property + def create_snapshot( + self, + ) -> Callable[[pubsub.CreateSnapshotRequest], pubsub.Snapshot]: + r"""Return a callable for the create snapshot method over gRPC. + + Creates a snapshot from the requested subscription. Snapshots + are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + If the snapshot already exists, returns ``ALREADY_EXISTS``. If + the requested subscription doesn't exist, returns ``NOT_FOUND``. + If the backlog in the subscription is too old -- and the + resulting snapshot would expire in less than 1 hour -- then + ``FAILED_PRECONDITION`` is returned. See also the + ``Snapshot.expire_time`` field. If the name is not provided in + the request, the server will assign a random name for this + snapshot on the same project as the subscription, conforming to + the [resource name format] + (https://cloud.google.com/pubsub/docs/admin#resource_names). The + generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the + request. 
+ + Returns: + Callable[[~.CreateSnapshotRequest], + ~.Snapshot]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_snapshot" not in self._stubs: + self._stubs["create_snapshot"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/CreateSnapshot", + request_serializer=pubsub.CreateSnapshotRequest.serialize, + response_deserializer=pubsub.Snapshot.deserialize, + ) + return self._stubs["create_snapshot"] + + @property + def update_snapshot( + self, + ) -> Callable[[pubsub.UpdateSnapshotRequest], pubsub.Snapshot]: + r"""Return a callable for the update snapshot method over gRPC. + + Updates an existing snapshot. Snapshots are used in + Seek operations, which allow + you to manage message acknowledgments in bulk. That is, + you can set the acknowledgment state of messages in an + existing subscription to the state captured by a + snapshot. + + Returns: + Callable[[~.UpdateSnapshotRequest], + ~.Snapshot]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_snapshot" not in self._stubs: + self._stubs["update_snapshot"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/UpdateSnapshot", + request_serializer=pubsub.UpdateSnapshotRequest.serialize, + response_deserializer=pubsub.Snapshot.deserialize, + ) + return self._stubs["update_snapshot"] + + @property + def delete_snapshot(self) -> Callable[[pubsub.DeleteSnapshotRequest], empty.Empty]: + r"""Return a callable for the delete snapshot method over gRPC. + + Removes an existing snapshot. 
Snapshots are used in [Seek] + (https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + When the snapshot is deleted, all messages retained in the + snapshot are immediately dropped. After a snapshot is deleted, a + new one may be created with the same name, but the new one has + no association with the old snapshot or its subscription, unless + the same subscription is specified. + + Returns: + Callable[[~.DeleteSnapshotRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_snapshot" not in self._stubs: + self._stubs["delete_snapshot"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/DeleteSnapshot", + request_serializer=pubsub.DeleteSnapshotRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_snapshot"] + + @property + def seek(self) -> Callable[[pubsub.SeekRequest], pubsub.SeekResponse]: + r"""Return a callable for the seek method over gRPC. + + Seeks an existing subscription to a point in time or to a given + snapshot, whichever is provided in the request. Snapshots are + used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + Note that both the subscription and the snapshot must be on the + same topic. + + Returns: + Callable[[~.SeekRequest], + ~.SeekResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "seek" not in self._stubs: + self._stubs["seek"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/Seek", + request_serializer=pubsub.SeekRequest.serialize, + response_deserializer=pubsub.SeekResponse.deserialize, + ) + return self._stubs["seek"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy.SetIamPolicyRequest], policy.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy.GetIamPolicyRequest], policy.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy.TestIamPermissionsRequest], iam_policy.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("SubscriberGrpcTransport",) diff --git a/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py new file mode 100644 index 000000000..0e844728f --- /dev/null +++ b/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -0,0 +1,809 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore +from google.pubsub_v1.types import pubsub + +from .base import SubscriberTransport, DEFAULT_CLIENT_INFO +from .grpc import SubscriberGrpcTransport + + +class SubscriberGrpcAsyncIOTransport(SubscriberTransport): + """gRPC AsyncIO backend transport for Subscriber. + + The service that an application uses to manipulate subscriptions and + to consume messages from a subscription via the ``Pull`` method or + by establishing a bi-directional stream using the ``StreamingPull`` + method. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "pubsub.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + address (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+        scopes = scopes or cls.AUTH_SCOPES
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "pubsub.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: aio.Channel = None,
+        api_mtls_endpoint: str = None,
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        quota_project_id=None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]): The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
+                provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A
+                callback to provide client SSL certificate bytes and private key
+                bytes, both in PEM format. It is ignored if ``api_mtls_endpoint``
+                is None.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        if channel:
+            # Sanity check: Ensure that channel and credentials are not both
+            # provided.
+            credentials = False
+
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+        elif api_mtls_endpoint:
+            host = (
+                api_mtls_endpoint
+                if ":" in api_mtls_endpoint
+                else api_mtls_endpoint + ":443"
+            )
+
+            # Create SSL credentials with client_cert_source or application
+            # default SSL credentials.
+ if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + self._stubs = {} + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Sanity check: Only create a new channel if we do not already + # have one. + if not hasattr(self, "_grpc_channel"): + self._grpc_channel = self.create_channel( + self._host, credentials=self._credentials, + ) + + # Return the channel from cache. + return self._grpc_channel + + @property + def create_subscription( + self, + ) -> Callable[[pubsub.Subscription], Awaitable[pubsub.Subscription]]: + r"""Return a callable for the create subscription method over gRPC. + + Creates a subscription to a given topic. See the [resource name + rules] + (https://cloud.google.com/pubsub/docs/admin#resource_names). If + the subscription already exists, returns ``ALREADY_EXISTS``. If + the corresponding topic doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will + assign a random name for this subscription on the same project + as the topic, conforming to the [resource name format] + (https://cloud.google.com/pubsub/docs/admin#resource_names). 
The + generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the + request. + + Returns: + Callable[[~.Subscription], + Awaitable[~.Subscription]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_subscription" not in self._stubs: + self._stubs["create_subscription"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/CreateSubscription", + request_serializer=pubsub.Subscription.serialize, + response_deserializer=pubsub.Subscription.deserialize, + ) + return self._stubs["create_subscription"] + + @property + def get_subscription( + self, + ) -> Callable[[pubsub.GetSubscriptionRequest], Awaitable[pubsub.Subscription]]: + r"""Return a callable for the get subscription method over gRPC. + + Gets the configuration details of a subscription. + + Returns: + Callable[[~.GetSubscriptionRequest], + Awaitable[~.Subscription]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_subscription" not in self._stubs: + self._stubs["get_subscription"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/GetSubscription", + request_serializer=pubsub.GetSubscriptionRequest.serialize, + response_deserializer=pubsub.Subscription.deserialize, + ) + return self._stubs["get_subscription"] + + @property + def update_subscription( + self, + ) -> Callable[[pubsub.UpdateSubscriptionRequest], Awaitable[pubsub.Subscription]]: + r"""Return a callable for the update subscription method over gRPC. + + Updates an existing subscription. 
Note that certain + properties of a subscription, such as its topic, are not + modifiable. + + Returns: + Callable[[~.UpdateSubscriptionRequest], + Awaitable[~.Subscription]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_subscription" not in self._stubs: + self._stubs["update_subscription"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/UpdateSubscription", + request_serializer=pubsub.UpdateSubscriptionRequest.serialize, + response_deserializer=pubsub.Subscription.deserialize, + ) + return self._stubs["update_subscription"] + + @property + def list_subscriptions( + self, + ) -> Callable[ + [pubsub.ListSubscriptionsRequest], Awaitable[pubsub.ListSubscriptionsResponse] + ]: + r"""Return a callable for the list subscriptions method over gRPC. + + Lists matching subscriptions. + + Returns: + Callable[[~.ListSubscriptionsRequest], + Awaitable[~.ListSubscriptionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_subscriptions" not in self._stubs: + self._stubs["list_subscriptions"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/ListSubscriptions", + request_serializer=pubsub.ListSubscriptionsRequest.serialize, + response_deserializer=pubsub.ListSubscriptionsResponse.deserialize, + ) + return self._stubs["list_subscriptions"] + + @property + def delete_subscription( + self, + ) -> Callable[[pubsub.DeleteSubscriptionRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete subscription method over gRPC. + + Deletes an existing subscription. 
All messages retained in the + subscription are immediately dropped. Calls to ``Pull`` after + deletion will return ``NOT_FOUND``. After a subscription is + deleted, a new one may be created with the same name, but the + new one has no association with the old subscription or its + topic unless the same topic is specified. + + Returns: + Callable[[~.DeleteSubscriptionRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_subscription" not in self._stubs: + self._stubs["delete_subscription"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/DeleteSubscription", + request_serializer=pubsub.DeleteSubscriptionRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_subscription"] + + @property + def modify_ack_deadline( + self, + ) -> Callable[[pubsub.ModifyAckDeadlineRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the modify ack deadline method over gRPC. + + Modifies the ack deadline for a specific message. This method is + useful to indicate that more time is needed to process a message + by the subscriber, or to make the message available for + redelivery if the processing was interrupted. Note that this + does not modify the subscription-level ``ackDeadlineSeconds`` + used for subsequent messages. + + Returns: + Callable[[~.ModifyAckDeadlineRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "modify_ack_deadline" not in self._stubs: + self._stubs["modify_ack_deadline"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/ModifyAckDeadline", + request_serializer=pubsub.ModifyAckDeadlineRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["modify_ack_deadline"] + + @property + def acknowledge( + self, + ) -> Callable[[pubsub.AcknowledgeRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the acknowledge method over gRPC. + + Acknowledges the messages associated with the ``ack_ids`` in the + ``AcknowledgeRequest``. The Pub/Sub system can remove the + relevant messages from the subscription. + + Acknowledging a message whose ack deadline has expired may + succeed, but such a message may be redelivered later. + Acknowledging a message more than once will not result in an + error. + + Returns: + Callable[[~.AcknowledgeRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "acknowledge" not in self._stubs: + self._stubs["acknowledge"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/Acknowledge", + request_serializer=pubsub.AcknowledgeRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["acknowledge"] + + @property + def pull(self) -> Callable[[pubsub.PullRequest], Awaitable[pubsub.PullResponse]]: + r"""Return a callable for the pull method over gRPC. + + Pulls messages from the server. The server may return + ``UNAVAILABLE`` if there are too many concurrent pull requests + pending for the given subscription. + + Returns: + Callable[[~.PullRequest], + Awaitable[~.PullResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "pull" not in self._stubs: + self._stubs["pull"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/Pull", + request_serializer=pubsub.PullRequest.serialize, + response_deserializer=pubsub.PullResponse.deserialize, + ) + return self._stubs["pull"] + + @property + def streaming_pull( + self, + ) -> Callable[ + [pubsub.StreamingPullRequest], Awaitable[pubsub.StreamingPullResponse] + ]: + r"""Return a callable for the streaming pull method over gRPC. + + Establishes a stream with the server, which sends messages down + to the client. The client streams acknowledgements and ack + deadline modifications back to the server. The server will close + the stream and return the status on any error. The server may + close the stream with status ``UNAVAILABLE`` to reassign + server-side resources, in which case, the client should + re-establish the stream. Flow control can be achieved by + configuring the underlying RPC channel. + + Returns: + Callable[[~.StreamingPullRequest], + Awaitable[~.StreamingPullResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "streaming_pull" not in self._stubs: + self._stubs["streaming_pull"] = self.grpc_channel.stream_stream( + "/google.pubsub.v1.Subscriber/StreamingPull", + request_serializer=pubsub.StreamingPullRequest.serialize, + response_deserializer=pubsub.StreamingPullResponse.deserialize, + ) + return self._stubs["streaming_pull"] + + @property + def modify_push_config( + self, + ) -> Callable[[pubsub.ModifyPushConfigRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the modify push config method over gRPC. + + Modifies the ``PushConfig`` for a specified subscription. + + This may be used to change a push subscription to a pull one + (signified by an empty ``PushConfig``) or vice versa, or change + the endpoint URL and other attributes of a push subscription. + Messages will accumulate for delivery continuously through the + call regardless of changes to the ``PushConfig``. + + Returns: + Callable[[~.ModifyPushConfigRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "modify_push_config" not in self._stubs: + self._stubs["modify_push_config"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/ModifyPushConfig", + request_serializer=pubsub.ModifyPushConfigRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["modify_push_config"] + + @property + def get_snapshot( + self, + ) -> Callable[[pubsub.GetSnapshotRequest], Awaitable[pubsub.Snapshot]]: + r"""Return a callable for the get snapshot method over gRPC. + + Gets the configuration details of a snapshot. + Snapshots are used in Seek operations, which allow you to manage + message acknowledgments in bulk. 
That is, you can set + the acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + Returns: + Callable[[~.GetSnapshotRequest], + Awaitable[~.Snapshot]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_snapshot" not in self._stubs: + self._stubs["get_snapshot"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/GetSnapshot", + request_serializer=pubsub.GetSnapshotRequest.serialize, + response_deserializer=pubsub.Snapshot.deserialize, + ) + return self._stubs["get_snapshot"] + + @property + def list_snapshots( + self, + ) -> Callable[ + [pubsub.ListSnapshotsRequest], Awaitable[pubsub.ListSnapshotsResponse] + ]: + r"""Return a callable for the list snapshots method over gRPC. + + Lists the existing snapshots. Snapshots are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + + Returns: + Callable[[~.ListSnapshotsRequest], + Awaitable[~.ListSnapshotsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_snapshots" not in self._stubs: + self._stubs["list_snapshots"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/ListSnapshots", + request_serializer=pubsub.ListSnapshotsRequest.serialize, + response_deserializer=pubsub.ListSnapshotsResponse.deserialize, + ) + return self._stubs["list_snapshots"] + + @property + def create_snapshot( + self, + ) -> Callable[[pubsub.CreateSnapshotRequest], Awaitable[pubsub.Snapshot]]: + r"""Return a callable for the create snapshot method over gRPC. + + Creates a snapshot from the requested subscription. Snapshots + are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + If the snapshot already exists, returns ``ALREADY_EXISTS``. If + the requested subscription doesn't exist, returns ``NOT_FOUND``. + If the backlog in the subscription is too old -- and the + resulting snapshot would expire in less than 1 hour -- then + ``FAILED_PRECONDITION`` is returned. See also the + ``Snapshot.expire_time`` field. If the name is not provided in + the request, the server will assign a random name for this + snapshot on the same project as the subscription, conforming to + the [resource name format] + (https://cloud.google.com/pubsub/docs/admin#resource_names). The + generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the + request. + + Returns: + Callable[[~.CreateSnapshotRequest], + Awaitable[~.Snapshot]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_snapshot" not in self._stubs: + self._stubs["create_snapshot"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/CreateSnapshot", + request_serializer=pubsub.CreateSnapshotRequest.serialize, + response_deserializer=pubsub.Snapshot.deserialize, + ) + return self._stubs["create_snapshot"] + + @property + def update_snapshot( + self, + ) -> Callable[[pubsub.UpdateSnapshotRequest], Awaitable[pubsub.Snapshot]]: + r"""Return a callable for the update snapshot method over gRPC. + + Updates an existing snapshot. Snapshots are used in + Seek operations, which allow + you to manage message acknowledgments in bulk. That is, + you can set the acknowledgment state of messages in an + existing subscription to the state captured by a + snapshot. + + Returns: + Callable[[~.UpdateSnapshotRequest], + Awaitable[~.Snapshot]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_snapshot" not in self._stubs: + self._stubs["update_snapshot"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/UpdateSnapshot", + request_serializer=pubsub.UpdateSnapshotRequest.serialize, + response_deserializer=pubsub.Snapshot.deserialize, + ) + return self._stubs["update_snapshot"] + + @property + def delete_snapshot( + self, + ) -> Callable[[pubsub.DeleteSnapshotRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete snapshot method over gRPC. + + Removes an existing snapshot. Snapshots are used in [Seek] + (https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. 
+ When the snapshot is deleted, all messages retained in the + snapshot are immediately dropped. After a snapshot is deleted, a + new one may be created with the same name, but the new one has + no association with the old snapshot or its subscription, unless + the same subscription is specified. + + Returns: + Callable[[~.DeleteSnapshotRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_snapshot" not in self._stubs: + self._stubs["delete_snapshot"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/DeleteSnapshot", + request_serializer=pubsub.DeleteSnapshotRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_snapshot"] + + @property + def seek(self) -> Callable[[pubsub.SeekRequest], Awaitable[pubsub.SeekResponse]]: + r"""Return a callable for the seek method over gRPC. + + Seeks an existing subscription to a point in time or to a given + snapshot, whichever is provided in the request. Snapshots are + used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + Note that both the subscription and the snapshot must be on the + same topic. + + Returns: + Callable[[~.SeekRequest], + Awaitable[~.SeekResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "seek" not in self._stubs: + self._stubs["seek"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/Seek", + request_serializer=pubsub.SeekRequest.serialize, + response_deserializer=pubsub.SeekResponse.deserialize, + ) + return self._stubs["seek"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[policy.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[policy.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy.TestIamPermissionsRequest], + Awaitable[iam_policy.TestIamPermissionsResponse], + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("SubscriberGrpcAsyncIOTransport",) diff --git a/google/pubsub_v1/types/__init__.py b/google/pubsub_v1/types/__init__.py new file mode 100644 index 000000000..915c3f2e2 --- /dev/null +++ b/google/pubsub_v1/types/__init__.py @@ -0,0 +1,109 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .pubsub import ( + MessageStoragePolicy, + Topic, + PubsubMessage, + GetTopicRequest, + UpdateTopicRequest, + PublishRequest, + PublishResponse, + ListTopicsRequest, + ListTopicsResponse, + ListTopicSubscriptionsRequest, + ListTopicSubscriptionsResponse, + ListTopicSnapshotsRequest, + ListTopicSnapshotsResponse, + DeleteTopicRequest, + DetachSubscriptionRequest, + DetachSubscriptionResponse, + Subscription, + RetryPolicy, + DeadLetterPolicy, + ExpirationPolicy, + PushConfig, + ReceivedMessage, + GetSubscriptionRequest, + UpdateSubscriptionRequest, + ListSubscriptionsRequest, + ListSubscriptionsResponse, + DeleteSubscriptionRequest, + ModifyPushConfigRequest, + PullRequest, + PullResponse, + ModifyAckDeadlineRequest, + AcknowledgeRequest, + StreamingPullRequest, + StreamingPullResponse, + CreateSnapshotRequest, + UpdateSnapshotRequest, + Snapshot, + GetSnapshotRequest, + ListSnapshotsRequest, + ListSnapshotsResponse, + DeleteSnapshotRequest, + SeekRequest, + SeekResponse, +) + + +__all__ = ( + "MessageStoragePolicy", + "Topic", + "PubsubMessage", + "GetTopicRequest", + "UpdateTopicRequest", + "PublishRequest", + "PublishResponse", + "ListTopicsRequest", + "ListTopicsResponse", + "ListTopicSubscriptionsRequest", + "ListTopicSubscriptionsResponse", + "ListTopicSnapshotsRequest", + "ListTopicSnapshotsResponse", + "DeleteTopicRequest", + "DetachSubscriptionRequest", + "DetachSubscriptionResponse", + "Subscription", + "RetryPolicy", + "DeadLetterPolicy", + "ExpirationPolicy", + "PushConfig", + "ReceivedMessage", + "GetSubscriptionRequest", + 
"UpdateSubscriptionRequest", + "ListSubscriptionsRequest", + "ListSubscriptionsResponse", + "DeleteSubscriptionRequest", + "ModifyPushConfigRequest", + "PullRequest", + "PullResponse", + "ModifyAckDeadlineRequest", + "AcknowledgeRequest", + "StreamingPullRequest", + "StreamingPullResponse", + "CreateSnapshotRequest", + "UpdateSnapshotRequest", + "Snapshot", + "GetSnapshotRequest", + "ListSnapshotsRequest", + "ListSnapshotsResponse", + "DeleteSnapshotRequest", + "SeekRequest", + "SeekResponse", +) diff --git a/google/pubsub_v1/types/pubsub.py b/google/pubsub_v1/types/pubsub.py new file mode 100644 index 000000000..61bb089f5 --- /dev/null +++ b/google/pubsub_v1/types/pubsub.py @@ -0,0 +1,1299 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import proto # type: ignore + + +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + + +__protobuf__ = proto.module( + package="google.pubsub.v1", + manifest={ + "MessageStoragePolicy", + "Topic", + "PubsubMessage", + "GetTopicRequest", + "UpdateTopicRequest", + "PublishRequest", + "PublishResponse", + "ListTopicsRequest", + "ListTopicsResponse", + "ListTopicSubscriptionsRequest", + "ListTopicSubscriptionsResponse", + "ListTopicSnapshotsRequest", + "ListTopicSnapshotsResponse", + "DeleteTopicRequest", + "DetachSubscriptionRequest", + "DetachSubscriptionResponse", + "Subscription", + "RetryPolicy", + "DeadLetterPolicy", + "ExpirationPolicy", + "PushConfig", + "ReceivedMessage", + "GetSubscriptionRequest", + "UpdateSubscriptionRequest", + "ListSubscriptionsRequest", + "ListSubscriptionsResponse", + "DeleteSubscriptionRequest", + "ModifyPushConfigRequest", + "PullRequest", + "PullResponse", + "ModifyAckDeadlineRequest", + "AcknowledgeRequest", + "StreamingPullRequest", + "StreamingPullResponse", + "CreateSnapshotRequest", + "UpdateSnapshotRequest", + "Snapshot", + "GetSnapshotRequest", + "ListSnapshotsRequest", + "ListSnapshotsResponse", + "DeleteSnapshotRequest", + "SeekRequest", + "SeekResponse", + }, +) + + +class MessageStoragePolicy(proto.Message): + r"""A policy constraining the storage of messages published to + the topic. + + Attributes: + allowed_persistence_regions (Sequence[str]): + A list of IDs of GCP regions where messages + that are published to the topic may be persisted + in storage. Messages published by publishers + running in non-allowed GCP regions (or running + outside of GCP altogether) will be routed for + storage in one of the allowed regions. An empty + list means that no regions are allowed, and is + not a valid configuration. 
+ """ + + allowed_persistence_regions = proto.RepeatedField(proto.STRING, number=1) + + +class Topic(proto.Message): + r"""A topic resource. + + Attributes: + name (str): + Required. The name of the topic. It must have the format + ``"projects/{project}/topics/{topic}"``. ``{topic}`` must + start with a letter, and contain only letters + (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), + underscores (``_``), periods (``.``), tildes (``~``), plus + (``+``) or percent signs (``%``). It must be between 3 and + 255 characters in length, and it must not start with + ``"goog"``. + labels (Sequence[~.pubsub.Topic.LabelsEntry]): + See [Creating and managing labels] + (https://cloud.google.com/pubsub/docs/labels). + message_storage_policy (~.pubsub.MessageStoragePolicy): + Policy constraining the set of Google Cloud + Platform regions where messages published to the + topic may be stored. If not present, then no + constraints are in effect. + kms_key_name (str): + The resource name of the Cloud KMS CryptoKey to be used to + protect access to messages published on this topic. + + The expected format is + ``projects/*/locations/*/keyRings/*/cryptoKeys/*``. + """ + + name = proto.Field(proto.STRING, number=1) + + labels = proto.MapField(proto.STRING, proto.STRING, number=2) + + message_storage_policy = proto.Field( + proto.MESSAGE, number=3, message=MessageStoragePolicy, + ) + + kms_key_name = proto.Field(proto.STRING, number=5) + + +class PubsubMessage(proto.Message): + r"""A message that is published by publishers and consumed by + subscribers. The message must contain either a non-empty data field + or at least one attribute. Note that client libraries represent this + object differently depending on the language. See the corresponding + `client library + documentation `__ + for more information. See [quotas and limits] + (https://cloud.google.com/pubsub/quotas) for more information about + message limits. + + Attributes: + data (bytes): + The message data field. 
If this field is + empty, the message must contain at least one + attribute. + attributes (Sequence[~.pubsub.PubsubMessage.AttributesEntry]): + Attributes for this message. If this field is + empty, the message must contain non-empty data. + This can be used to filter messages on the + subscription. + message_id (str): + ID of this message, assigned by the server when the message + is published. Guaranteed to be unique within the topic. This + value may be read by a subscriber that receives a + ``PubsubMessage`` via a ``Pull`` call or a push delivery. It + must not be populated by the publisher in a ``Publish`` + call. + publish_time (~.timestamp.Timestamp): + The time at which the message was published, populated by + the server when it receives the ``Publish`` call. It must + not be populated by the publisher in a ``Publish`` call. + ordering_key (str): + If non-empty, identifies related messages for which publish + order should be respected. If a ``Subscription`` has + ``enable_message_ordering`` set to ``true``, messages + published with the same non-empty ``ordering_key`` value + will be delivered to subscribers in the order in which they + are received by the Pub/Sub system. All ``PubsubMessage``\ s + published in a given ``PublishRequest`` must specify the + same ``ordering_key`` value. + """ + + data = proto.Field(proto.BYTES, number=1) + + attributes = proto.MapField(proto.STRING, proto.STRING, number=2) + + message_id = proto.Field(proto.STRING, number=3) + + publish_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) + + ordering_key = proto.Field(proto.STRING, number=5) + + +class GetTopicRequest(proto.Message): + r"""Request for the GetTopic method. + + Attributes: + topic (str): + Required. The name of the topic to get. Format is + ``projects/{project}/topics/{topic}``. + """ + + topic = proto.Field(proto.STRING, number=1) + + +class UpdateTopicRequest(proto.Message): + r"""Request for the UpdateTopic method. 
+ + Attributes: + topic (~.pubsub.Topic): + Required. The updated topic object. + update_mask (~.field_mask.FieldMask): + Required. Indicates which fields in the provided topic to + update. Must be specified and non-empty. Note that if + ``update_mask`` contains "message_storage_policy" but the + ``message_storage_policy`` is not set in the ``topic`` + provided above, then the updated value is determined by the + policy configured at the project or organization level. + """ + + topic = proto.Field(proto.MESSAGE, number=1, message=Topic,) + + update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + + +class PublishRequest(proto.Message): + r"""Request for the Publish method. + + Attributes: + topic (str): + Required. The messages in the request will be published on + this topic. Format is ``projects/{project}/topics/{topic}``. + messages (Sequence[~.pubsub.PubsubMessage]): + Required. The messages to publish. + """ + + topic = proto.Field(proto.STRING, number=1) + + messages = proto.RepeatedField(proto.MESSAGE, number=2, message=PubsubMessage,) + + +class PublishResponse(proto.Message): + r"""Response for the ``Publish`` method. + + Attributes: + message_ids (Sequence[str]): + The server-assigned ID of each published + message, in the same order as the messages in + the request. IDs are guaranteed to be unique + within the topic. + """ + + message_ids = proto.RepeatedField(proto.STRING, number=1) + + +class ListTopicsRequest(proto.Message): + r"""Request for the ``ListTopics`` method. + + Attributes: + project (str): + Required. The name of the project in which to list topics. + Format is ``projects/{project-id}``. + page_size (int): + Maximum number of topics to return. + page_token (str): + The value returned by the last ``ListTopicsResponse``; + indicates that this is a continuation of a prior + ``ListTopics`` call, and that the system should return the + next page of data. 
+ """ + + project = proto.Field(proto.STRING, number=1) + + page_size = proto.Field(proto.INT32, number=2) + + page_token = proto.Field(proto.STRING, number=3) + + +class ListTopicsResponse(proto.Message): + r"""Response for the ``ListTopics`` method. + + Attributes: + topics (Sequence[~.pubsub.Topic]): + The resulting topics. + next_page_token (str): + If not empty, indicates that there may be more topics that + match the request; this value should be passed in a new + ``ListTopicsRequest``. + """ + + @property + def raw_page(self): + return self + + topics = proto.RepeatedField(proto.MESSAGE, number=1, message=Topic,) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class ListTopicSubscriptionsRequest(proto.Message): + r"""Request for the ``ListTopicSubscriptions`` method. + + Attributes: + topic (str): + Required. The name of the topic that subscriptions are + attached to. Format is + ``projects/{project}/topics/{topic}``. + page_size (int): + Maximum number of subscription names to + return. + page_token (str): + The value returned by the last + ``ListTopicSubscriptionsResponse``; indicates that this is a + continuation of a prior ``ListTopicSubscriptions`` call, and + that the system should return the next page of data. + """ + + topic = proto.Field(proto.STRING, number=1) + + page_size = proto.Field(proto.INT32, number=2) + + page_token = proto.Field(proto.STRING, number=3) + + +class ListTopicSubscriptionsResponse(proto.Message): + r"""Response for the ``ListTopicSubscriptions`` method. + + Attributes: + subscriptions (Sequence[str]): + The names of subscriptions attached to the + topic specified in the request. + next_page_token (str): + If not empty, indicates that there may be more subscriptions + that match the request; this value should be passed in a new + ``ListTopicSubscriptionsRequest`` to get more subscriptions. 
+ """ + + @property + def raw_page(self): + return self + + subscriptions = proto.RepeatedField(proto.STRING, number=1) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class ListTopicSnapshotsRequest(proto.Message): + r"""Request for the ``ListTopicSnapshots`` method. + + Attributes: + topic (str): + Required. The name of the topic that snapshots are attached + to. Format is ``projects/{project}/topics/{topic}``. + page_size (int): + Maximum number of snapshot names to return. + page_token (str): + The value returned by the last + ``ListTopicSnapshotsResponse``; indicates that this is a + continuation of a prior ``ListTopicSnapshots`` call, and + that the system should return the next page of data. + """ + + topic = proto.Field(proto.STRING, number=1) + + page_size = proto.Field(proto.INT32, number=2) + + page_token = proto.Field(proto.STRING, number=3) + + +class ListTopicSnapshotsResponse(proto.Message): + r"""Response for the ``ListTopicSnapshots`` method. + + Attributes: + snapshots (Sequence[str]): + The names of the snapshots that match the + request. + next_page_token (str): + If not empty, indicates that there may be more snapshots + that match the request; this value should be passed in a new + ``ListTopicSnapshotsRequest`` to get more snapshots. + """ + + @property + def raw_page(self): + return self + + snapshots = proto.RepeatedField(proto.STRING, number=1) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class DeleteTopicRequest(proto.Message): + r"""Request for the ``DeleteTopic`` method. + + Attributes: + topic (str): + Required. Name of the topic to delete. Format is + ``projects/{project}/topics/{topic}``. + """ + + topic = proto.Field(proto.STRING, number=1) + + +class DetachSubscriptionRequest(proto.Message): + r"""Request for the DetachSubscription method. + + Attributes: + subscription (str): + Required. The subscription to detach. Format is + ``projects/{project}/subscriptions/{subscription}``. 
+ """ + + subscription = proto.Field(proto.STRING, number=1) + + +class DetachSubscriptionResponse(proto.Message): + r"""Response for the DetachSubscription method. + Reserved for future use. + """ + + +class Subscription(proto.Message): + r"""A subscription resource. + + Attributes: + name (str): + Required. The name of the subscription. It must have the + format + ``"projects/{project}/subscriptions/{subscription}"``. + ``{subscription}`` must start with a letter, and contain + only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes + (``-``), underscores (``_``), periods (``.``), tildes + (``~``), plus (``+``) or percent signs (``%``). It must be + between 3 and 255 characters in length, and it must not + start with ``"goog"``. + topic (str): + Required. The name of the topic from which this subscription + is receiving messages. Format is + ``projects/{project}/topics/{topic}``. The value of this + field will be ``_deleted-topic_`` if the topic has been + deleted. + push_config (~.pubsub.PushConfig): + If push delivery is used with this subscription, this field + is used to configure it. An empty ``pushConfig`` signifies + that the subscriber will pull and ack messages using API + methods. + ack_deadline_seconds (int): + The approximate amount of time (on a best-effort basis) + Pub/Sub waits for the subscriber to acknowledge receipt + before resending the message. In the interval after the + message is delivered and before it is acknowledged, it is + considered to be outstanding. During that time period, the + message will not be redelivered (on a best-effort basis). + + For pull subscriptions, this value is used as the initial + value for the ack deadline. To override this value for a + given message, call ``ModifyAckDeadline`` with the + corresponding ``ack_id`` if using non-streaming pull or send + the ``ack_id`` in a ``StreamingModifyAckDeadlineRequest`` if + using streaming pull. The minimum custom deadline you can + specify is 10 seconds. 
The maximum custom deadline you can + specify is 600 seconds (10 minutes). If this parameter is 0, + a default value of 10 seconds is used. + + For push delivery, this value is also used to set the + request timeout for the call to the push endpoint. + + If the subscriber never acknowledges the message, the + Pub/Sub system will eventually redeliver the message. + retain_acked_messages (bool): + Indicates whether to retain acknowledged messages. If true, + then messages are not expunged from the subscription's + backlog, even if they are acknowledged, until they fall out + of the ``message_retention_duration`` window. This must be + true if you would like to [Seek to a timestamp] + (https://cloud.google.com/pubsub/docs/replay-overview#seek_to_a_time). + message_retention_duration (~.duration.Duration): + How long to retain unacknowledged messages in the + subscription's backlog, from the moment a message is + published. If ``retain_acked_messages`` is true, then this + also configures the retention of acknowledged messages, and + thus configures how far back in time a ``Seek`` can be done. + Defaults to 7 days. Cannot be more than 7 days or less than + 10 minutes. + labels (Sequence[~.pubsub.Subscription.LabelsEntry]): + See + Creating and managing labels. + enable_message_ordering (bool): + If true, messages published with the same ``ordering_key`` + in ``PubsubMessage`` will be delivered to the subscribers in + the order in which they are received by the Pub/Sub system. + Otherwise, they may be delivered in any order. + expiration_policy (~.pubsub.ExpirationPolicy): + A policy that specifies the conditions for this + subscription's expiration. A subscription is considered + active as long as any connected subscriber is successfully + consuming messages from the subscription or is issuing + operations on the subscription. If ``expiration_policy`` is + not set, a *default policy* with ``ttl`` of 31 days will be + used. 
The minimum allowed value for + ``expiration_policy.ttl`` is 1 day. + filter (str): + An expression written in the Pub/Sub `filter + language `__. + If non-empty, then only ``PubsubMessage``\ s whose + ``attributes`` field matches the filter are delivered on + this subscription. If empty, then no messages are filtered + out. + dead_letter_policy (~.pubsub.DeadLetterPolicy): + A policy that specifies the conditions for dead lettering + messages in this subscription. If dead_letter_policy is not + set, dead lettering is disabled. + + The Cloud Pub/Sub service account associated with this + subscriptions's parent project (i.e., + service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com) + must have permission to Acknowledge() messages on this + subscription. + retry_policy (~.pubsub.RetryPolicy): + A policy that specifies how Pub/Sub retries + message delivery for this subscription. + + If not set, the default retry policy is applied. + This generally implies that messages will be + retried as soon as possible for healthy + subscribers. RetryPolicy will be triggered on + NACKs or acknowledgement deadline exceeded + events for a given message. + detached (bool): + Indicates whether the subscription is detached from its + topic. Detached subscriptions don't receive messages from + their topic and don't retain any backlog. ``Pull`` and + ``StreamingPull`` requests will return FAILED_PRECONDITION. + If the subscription is a push subscription, pushes to the + endpoint will not be made. 
+ """ + + name = proto.Field(proto.STRING, number=1) + + topic = proto.Field(proto.STRING, number=2) + + push_config = proto.Field(proto.MESSAGE, number=4, message="PushConfig",) + + ack_deadline_seconds = proto.Field(proto.INT32, number=5) + + retain_acked_messages = proto.Field(proto.BOOL, number=7) + + message_retention_duration = proto.Field( + proto.MESSAGE, number=8, message=duration.Duration, + ) + + labels = proto.MapField(proto.STRING, proto.STRING, number=9) + + enable_message_ordering = proto.Field(proto.BOOL, number=10) + + expiration_policy = proto.Field( + proto.MESSAGE, number=11, message="ExpirationPolicy", + ) + + filter = proto.Field(proto.STRING, number=12) + + dead_letter_policy = proto.Field( + proto.MESSAGE, number=13, message="DeadLetterPolicy", + ) + + retry_policy = proto.Field(proto.MESSAGE, number=14, message="RetryPolicy",) + + detached = proto.Field(proto.BOOL, number=15) + + +class RetryPolicy(proto.Message): + r"""A policy that specifies how Cloud Pub/Sub retries message delivery. + + Retry delay will be exponential based on provided minimum and + maximum backoffs. https://en.wikipedia.org/wiki/Exponential_backoff. + + RetryPolicy will be triggered on NACKs or acknowledgement deadline + exceeded events for a given message. + + Retry Policy is implemented on a best effort basis. At times, the + delay between consecutive deliveries may not match the + configuration. That is, delay can be more or less than configured + backoff. + + Attributes: + minimum_backoff (~.duration.Duration): + The minimum delay between consecutive + deliveries of a given message. Value should be + between 0 and 600 seconds. Defaults to 10 + seconds. + maximum_backoff (~.duration.Duration): + The maximum delay between consecutive + deliveries of a given message. Value should be + between 0 and 600 seconds. Defaults to 600 + seconds. 
+ """ + + minimum_backoff = proto.Field(proto.MESSAGE, number=1, message=duration.Duration,) + + maximum_backoff = proto.Field(proto.MESSAGE, number=2, message=duration.Duration,) + + +class DeadLetterPolicy(proto.Message): + r"""Dead lettering is done on a best effort basis. The same + message might be dead lettered multiple times. + + If validation on any of the fields fails at subscription + creation/updation, the create/update subscription request will + fail. + + Attributes: + dead_letter_topic (str): + The name of the topic to which dead letter messages should + be published. Format is + ``projects/{project}/topics/{topic}``.The Cloud Pub/Sub + service account associated with the enclosing subscription's + parent project (i.e., + service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com) + must have permission to Publish() to this topic. + + The operation will fail if the topic does not exist. Users + should ensure that there is a subscription attached to this + topic since messages published to a topic with no + subscriptions are lost. + max_delivery_attempts (int): + The maximum number of delivery attempts for any message. The + value must be between 5 and 100. + + The number of delivery attempts is defined as 1 + (the sum + of number of NACKs and number of times the acknowledgement + deadline has been exceeded for the message). + + A NACK is any call to ModifyAckDeadline with a 0 deadline. + Note that client libraries may automatically extend + ack_deadlines. + + This field will be honored on a best effort basis. + + If this parameter is 0, a default value of 5 is used. + """ + + dead_letter_topic = proto.Field(proto.STRING, number=1) + + max_delivery_attempts = proto.Field(proto.INT32, number=2) + + +class ExpirationPolicy(proto.Message): + r"""A policy that specifies the conditions for resource + expiration (i.e., automatic resource deletion). 
+ + Attributes: + ttl (~.duration.Duration): + Specifies the "time-to-live" duration for an associated + resource. The resource expires if it is not active for a + period of ``ttl``. The definition of "activity" depends on + the type of the associated resource. The minimum and maximum + allowed values for ``ttl`` depend on the type of the + associated resource, as well. If ``ttl`` is not set, the + associated resource never expires. + """ + + ttl = proto.Field(proto.MESSAGE, number=1, message=duration.Duration,) + + +class PushConfig(proto.Message): + r"""Configuration for a push delivery endpoint. + + Attributes: + push_endpoint (str): + A URL locating the endpoint to which messages should be + pushed. For example, a Webhook endpoint might use + ``https://example.com/push``. + attributes (Sequence[~.pubsub.PushConfig.AttributesEntry]): + Endpoint configuration attributes that can be used to + control different aspects of the message delivery. + + The only currently supported attribute is + ``x-goog-version``, which you can use to change the format + of the pushed message. This attribute indicates the version + of the data expected by the endpoint. This controls the + shape of the pushed message (i.e., its fields and metadata). + + If not present during the ``CreateSubscription`` call, it + will default to the version of the Pub/Sub API used to make + such call. If not present in a ``ModifyPushConfig`` call, + its value will not be changed. ``GetSubscription`` calls + will always return a valid version, even if the subscription + was created without this attribute. + + The only supported values for the ``x-goog-version`` + attribute are: + + - ``v1beta1``: uses the push format defined in the v1beta1 + Pub/Sub API. + - ``v1`` or ``v1beta2``: uses the push format defined in + the v1 Pub/Sub API. + + For example: + + .. raw:: html + +
attributes { "x-goog-version": "v1" } 
+ oidc_token (~.pubsub.PushConfig.OidcToken): + If specified, Pub/Sub will generate and attach an OIDC JWT + token as an ``Authorization`` header in the HTTP request for + every pushed message. + """ + + class OidcToken(proto.Message): + r"""Contains information needed for generating an `OpenID Connect + token `__. + + Attributes: + service_account_email (str): + `Service account + email `__ + to be used for generating the OIDC token. The caller (for + CreateSubscription, UpdateSubscription, and ModifyPushConfig + RPCs) must have the iam.serviceAccounts.actAs permission for + the service account. + audience (str): + Audience to be used when generating OIDC + token. The audience claim identifies the + recipients that the JWT is intended for. The + audience value is a single case-sensitive + string. Having multiple values (array) for the + audience field is not supported. More info about + the OIDC JWT token audience here: + https://tools.ietf.org/html/rfc7519#section-4.1.3 + Note: if not specified, the Push endpoint URL + will be used. + """ + + service_account_email = proto.Field(proto.STRING, number=1) + + audience = proto.Field(proto.STRING, number=2) + + push_endpoint = proto.Field(proto.STRING, number=1) + + attributes = proto.MapField(proto.STRING, proto.STRING, number=2) + + oidc_token = proto.Field( + proto.MESSAGE, number=3, oneof="authentication_method", message=OidcToken, + ) + + +class ReceivedMessage(proto.Message): + r"""A message and its corresponding acknowledgment ID. + + Attributes: + ack_id (str): + This ID can be used to acknowledge the + received message. + message (~.pubsub.PubsubMessage): + The message. + delivery_attempt (int): + The approximate number of times that Cloud Pub/Sub has + attempted to deliver the associated message to a subscriber. + + More precisely, this is 1 + (number of NACKs) + (number of + ack_deadline exceeds) for this message. + + A NACK is any call to ModifyAckDeadline with a 0 deadline. 
+ An ack_deadline exceeds event is whenever a message is not + acknowledged within ack_deadline. Note that ack_deadline is + initially Subscription.ackDeadlineSeconds, but may get + extended automatically by the client library. + + Upon the first delivery of a given message, + ``delivery_attempt`` will have a value of 1. The value is + calculated at best effort and is approximate. + + If a DeadLetterPolicy is not set on the subscription, this + will be 0. + """ + + ack_id = proto.Field(proto.STRING, number=1) + + message = proto.Field(proto.MESSAGE, number=2, message=PubsubMessage,) + + delivery_attempt = proto.Field(proto.INT32, number=3) + + +class GetSubscriptionRequest(proto.Message): + r"""Request for the GetSubscription method. + + Attributes: + subscription (str): + Required. The name of the subscription to get. Format is + ``projects/{project}/subscriptions/{sub}``. + """ + + subscription = proto.Field(proto.STRING, number=1) + + +class UpdateSubscriptionRequest(proto.Message): + r"""Request for the UpdateSubscription method. + + Attributes: + subscription (~.pubsub.Subscription): + Required. The updated subscription object. + update_mask (~.field_mask.FieldMask): + Required. Indicates which fields in the + provided subscription to update. Must be + specified and non-empty. + """ + + subscription = proto.Field(proto.MESSAGE, number=1, message=Subscription,) + + update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + + +class ListSubscriptionsRequest(proto.Message): + r"""Request for the ``ListSubscriptions`` method. + + Attributes: + project (str): + Required. The name of the project in which to list + subscriptions. Format is ``projects/{project-id}``. + page_size (int): + Maximum number of subscriptions to return. 
+ page_token (str): + The value returned by the last + ``ListSubscriptionsResponse``; indicates that this is a + continuation of a prior ``ListSubscriptions`` call, and that + the system should return the next page of data. + """ + + project = proto.Field(proto.STRING, number=1) + + page_size = proto.Field(proto.INT32, number=2) + + page_token = proto.Field(proto.STRING, number=3) + + +class ListSubscriptionsResponse(proto.Message): + r"""Response for the ``ListSubscriptions`` method. + + Attributes: + subscriptions (Sequence[~.pubsub.Subscription]): + The subscriptions that match the request. + next_page_token (str): + If not empty, indicates that there may be more subscriptions + that match the request; this value should be passed in a new + ``ListSubscriptionsRequest`` to get more subscriptions. + """ + + @property + def raw_page(self): + return self + + subscriptions = proto.RepeatedField(proto.MESSAGE, number=1, message=Subscription,) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class DeleteSubscriptionRequest(proto.Message): + r"""Request for the DeleteSubscription method. + + Attributes: + subscription (str): + Required. The subscription to delete. Format is + ``projects/{project}/subscriptions/{sub}``. + """ + + subscription = proto.Field(proto.STRING, number=1) + + +class ModifyPushConfigRequest(proto.Message): + r"""Request for the ModifyPushConfig method. + + Attributes: + subscription (str): + Required. The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. + push_config (~.pubsub.PushConfig): + Required. The push configuration for future deliveries. + + An empty ``pushConfig`` indicates that the Pub/Sub system + should stop pushing messages from the given subscription and + allow messages to be pulled and acknowledged - effectively + pausing the subscription if ``Pull`` or ``StreamingPull`` is + not called. 
+ """ + + subscription = proto.Field(proto.STRING, number=1) + + push_config = proto.Field(proto.MESSAGE, number=2, message=PushConfig,) + + +class PullRequest(proto.Message): + r"""Request for the ``Pull`` method. + + Attributes: + subscription (str): + Required. The subscription from which messages should be + pulled. Format is + ``projects/{project}/subscriptions/{sub}``. + return_immediately (bool): + Optional. If this field set to true, the system will respond + immediately even if it there are no messages available to + return in the ``Pull`` response. Otherwise, the system may + wait (for a bounded amount of time) until at least one + message is available, rather than returning no messages. + Warning: setting this field to ``true`` is discouraged + because it adversely impacts the performance of ``Pull`` + operations. We recommend that users do not set this field. + max_messages (int): + Required. The maximum number of messages to + return for this request. Must be a positive + integer. The Pub/Sub system may return fewer + than the number specified. + """ + + subscription = proto.Field(proto.STRING, number=1) + + return_immediately = proto.Field(proto.BOOL, number=2) + + max_messages = proto.Field(proto.INT32, number=3) + + +class PullResponse(proto.Message): + r"""Response for the ``Pull`` method. + + Attributes: + received_messages (Sequence[~.pubsub.ReceivedMessage]): + Received Pub/Sub messages. The list will be empty if there + are no more messages available in the backlog. For JSON, the + response can be entirely empty. The Pub/Sub system may + return fewer than the ``maxMessages`` requested even if + there are more messages available in the backlog. + """ + + received_messages = proto.RepeatedField( + proto.MESSAGE, number=1, message=ReceivedMessage, + ) + + +class ModifyAckDeadlineRequest(proto.Message): + r"""Request for the ModifyAckDeadline method. + + Attributes: + subscription (str): + Required. The name of the subscription. 
Format is + ``projects/{project}/subscriptions/{sub}``. + ack_ids (Sequence[str]): + Required. List of acknowledgment IDs. + ack_deadline_seconds (int): + Required. The new ack deadline with respect to the time this + request was sent to the Pub/Sub system. For example, if the + value is 10, the new ack deadline will expire 10 seconds + after the ``ModifyAckDeadline`` call was made. Specifying + zero might immediately make the message available for + delivery to another subscriber client. This typically + results in an increase in the rate of message redeliveries + (that is, duplicates). The minimum deadline you can specify + is 0 seconds. The maximum deadline you can specify is 600 + seconds (10 minutes). + """ + + subscription = proto.Field(proto.STRING, number=1) + + ack_ids = proto.RepeatedField(proto.STRING, number=4) + + ack_deadline_seconds = proto.Field(proto.INT32, number=3) + + +class AcknowledgeRequest(proto.Message): + r"""Request for the Acknowledge method. + + Attributes: + subscription (str): + Required. The subscription whose message is being + acknowledged. Format is + ``projects/{project}/subscriptions/{sub}``. + ack_ids (Sequence[str]): + Required. The acknowledgment ID for the messages being + acknowledged that was returned by the Pub/Sub system in the + ``Pull`` response. Must not be empty. + """ + + subscription = proto.Field(proto.STRING, number=1) + + ack_ids = proto.RepeatedField(proto.STRING, number=2) + + +class StreamingPullRequest(proto.Message): + r"""Request for the ``StreamingPull`` streaming RPC method. This request + is used to establish the initial stream as well as to stream + acknowledgements and ack deadline modifications from the client to + the server. + + Attributes: + subscription (str): + Required. The subscription for which to initialize the new + stream. This must be provided in the first request on the + stream, and must not be set in subsequent requests from + client to server. 
Format is + ``projects/{project}/subscriptions/{sub}``. + ack_ids (Sequence[str]): + List of acknowledgement IDs for acknowledging previously + received messages (received on this stream or a different + stream). If an ack ID has expired, the corresponding message + may be redelivered later. Acknowledging a message more than + once will not result in an error. If the acknowledgement ID + is malformed, the stream will be aborted with status + ``INVALID_ARGUMENT``. + modify_deadline_seconds (Sequence[int]): + The list of new ack deadlines for the IDs listed in + ``modify_deadline_ack_ids``. The size of this list must be + the same as the size of ``modify_deadline_ack_ids``. If it + differs the stream will be aborted with + ``INVALID_ARGUMENT``. Each element in this list is applied + to the element in the same position in + ``modify_deadline_ack_ids``. The new ack deadline is with + respect to the time this request was sent to the Pub/Sub + system. Must be >= 0. For example, if the value is 10, the + new ack deadline will expire 10 seconds after this request + is received. If the value is 0, the message is immediately + made available for another streaming or non-streaming pull + request. If the value is < 0 (an error), the stream will be + aborted with status ``INVALID_ARGUMENT``. + modify_deadline_ack_ids (Sequence[str]): + List of acknowledgement IDs whose deadline will be modified + based on the corresponding element in + ``modify_deadline_seconds``. This field can be used to + indicate that more time is needed to process a message by + the subscriber, or to make the message available for + redelivery if the processing was interrupted. + stream_ack_deadline_seconds (int): + Required. The ack deadline to use for the + stream. This must be provided in the first + request on the stream, but it can also be + updated on subsequent requests from client to + server. The minimum deadline you can specify is + 10 seconds. 
The maximum deadline you can specify + is 600 seconds (10 minutes). + client_id (str): + A unique identifier that is used to distinguish client + instances from each other. Only needs to be provided on the + initial request. When a stream disconnects and reconnects + for the same stream, the client_id should be set to the same + value so that state associated with the old stream can be + transferred to the new stream. The same client_id should not + be used for different client instances. + max_outstanding_messages (int): + Flow control settings for the maximum number of outstanding + messages. When there are ``max_outstanding_messages`` or + more currently sent to the streaming pull client that have + not yet been acked or nacked, the server stops sending more + messages. The sending of messages resumes once the number of + outstanding messages is less than this value. If the value + is <= 0, there is no limit to the number of outstanding + messages. This property can only be set on the initial + StreamingPullRequest. If it is set on a subsequent request, + the stream will be aborted with status ``INVALID_ARGUMENT``. + max_outstanding_bytes (int): + Flow control settings for the maximum number of outstanding + bytes. When there are ``max_outstanding_bytes`` or more + worth of messages currently sent to the streaming pull + client that have not yet been acked or nacked, the server + will stop sending more messages. The sending of messages + resumes once the number of outstanding bytes is less than + this value. If the value is <= 0, there is no limit to the + number of outstanding bytes. This property can only be set + on the initial StreamingPullRequest. If it is set on a + subsequent request, the stream will be aborted with status + ``INVALID_ARGUMENT``. 
+ """ + + subscription = proto.Field(proto.STRING, number=1) + + ack_ids = proto.RepeatedField(proto.STRING, number=2) + + modify_deadline_seconds = proto.RepeatedField(proto.INT32, number=3) + + modify_deadline_ack_ids = proto.RepeatedField(proto.STRING, number=4) + + stream_ack_deadline_seconds = proto.Field(proto.INT32, number=5) + + client_id = proto.Field(proto.STRING, number=6) + + max_outstanding_messages = proto.Field(proto.INT64, number=7) + + max_outstanding_bytes = proto.Field(proto.INT64, number=8) + + +class StreamingPullResponse(proto.Message): + r"""Response for the ``StreamingPull`` method. This response is used to + stream messages from the server to the client. + + Attributes: + received_messages (Sequence[~.pubsub.ReceivedMessage]): + Received Pub/Sub messages. This will not be + empty. + """ + + received_messages = proto.RepeatedField( + proto.MESSAGE, number=1, message=ReceivedMessage, + ) + + +class CreateSnapshotRequest(proto.Message): + r"""Request for the ``CreateSnapshot`` method. + + Attributes: + name (str): + Required. User-provided name for this snapshot. If the name + is not provided in the request, the server will assign a + random name for this snapshot on the same project as the + subscription. Note that for REST API requests, you must + specify a name. See the resource name rules. Format is + ``projects/{project}/snapshots/{snap}``. + subscription (str): + Required. The subscription whose backlog the snapshot + retains. Specifically, the created snapshot is guaranteed to + retain: (a) The existing backlog on the subscription. More + precisely, this is defined as the messages in the + subscription's backlog that are unacknowledged upon the + successful completion of the ``CreateSnapshot`` request; as + well as: (b) Any messages published to the subscription's + topic following the successful completion of the + CreateSnapshot request. Format is + ``projects/{project}/subscriptions/{sub}``. 
+ labels (Sequence[~.pubsub.CreateSnapshotRequest.LabelsEntry]): + See + Creating and managing labels. + """ + + name = proto.Field(proto.STRING, number=1) + + subscription = proto.Field(proto.STRING, number=2) + + labels = proto.MapField(proto.STRING, proto.STRING, number=3) + + +class UpdateSnapshotRequest(proto.Message): + r"""Request for the UpdateSnapshot method. + + Attributes: + snapshot (~.pubsub.Snapshot): + Required. The updated snapshot object. + update_mask (~.field_mask.FieldMask): + Required. Indicates which fields in the + provided snapshot to update. Must be specified + and non-empty. + """ + + snapshot = proto.Field(proto.MESSAGE, number=1, message="Snapshot",) + + update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + + +class Snapshot(proto.Message): + r"""A snapshot resource. Snapshots are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages in + an existing subscription to the state captured by a snapshot. + + Attributes: + name (str): + The name of the snapshot. + topic (str): + The name of the topic from which this + snapshot is retaining messages. + expire_time (~.timestamp.Timestamp): + The snapshot is guaranteed to exist up until this time. A + newly-created snapshot expires no later than 7 days from the + time of its creation. Its exact lifetime is determined at + creation by the existing backlog in the source subscription. + Specifically, the lifetime of the snapshot is + ``7 days - (age of oldest unacked message in the subscription)``. + For example, consider a subscription whose oldest unacked + message is 3 days old. If a snapshot is created from this + subscription, the snapshot -- which will always capture this + 3-day-old backlog as long as the snapshot exists -- will + expire in 4 days. The service will refuse to create a + snapshot that would expire in less than 1 hour after + creation. 
+ labels (Sequence[~.pubsub.Snapshot.LabelsEntry]): + See [Creating and managing labels] + (https://cloud.google.com/pubsub/docs/labels). + """ + + name = proto.Field(proto.STRING, number=1) + + topic = proto.Field(proto.STRING, number=2) + + expire_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) + + labels = proto.MapField(proto.STRING, proto.STRING, number=4) + + +class GetSnapshotRequest(proto.Message): + r"""Request for the GetSnapshot method. + + Attributes: + snapshot (str): + Required. The name of the snapshot to get. Format is + ``projects/{project}/snapshots/{snap}``. + """ + + snapshot = proto.Field(proto.STRING, number=1) + + +class ListSnapshotsRequest(proto.Message): + r"""Request for the ``ListSnapshots`` method. + + Attributes: + project (str): + Required. The name of the project in which to list + snapshots. Format is ``projects/{project-id}``. + page_size (int): + Maximum number of snapshots to return. + page_token (str): + The value returned by the last ``ListSnapshotsResponse``; + indicates that this is a continuation of a prior + ``ListSnapshots`` call, and that the system should return + the next page of data. + """ + + project = proto.Field(proto.STRING, number=1) + + page_size = proto.Field(proto.INT32, number=2) + + page_token = proto.Field(proto.STRING, number=3) + + +class ListSnapshotsResponse(proto.Message): + r"""Response for the ``ListSnapshots`` method. + + Attributes: + snapshots (Sequence[~.pubsub.Snapshot]): + The resulting snapshots. + next_page_token (str): + If not empty, indicates that there may be more snapshot that + match the request; this value should be passed in a new + ``ListSnapshotsRequest``. + """ + + @property + def raw_page(self): + return self + + snapshots = proto.RepeatedField(proto.MESSAGE, number=1, message=Snapshot,) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class DeleteSnapshotRequest(proto.Message): + r"""Request for the ``DeleteSnapshot`` method. 
+ + Attributes: + snapshot (str): + Required. The name of the snapshot to delete. Format is + ``projects/{project}/snapshots/{snap}``. + """ + + snapshot = proto.Field(proto.STRING, number=1) + + +class SeekRequest(proto.Message): + r"""Request for the ``Seek`` method. + + Attributes: + subscription (str): + Required. The subscription to affect. + time (~.timestamp.Timestamp): + The time to seek to. Messages retained in the subscription + that were published before this time are marked as + acknowledged, and messages retained in the subscription that + were published after this time are marked as unacknowledged. + Note that this operation affects only those messages + retained in the subscription (configured by the combination + of ``message_retention_duration`` and + ``retain_acked_messages``). For example, if ``time`` + corresponds to a point before the message retention window + (or to a point before the system's notion of the + subscription creation time), only retained messages will be + marked as unacknowledged, and already-expunged messages will + not be restored. + snapshot (str): + The snapshot to seek to. The snapshot's topic must be the + same as that of the provided subscription. Format is + ``projects/{project}/snapshots/{snap}``. 
+ """ + + subscription = proto.Field(proto.STRING, number=1) + + time = proto.Field( + proto.MESSAGE, number=2, oneof="target", message=timestamp.Timestamp, + ) + + snapshot = proto.Field(proto.STRING, number=3, oneof="target") + + +class SeekResponse(proto.Message): + r"""Response for the ``Seek`` method (this response is empty).""" + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 000000000..4505b4854 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.6 +namespace_packages = True diff --git a/noxfile.py b/noxfile.py index 615358c2e..09e7acbda 100644 --- a/noxfile.py +++ b/noxfile.py @@ -27,8 +27,8 @@ BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.8"] -UNIT_TEST_PYTHON_VERSIONS = ["2.7", "3.5", "3.6", "3.7", "3.8"] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"] @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -70,6 +70,8 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. + session.install("asyncmock", "pytest-asyncio") + session.install("mock", "pytest", "pytest-cov") session.install("-e", ".") @@ -100,6 +102,10 @@ def system(session): """Run the system test suite.""" system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") # Sanity check: Only run tests if the environment variable is set. 
if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): session.skip("Credentials must be set via environment variable") @@ -158,3 +164,36 @@ def docs(session): os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install("sphinx", "alabaster", "recommonmark", "sphinx-docfx-yaml") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) diff --git a/samples/snippets/README.rst b/samples/snippets/README.rst index 2c67c2c11..8c2c31a8e 100644 --- a/samples/snippets/README.rst +++ b/samples/snippets/README.rst @@ -1,3 +1,4 @@ + .. This file is automatically generated. Do not edit this file directly. Google Cloud Pub/Sub Python Samples @@ -15,13 +16,11 @@ This directory contains samples for Google Cloud Pub/Sub. `Google Cloud Pub/Sub` .. _Google Cloud Pub/Sub: https://cloud.google.com/pubsub/docs - - - Setup ------------------------------------------------------------------------------- + Authentication ++++++++++++++ @@ -32,6 +31,9 @@ credentials for applications. .. _Authentication Getting Started Guide: https://cloud.google.com/docs/authentication/getting-started + + + Install Dependencies ++++++++++++++++++++ @@ -62,9 +64,15 @@ Install Dependencies .. _pip: https://pip.pypa.io/ .. 
_virtualenv: https://virtualenv.pypa.io/ + + + + + Samples ------------------------------------------------------------------------------- + Quickstart (Publisher) +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -81,6 +89,8 @@ To run this sample: $ python quickstart/pub.py + + Quickstart (Subscriber) +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -97,6 +107,8 @@ To run this sample: $ python quickstart/sub.py + + Publisher +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -110,7 +122,8 @@ To run this sample: .. code-block:: bash - $ python publisher.py --help + $ python publisher.py + usage: publisher.py [-h] project_id @@ -152,6 +165,8 @@ To run this sample: + + Subscribers +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -165,7 +180,8 @@ To run this sample: .. code-block:: bash - $ python subscriber.py --help + $ python subscriber.py + usage: subscriber.py [-h] project_id @@ -215,6 +231,9 @@ To run this sample: -h, --help show this help message and exit + + + Identity and Access Management +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -227,15 +246,21 @@ Identity and Access Management To run this sample: .. code-block:: bash + $ python iam.py + + usage: iam.py [-h] project {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions} ... + This application demonstrates how to perform basic operations on IAM policies with the Cloud Pub/Sub API. + For more information, see the README.md under /pubsub and the documentation at https://cloud.google.com/pubsub/docs. 
+ positional arguments: project Your Google Cloud project ID {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions} @@ -251,10 +276,18 @@ To run this sample: check-subscription-permissions Checks to which permissions are available on the given subscription. + optional arguments: -h, --help show this help message and exit + + + + + + + The client library ------------------------------------------------------------------------------- @@ -270,4 +303,5 @@ to `browse the source`_ and `report issues`_. https://github.com/GoogleCloudPlatform/google-cloud-python/issues + .. _Google Cloud SDK: https://cloud.google.com/sdk/ diff --git a/samples/snippets/iam.py b/samples/snippets/iam.py index 71c55d764..71ee5da1b 100644 --- a/samples/snippets/iam.py +++ b/samples/snippets/iam.py @@ -36,7 +36,7 @@ def get_topic_policy(project, topic_id): client = pubsub_v1.PublisherClient() topic_path = client.topic_path(project, topic_id) - policy = client.get_iam_policy(topic_path) + policy = client.get_iam_policy(request={"resource": topic_path}) print("Policy for topic {}:".format(topic_path)) for binding in policy.bindings: @@ -56,7 +56,7 @@ def get_subscription_policy(project, subscription_id): client = pubsub_v1.SubscriberClient() subscription_path = client.subscription_path(project, subscription_id) - policy = client.get_iam_policy(subscription_path) + policy = client.get_iam_policy(request={"resource": subscription_path}) print("Policy for subscription {}:".format(subscription_path)) for binding in policy.bindings: @@ -78,7 +78,7 @@ def set_topic_policy(project, topic_id): client = pubsub_v1.PublisherClient() topic_path = client.topic_path(project, topic_id) - policy = client.get_iam_policy(topic_path) + policy = client.get_iam_policy(request={"resource": topic_path}) # Add all users as viewers. 
policy.bindings.add(role="roles/pubsub.viewer", members=["allUsers"]) @@ -89,7 +89,7 @@ def set_topic_policy(project, topic_id): ) # Set the policy - policy = client.set_iam_policy(topic_path, policy) + policy = client.set_iam_policy(request={"resource": topic_path, "policy": policy}) print("IAM policy for topic {} set: {}".format(topic_id, policy)) # [END pubsub_set_topic_policy] @@ -107,7 +107,7 @@ def set_subscription_policy(project, subscription_id): client = pubsub_v1.SubscriberClient() subscription_path = client.subscription_path(project, subscription_id) - policy = client.get_iam_policy(subscription_path) + policy = client.get_iam_policy(request={"resource": subscription_path}) # Add all users as viewers. policy.bindings.add(role="roles/pubsub.viewer", members=["allUsers"]) @@ -116,7 +116,9 @@ def set_subscription_policy(project, subscription_id): policy.bindings.add(role="roles/editor", members=["group:cloud-logs@google.com"]) # Set the policy - policy = client.set_iam_policy(subscription_path, policy) + policy = client.set_iam_policy( + request={"resource": subscription_path, "policy": policy} + ) print("IAM policy for subscription {} set: {}".format(subscription_id, policy)) @@ -138,7 +140,9 @@ def check_topic_permissions(project, topic_id): permissions_to_check = ["pubsub.topics.publish", "pubsub.topics.update"] - allowed_permissions = client.test_iam_permissions(topic_path, permissions_to_check) + allowed_permissions = client.test_iam_permissions( + request={"resource": topic_path, "permissions": permissions_to_check} + ) print( "Allowed permissions for topic {}: {}".format(topic_path, allowed_permissions) @@ -164,7 +168,7 @@ def check_subscription_permissions(project, subscription_id): ] allowed_permissions = client.test_iam_permissions( - subscription_path, permissions_to_check + request={"resource": subscription_path, "permissions": permissions_to_check} ) print( diff --git a/samples/snippets/iam_test.py b/samples/snippets/iam_test.py index 
d196953f6..17bfdb256 100644 --- a/samples/snippets/iam_test.py +++ b/samples/snippets/iam_test.py @@ -36,15 +36,15 @@ def topic(publisher_client): topic_path = publisher_client.topic_path(PROJECT, TOPIC) try: - publisher_client.delete_topic(topic_path) + publisher_client.delete_topic(request={"topic": topic_path}) except Exception: pass - publisher_client.create_topic(topic_path) + publisher_client.create_topic(request={"name": topic_path}) yield topic_path - publisher_client.delete_topic(topic_path) + publisher_client.delete_topic(request={"topic": topic_path}) @pytest.fixture(scope="module") @@ -59,15 +59,19 @@ def subscription(subscriber_client, topic): subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION) try: - subscriber_client.delete_subscription(subscription_path) + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) except Exception: pass - subscriber_client.create_subscription(subscription_path, topic=topic) + subscriber_client.create_subscription( + request={"name": subscription_path, "topic": topic} + ) yield subscription_path - subscriber_client.delete_subscription(subscription_path) + subscriber_client.delete_subscription(request={"subscription": subscription_path}) def test_get_topic_policy(topic, capsys): @@ -87,7 +91,7 @@ def test_get_subscription_policy(subscription, capsys): def test_set_topic_policy(publisher_client, topic): iam.set_topic_policy(PROJECT, TOPIC) - policy = publisher_client.get_iam_policy(topic) + policy = publisher_client.get_iam_policy(request={"resource": topic}) assert "roles/pubsub.publisher" in str(policy) assert "allUsers" in str(policy) @@ -95,7 +99,7 @@ def test_set_topic_policy(publisher_client, topic): def test_set_subscription_policy(subscriber_client, subscription): iam.set_subscription_policy(PROJECT, SUBSCRIPTION) - policy = subscriber_client.get_iam_policy(subscription) + policy = subscriber_client.get_iam_policy(request={"resource": subscription}) 
assert "roles/pubsub.viewer" in str(policy) assert "allUsers" in str(policy) diff --git a/samples/snippets/publisher.py b/samples/snippets/publisher.py index 399d37679..d1b760280 100644 --- a/samples/snippets/publisher.py +++ b/samples/snippets/publisher.py @@ -33,9 +33,9 @@ def list_topics(project_id): # project_id = "your-project-id" publisher = pubsub_v1.PublisherClient() - project_path = publisher.project_path(project_id) + project_path = f"projects/{project_id}" - for topic in publisher.list_topics(project_path): + for topic in publisher.list_topics(request={"project": project_path}): print(topic) # [END pubsub_list_topics] @@ -53,7 +53,7 @@ def create_topic(project_id, topic_id): publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project_id, topic_id) - topic = publisher.create_topic(topic_path) + topic = publisher.create_topic(request={"name": topic_path}) print("Topic created: {}".format(topic)) # [END pubsub_quickstart_create_topic] @@ -72,7 +72,7 @@ def delete_topic(project_id, topic_id): publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project_id, topic_id) - publisher.delete_topic(topic_path) + publisher.delete_topic(request={"topic": topic_path}) print("Topic deleted: {}".format(topic_path)) # [END pubsub_delete_topic] @@ -94,11 +94,11 @@ def publish_messages(project_id, topic_id): topic_path = publisher.topic_path(project_id, topic_id) for n in range(1, 10): - data = u"Message number {}".format(n) + data = "Message number {}".format(n) # Data must be a bytestring data = data.encode("utf-8") # When you publish a message, the client returns a future. 
- future = publisher.publish(topic_path, data=data) + future = publisher.publish(topic_path, data) print(future.result()) print("Published messages.") @@ -120,7 +120,7 @@ def publish_messages_with_custom_attributes(project_id, topic_id): topic_path = publisher.topic_path(project_id, topic_id) for n in range(1, 10): - data = u"Message number {}".format(n) + data = "Message number {}".format(n) # Data must be a bytestring data = data.encode("utf-8") # Add two attributes, origin and username, to the message @@ -163,9 +163,7 @@ def callback(f): data = str(i) futures.update({data: None}) # When you publish a message, the client returns a future. - future = publisher.publish( - topic_path, data=data.encode("utf-8") # data must be a bytestring. - ) + future = publisher.publish(topic_path, data.encode("utf-8")) futures[data] = future # Publish failures shall be handled in the callback function. future.add_done_callback(get_callback(future, data)) @@ -203,10 +201,10 @@ def callback(future): print(message_id) for n in range(1, 10): - data = u"Message number {}".format(n) + data = "Message number {}".format(n) # Data must be a bytestring data = data.encode("utf-8") - future = publisher.publish(topic_path, data=data) + future = publisher.publish(topic_path, data) # Non-blocking. Allow the publisher client to batch multiple messages. future.add_done_callback(callback) @@ -217,56 +215,38 @@ def callback(future): def publish_messages_with_retry_settings(project_id, topic_id): """Publishes messages with custom retry settings.""" # [START pubsub_publisher_retry_settings] + from google import api_core from google.cloud import pubsub_v1 # TODO(developer) # project_id = "your-project-id" # topic_id = "your-topic-id" - # Configure the retry settings. Defaults will be overwritten. 
- retry_settings = { - "interfaces": { - "google.pubsub.v1.Publisher": { - "retry_codes": { - "publish": [ - "ABORTED", - "CANCELLED", - "DEADLINE_EXCEEDED", - "INTERNAL", - "RESOURCE_EXHAUSTED", - "UNAVAILABLE", - "UNKNOWN", - ] - }, - "retry_params": { - "messaging": { - "initial_retry_delay_millis": 100, # default: 100 - "retry_delay_multiplier": 1.3, # default: 1.3 - "max_retry_delay_millis": 60000, # default: 60000 - "initial_rpc_timeout_millis": 5000, # default: 25000 - "rpc_timeout_multiplier": 1.0, # default: 1.0 - "max_rpc_timeout_millis": 600000, # default: 30000 - "total_timeout_millis": 600000, # default: 600000 - } - }, - "methods": { - "Publish": { - "retry_codes_name": "publish", - "retry_params_name": "messaging", - } - }, - } - } - } - - publisher = pubsub_v1.PublisherClient(client_config=retry_settings) + # Configure the retry settings. + custom_retry = api_core.retry.Retry( + initial=0.250, # seconds (default: 0.1) + maximum=90.0, # seconds (default: 60.0) + multiplier=1.45, # default: 1.3 + deadline=300.0, # seconds (default: 600.0) + predicate=api_core.retry.if_exception_type( + api_core.exceptions.Aborted, + api_core.exceptions.DeadlineExceeded, + api_core.exceptions.InternalServerError, + api_core.exceptions.ResourceExhausted, + api_core.exceptions.ServiceUnavailable, + api_core.exceptions.Unknown, + api_core.exceptions.Cancelled, + ), + ) + + publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project_id, topic_id) for n in range(1, 10): - data = u"Message number {}".format(n) + data = "Message number {}".format(n) # Data must be a bytestring data = data.encode("utf-8") - future = publisher.publish(topic_path, data=data) + future = publisher.publish(topic=topic_path, data=data, retry=custom_retry) print(future.result()) print("Published messages with retry settings.") diff --git a/samples/snippets/publisher_test.py b/samples/snippets/publisher_test.py index 95fda846a..c95ea7ed5 100644 --- 
a/samples/snippets/publisher_test.py +++ b/samples/snippets/publisher_test.py @@ -39,9 +39,9 @@ def topic_admin(client): topic_path = client.topic_path(PROJECT, TOPIC_ADMIN) try: - topic = client.get_topic(topic_path) + topic = client.get_topic(request={"topic": topic_path}) except: # noqa - topic = client.create_topic(topic_path) + topic = client.create_topic(request={"name": topic_path}) yield topic.name # Teardown of `topic_admin` is handled in `test_delete()`. @@ -52,13 +52,13 @@ def topic_publish(client): topic_path = client.topic_path(PROJECT, TOPIC_PUBLISH) try: - topic = client.get_topic(topic_path) + topic = client.get_topic(request={"topic": topic_path}) except: # noqa - topic = client.create_topic(topic_path) + topic = client.create_topic(request={"name": topic_path}) yield topic.name - client.delete_topic(topic.name) + client.delete_topic(request={"topic": topic.name}) def _make_sleep_patch(): @@ -87,7 +87,7 @@ def eventually_consistent_test(): def test_create(client): topic_path = client.topic_path(PROJECT, TOPIC_ADMIN) try: - client.delete_topic(topic_path) + client.delete_topic(request={"topic": topic_path}) except Exception: pass @@ -95,7 +95,7 @@ def test_create(client): @backoff.on_exception(backoff.expo, AssertionError, max_time=60) def eventually_consistent_test(): - assert client.get_topic(topic_path) + assert client.get_topic(request={"topic": topic_path}) eventually_consistent_test() @@ -106,7 +106,7 @@ def test_delete(client, topic_admin): @backoff.on_exception(backoff.expo, AssertionError, max_time=60) def eventually_consistent_test(): with pytest.raises(Exception): - client.get_topic(client.topic_path(PROJECT, TOPIC_ADMIN)) + client.get_topic(request={"topic": client.topic_path(PROJECT, TOPIC_ADMIN)}) eventually_consistent_test() diff --git a/samples/snippets/quickstart/pub.py b/samples/snippets/quickstart/pub.py index 16432c0c3..8585711f3 100644 --- a/samples/snippets/quickstart/pub.py +++ b/samples/snippets/quickstart/pub.py @@ -63,7 
+63,7 @@ def pub(project_id, topic_id): ref = dict({"num_messages": 0}) # When you publish a message, the client returns a future. - api_future = client.publish(topic_path, data=data) + api_future = client.publish(topic_path, data) api_future.add_done_callback(get_callback(api_future, data, ref)) # Keep the main thread from exiting while the message future diff --git a/samples/snippets/quickstart/pub_test.py b/samples/snippets/quickstart/pub_test.py index 6f5cc06c4..0be087bd2 100644 --- a/samples/snippets/quickstart/pub_test.py +++ b/samples/snippets/quickstart/pub_test.py @@ -39,13 +39,13 @@ def topic(publisher_client): topic_path = publisher_client.topic_path(PROJECT, TOPIC) try: - publisher_client.create_topic(topic_path) + publisher_client.create_topic(request={"name": topic_path}) except AlreadyExists: pass yield TOPIC - publisher_client.delete_topic(topic_path) + publisher_client.delete_topic(request={"topic": topic_path}) def test_pub(publisher_client, topic, capsys): diff --git a/samples/snippets/quickstart/sub_test.py b/samples/snippets/quickstart/sub_test.py index 38047422a..089705af6 100644 --- a/samples/snippets/quickstart/sub_test.py +++ b/samples/snippets/quickstart/sub_test.py @@ -38,12 +38,12 @@ def topic_path(): topic_path = publisher_client.topic_path(PROJECT, TOPIC) try: - topic = publisher_client.create_topic(topic_path) + topic = publisher_client.create_topic(request={"name": topic_path}) yield topic.name except AlreadyExists: yield topic_path - publisher_client.delete_topic(topic_path) + publisher_client.delete_topic(request={"topic": topic_path}) @pytest.fixture(scope="module") @@ -52,18 +52,18 @@ def subscription_path(topic_path): try: subscription = subscriber_client.create_subscription( - subscription_path, topic_path + request={"name": subscription_path, "topic": topic_path} ) yield subscription.name except AlreadyExists: yield subscription_path - subscriber_client.delete_subscription(subscription_path) + 
subscriber_client.delete_subscription(request={"subscription": subscription_path}) subscriber_client.close() def _publish_messages(topic_path): - publish_future = publisher_client.publish(topic_path, data=b"Hello World!") + publish_future = publisher_client.publish(topic_path, b"Hello World!") publish_future.result() diff --git a/samples/snippets/subscriber.py b/samples/snippets/subscriber.py index 94e1c5cd4..39a05ef37 100644 --- a/samples/snippets/subscriber.py +++ b/samples/snippets/subscriber.py @@ -36,7 +36,8 @@ def list_subscriptions_in_topic(project_id, topic_id): publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project_id, topic_id) - for subscription in publisher.list_topic_subscriptions(topic_path): + response = publisher.list_topic_subscriptions(request={"topic": topic_path}) + for subscription in response: print(subscription) # [END pubsub_list_topic_subscriptions] @@ -50,12 +51,14 @@ def list_subscriptions_in_project(project_id): # project_id = "your-project-id" subscriber = pubsub_v1.SubscriberClient() - project_path = subscriber.project_path(project_id) + project_path = f"projects/{project_id}" # Wrap the subscriber in a 'with' block to automatically call close() to # close the underlying gRPC channel when done. 
with subscriber: - for subscription in subscriber.list_subscriptions(project_path): + for subscription in subscriber.list_subscriptions( + request={"project": project_path} + ): print(subscription.name) # [END pubsub_list_subscriptions] @@ -70,14 +73,17 @@ def create_subscription(project_id, topic_id, subscription_id): # topic_id = "your-topic-id" # subscription_id = "your-subscription-id" + publisher = pubsub_v1.PublisherClient() subscriber = pubsub_v1.SubscriberClient() - topic_path = subscriber.topic_path(project_id, topic_id) + topic_path = publisher.topic_path(project_id, topic_id) subscription_path = subscriber.subscription_path(project_id, subscription_id) # Wrap the subscriber in a 'with' block to automatically call close() to # close the underlying gRPC channel when done. with subscriber: - subscription = subscriber.create_subscription(subscription_path, topic_path) + subscription = subscriber.create_subscription( + request={"name": subscription_path, "topic": topic_path} + ) print("Subscription created: {}".format(subscription)) # [END pubsub_create_pull_subscription] @@ -103,19 +109,24 @@ def create_subscription_with_dead_letter_topic( # with dead letter policy will forward dead letter messages to. 
# dead_letter_topic_id = "your-dead-letter-topic-id" + publisher = pubsub_v1.PublisherClient() subscriber = pubsub_v1.SubscriberClient() - topic_path = subscriber.topic_path(project_id, topic_id) + + topic_path = publisher.topic_path(project_id, topic_id) subscription_path = subscriber.subscription_path(project_id, subscription_id) - dead_letter_topic_path = subscriber.topic_path(project_id, dead_letter_topic_id) + dead_letter_topic_path = publisher.topic_path(project_id, dead_letter_topic_id) dead_letter_policy = DeadLetterPolicy( dead_letter_topic=dead_letter_topic_path, max_delivery_attempts=10 ) with subscriber: - subscription = subscriber.create_subscription( - subscription_path, topic_path, dead_letter_policy=dead_letter_policy - ) + request = { + "name": subscription_path, + "topic": topic_path, + "dead_letter_policy": dead_letter_policy, + } + subscription = subscriber.create_subscription(request) print("Subscription created: {}".format(subscription.name)) print( @@ -142,8 +153,9 @@ def create_push_subscription(project_id, topic_id, subscription_id, endpoint): # subscription_id = "your-subscription-id" # endpoint = "https://my-test-project.appspot.com/push" + publisher = pubsub_v1.PublisherClient() subscriber = pubsub_v1.SubscriberClient() - topic_path = subscriber.topic_path(project_id, topic_id) + topic_path = publisher.topic_path(project_id, topic_id) subscription_path = subscriber.subscription_path(project_id, subscription_id) push_config = pubsub_v1.types.PushConfig(push_endpoint=endpoint) @@ -152,7 +164,11 @@ def create_push_subscription(project_id, topic_id, subscription_id, endpoint): # close the underlying gRPC channel when done. 
with subscriber: subscription = subscriber.create_subscription( - subscription_path, topic_path, push_config + request={ + "name": subscription_path, + "topic": topic_path, + "push_config": push_config, + } ) print("Push subscription created: {}".format(subscription)) @@ -170,13 +186,18 @@ def create_subscription_with_ordering(project_id, topic_id, subscription_id): # topic_id = "your-topic-id" # subscription_id = "your-subscription-id" + publisher = pubsub_v1.PublisherClient() subscriber = pubsub_v1.SubscriberClient() - topic_path = subscriber.topic_path(project_id, topic_id) + topic_path = publisher.topic_path(project_id, topic_id) subscription_path = subscriber.subscription_path(project_id, subscription_id) with subscriber: subscription = subscriber.create_subscription( - subscription_path, topic_path, enable_message_ordering=True + request={ + "name": subscription_path, + "topic": topic_path, + "enable_message_ordering": True, + } ) print("Created subscription with ordering: {}".format(subscription)) # [END pubsub_enable_subscription_ordering] @@ -197,7 +218,7 @@ def delete_subscription(project_id, subscription_id): # Wrap the subscriber in a 'with' block to automatically call close() to # close the underlying gRPC channel when done. with subscriber: - subscriber.delete_subscription(subscription_path) + subscriber.delete_subscription(request={"subscription": subscription_path}) print("Subscription deleted: {}".format(subscription_path)) # [END pubsub_delete_subscription] @@ -232,7 +253,9 @@ def update_push_subscription(project_id, topic_id, subscription_id, endpoint): # Wrap the subscriber in a 'with' block to automatically call close() to # close the underlying gRPC channel when done. 
with subscriber: - result = subscriber.update_subscription(subscription, update_mask) + result = subscriber.update_subscription( + request={"subscription": subscription, "update_mask": update_mask} + ) print("Subscription updated: {}".format(subscription_path)) print("New endpoint for subscription is: {}".format(result.push_config)) @@ -258,12 +281,16 @@ def update_subscription_with_dead_letter_policy( # with dead letter policy will forward dead letter messages to. # dead_letter_topic_id = "your-dead-letter-topic-id" + publisher = pubsub_v1.PublisherClient() subscriber = pubsub_v1.SubscriberClient() - topic_path = subscriber.topic_path(project_id, topic_id) + + topic_path = publisher.topic_path(project_id, topic_id) subscription_path = subscriber.subscription_path(project_id, subscription_id) - dead_letter_topic_path = subscriber.topic_path(project_id, dead_letter_topic_id) + dead_letter_topic_path = publisher.topic_path(project_id, dead_letter_topic_id) - subscription_before_update = subscriber.get_subscription(subscription_path) + subscription_before_update = subscriber.get_subscription( + request={"subscription": subscription_path} + ) print("Before the update: {}".format(subscription_before_update)) # Indicates which fields in the provided subscription to update. @@ -283,7 +310,7 @@ def update_subscription_with_dead_letter_policy( with subscriber: subscription_after_update = subscriber.update_subscription( - subscription, update_mask + request={"subscription": subscription, "update_mask": update_mask} ) print("After the update: {}".format(subscription_after_update)) @@ -305,11 +332,14 @@ def remove_dead_letter_policy(project_id, topic_id, subscription_id): # TODO(developer): This is an existing subscription with a dead letter policy. 
# subscription_id = "your-subscription-id" + publisher = pubsub_v1.PublisherClient() subscriber = pubsub_v1.SubscriberClient() - topic_path = subscriber.topic_path(project_id, topic_id) + topic_path = publisher.topic_path(project_id, topic_id) subscription_path = subscriber.subscription_path(project_id, subscription_id) - subscription_before_update = subscriber.get_subscription(subscription_path) + subscription_before_update = subscriber.get_subscription( + request={"subscription": subscription_path} + ) print("Before removing the policy: {}".format(subscription_before_update)) # Indicates which fields in the provided subscription to update. @@ -328,7 +358,7 @@ def remove_dead_letter_policy(project_id, topic_id, subscription_id): with subscriber: subscription_after_update = subscriber.update_subscription( - subscription, update_mask + request={"subscription": subscription, "update_mask": update_mask} ) print("After removing the policy: {}".format(subscription_after_update)) @@ -467,7 +497,9 @@ def synchronous_pull(project_id, subscription_id): # close the underlying gRPC channel when done. with subscriber: # The subscriber pulls a specific number of messages. - response = subscriber.pull(subscription_path, max_messages=NUM_MESSAGES) + response = subscriber.pull( + request={"subscription": subscription_path, "max_messages": NUM_MESSAGES} + ) ack_ids = [] for received_message in response.received_messages: @@ -475,7 +507,9 @@ def synchronous_pull(project_id, subscription_id): ack_ids.append(received_message.ack_id) # Acknowledges the received messages so they will not be sent again. - subscriber.acknowledge(subscription_path, ack_ids) + subscriber.acknowledge( + request={"subscription": subscription_path, "ack_ids": ack_ids} + ) print( "Received and acknowledged {} messages. Done.".format( @@ -507,7 +541,9 @@ def synchronous_pull_with_lease_management(project_id, subscription_id): SLEEP_TIME = 10 # The subscriber pulls a specific number of messages. 
- response = subscriber.pull(subscription_path, max_messages=NUM_MESSAGES) + response = subscriber.pull( + request={"subscription": subscription_path, "max_messages": NUM_MESSAGES} + ) multiprocessing.log_to_stderr() logger = multiprocessing.get_logger() @@ -539,7 +575,11 @@ def worker(msg): if process.is_alive(): # `ack_deadline_seconds` must be between 10 to 600. subscriber.modify_ack_deadline( - subscription_path, [ack_id], ack_deadline_seconds=ACK_DEADLINE, + request={ + "subscription": subscription_path, + "ack_ids": [ack_id], + "ack_deadline_seconds": ACK_DEADLINE, + } ) logger.info( "{}: Reset ack deadline for {} for {}s".format( @@ -549,7 +589,9 @@ def worker(msg): # If the processs is finished, acknowledges using `ack_id`. else: - subscriber.acknowledge(subscription_path, [ack_id]) + subscriber.acknowledge( + request={"subscription": subscription_path, "ack_ids": [ack_id]} + ) logger.info( "{}: Acknowledged {}".format( time.strftime("%X", time.gmtime()), msg_data diff --git a/samples/snippets/subscriber_test.py b/samples/snippets/subscriber_test.py index 62018e9a9..37b83b877 100644 --- a/samples/snippets/subscriber_test.py +++ b/samples/snippets/subscriber_test.py @@ -44,13 +44,13 @@ def topic(publisher_client): topic_path = publisher_client.topic_path(PROJECT, TOPIC) try: - topic = publisher_client.get_topic(topic_path) + topic = publisher_client.get_topic(request={"topic": topic_path}) except: # noqa - topic = publisher_client.create_topic(topic_path) + topic = publisher_client.create_topic(request={"name": topic_path}) yield topic.name - publisher_client.delete_topic(topic.name) + publisher_client.delete_topic(request={"topic": topic.name}) @pytest.fixture(scope="module") @@ -58,13 +58,13 @@ def dead_letter_topic(publisher_client): topic_path = publisher_client.topic_path(PROJECT, DEAD_LETTER_TOPIC) try: - dead_letter_topic = publisher_client.get_topic(topic_path) + dead_letter_topic = publisher_client.get_topic(request={"topic": topic_path}) except: # 
noqa - dead_letter_topic = publisher_client.create_topic(topic_path) + dead_letter_topic = publisher_client.create_topic(request={"name": topic_path}) yield dead_letter_topic.name - publisher_client.delete_topic(dead_letter_topic.name) + publisher_client.delete_topic(request={"topic": dead_letter_topic.name}) @pytest.fixture(scope="module") @@ -79,10 +79,12 @@ def subscription_admin(subscriber_client, topic): subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ADMIN) try: - subscription = subscriber_client.get_subscription(subscription_path) + subscription = subscriber_client.get_subscription( + request={"subscription": subscription_path} + ) except: # noqa subscription = subscriber_client.create_subscription( - subscription_path, topic=topic + request={"name": subscription_path, "topic": topic} ) yield subscription.name @@ -93,15 +95,17 @@ def subscription_sync(subscriber_client, topic): subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_SYNC) try: - subscription = subscriber_client.get_subscription(subscription_path) + subscription = subscriber_client.get_subscription( + request={"subscription": subscription_path} + ) except: # noqa subscription = subscriber_client.create_subscription( - subscription_path, topic=topic + request={"name": subscription_path, "topic": topic} ) yield subscription.name - subscriber_client.delete_subscription(subscription.name) + subscriber_client.delete_subscription(request={"subscription": subscription.name}) @pytest.fixture(scope="module") @@ -109,15 +113,17 @@ def subscription_async(subscriber_client, topic): subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ASYNC) try: - subscription = subscriber_client.get_subscription(subscription_path) + subscription = subscriber_client.get_subscription( + request={"subscription": subscription_path} + ) except: # noqa subscription = subscriber_client.create_subscription( - subscription_path, topic=topic + 
request={"name": subscription_path, "topic": topic} ) yield subscription.name - subscriber_client.delete_subscription(subscription.name) + subscriber_client.delete_subscription(request={"subscription": subscription.name}) @pytest.fixture(scope="module") @@ -125,15 +131,17 @@ def subscription_dlq(subscriber_client, topic): subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_DLQ) try: - subscription = subscriber_client.get_subscription(subscription_path) + subscription = subscriber_client.get_subscription( + request={"subscription": subscription_path} + ) except: # noqa subscription = subscriber_client.create_subscription( - subscription_path, topic=topic + request={"name": subscription_path, "topic": topic} ) yield subscription.name - subscriber_client.delete_subscription(subscription.name) + subscriber_client.delete_subscription(request={"subscription": subscription.name}) def test_list_in_topic(subscription_admin, capsys): @@ -160,7 +168,9 @@ def test_create(subscriber_client): subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ADMIN) try: - subscriber_client.delete_subscription(subscription_path) + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) except Exception: pass @@ -168,7 +178,9 @@ def test_create(subscriber_client): @backoff.on_exception(backoff.expo, AssertionError, max_time=60) def eventually_consistent_test(): - assert subscriber_client.get_subscription(subscription_path) + assert subscriber_client.get_subscription( + request={"subscription": subscription_path} + ) eventually_consistent_test() @@ -180,7 +192,9 @@ def test_create_subscription_with_dead_letter_policy( dead_letter_topic_path = publisher_client.topic_path(PROJECT, DEAD_LETTER_TOPIC) try: - subscriber_client.delete_subscription(subscription_path) + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) except Exception: pass @@ -197,7 +211,9 @@ def 
test_create_subscription_with_dead_letter_policy( def test_create_push(subscriber_client): subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ADMIN) try: - subscriber_client.delete_subscription(subscription_path) + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) except Exception: pass @@ -205,7 +221,9 @@ def test_create_push(subscriber_client): @backoff.on_exception(backoff.expo, AssertionError, max_time=60) def eventually_consistent_test(): - assert subscriber_client.get_subscription(subscription_path) + assert subscriber_client.get_subscription( + request={"subscription": subscription_path} + ) eventually_consistent_test() @@ -217,7 +235,7 @@ def test_create_subscription_with_ordering(subscriber_client, capsys): assert "enable_message_ordering: true" in out subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ORDERING) - subscriber_client.delete_subscription(subscription_path) + subscriber_client.delete_subscription(request={"subscription": subscription_path}) def test_update(subscriber_client, subscription_admin, capsys): @@ -246,17 +264,17 @@ def test_delete(subscriber_client, subscription_admin): @backoff.on_exception(backoff.expo, AssertionError, max_time=60) def eventually_consistent_test(): with pytest.raises(Exception): - subscriber_client.get_subscription(subscription_admin) + subscriber_client.get_subscription( + request={"subscription": subscription_admin} + ) eventually_consistent_test() -def _publish_messages(publisher_client, topic): +def _publish_messages(publisher_client, topic, **attrs): for n in range(5): data = u"message {}".format(n).encode("utf-8") - publish_future = publisher_client.publish( - topic, data=data, origin="python-sample" - ) + publish_future = publisher_client.publish(topic, data, **attrs) publish_future.result() @@ -275,7 +293,7 @@ def test_receive_with_custom_attributes( publisher_client, topic, subscription_async, capsys ): - 
_publish_messages(publisher_client, topic) + _publish_messages(publisher_client, topic, origin="python-sample") subscriber.receive_messages_with_custom_attributes(PROJECT, SUBSCRIPTION_ASYNC, 5) diff --git a/scripts/fixup_pubsub_v1_keywords.py b/scripts/fixup_pubsub_v1_keywords.py new file mode 100644 index 000000000..b54a7ad37 --- /dev/null +++ b/scripts/fixup_pubsub_v1_keywords.py @@ -0,0 +1,206 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class pubsubCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'acknowledge': ('subscription', 'ack_ids', ), + 'create_snapshot': ('name', 'subscription', 'labels', ), + 'create_subscription': ('name', 'topic', 'push_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', ), + 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', ), + 'delete_snapshot': ('snapshot', ), + 'delete_subscription': ('subscription', ), + 'delete_topic': ('topic', ), + 'detach_subscription': ('subscription', ), + 'get_snapshot': ('snapshot', ), + 'get_subscription': ('subscription', ), + 'get_topic': ('topic', ), + 'list_snapshots': ('project', 'page_size', 'page_token', ), + 'list_subscriptions': ('project', 'page_size', 'page_token', ), + 'list_topics': ('project', 'page_size', 'page_token', ), + 'list_topic_snapshots': ('topic', 'page_size', 'page_token', ), + 'list_topic_subscriptions': ('topic', 'page_size', 'page_token', ), + 'modify_ack_deadline': ('subscription', 'ack_ids', 'ack_deadline_seconds', ), + 'modify_push_config': ('subscription', 'push_config', ), + 'publish': ('topic', 'messages', ), + 'pull': ('subscription', 'max_messages', 'return_immediately', ), + 'seek': ('subscription', 'time', 'snapshot', ), + 'streaming_pull': ('subscription', 
'stream_ack_deadline_seconds', 'ack_ids', 'modify_deadline_seconds', 'modify_deadline_ack_ids', 'client_id', 'max_outstanding_messages', 'max_outstanding_bytes', ), + 'update_snapshot': ('snapshot', 'update_mask', ), + 'update_subscription': ('subscription', 'update_mask', ), + 'update_topic': ('topic', 'update_mask', ), + + 'get_iam_policy': ('resource', 'options', ), + 'set_iam_policy': ('resource', 'policy', ), + 'test_iam_permissions': ('resource', 'permissions', ), + + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. 
+ for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=pubsubCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the pubsub client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/setup.py b/setup.py index 3da2e269a..c26d140a4 100644 --- a/setup.py +++ b/setup.py @@ -22,19 +22,17 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "1.7.0" +version = "2.0.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - # google-api-core[grpc] 1.17.0 up to 1.19.1 causes problems with stream - # recovery, thus those versions should not be used. - # https://github.com/googleapis/python-pubsub/issues/74 - "google-api-core[grpc] >= 1.14.0, != 1.17.*, != 1.18.*, != 1.19.*", + "google-api-core[grpc] >= 1.22.1, < 2.0.0dev", + "libcst >= 0.3.10", + "proto-plus >= 1.7.1", "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", - 'enum34; python_version < "3.4"', ] extras = {} @@ -50,7 +48,9 @@ # Only include packages under the 'google' namespace. Do not include tests, # benchmarks, etc. 
packages = [ - package for package in setuptools.find_packages() if package.startswith("google") + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") ] # Determine which namespaces are needed. @@ -73,12 +73,10 @@ "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", "Operating System :: OS Independent", "Topic :: Internet", ], @@ -87,7 +85,8 @@ namespace_packages=namespaces, install_requires=dependencies, extras_require=extras, - python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*", + python_requires=">=3.6", + scripts=["scripts/fixup_pubsub_v1_keywords.py"], include_package_data=True, zip_safe=False, ) diff --git a/synth.metadata b/synth.metadata index f67fbeec5..46bc8fdd2 100644 --- a/synth.metadata +++ b/synth.metadata @@ -3,37 +3,29 @@ { "git": { "name": ".", - "remote": "https://github.com/googleapis/python-pubsub.git", - "sha": "c8f63788636c2e3436c8ce6a01ef3b59e3df772a" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "b882b8e6bfcd708042ff00f7adc67ce750817dd0", - "internalRef": "318028816" + "remote": "git@github.com:plamut/python-pubsub.git", + "sha": "c29d7f891c776e1a3fcb1cbfc7f549ca0772f38e" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "303271797a360f8a439203413f13a160f2f5b3b4" + "sha": "b2c32f1c8a4094f0f47fcf5d10f0b6f2bfb3387d" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "303271797a360f8a439203413f13a160f2f5b3b4" + "sha": 
"b2c32f1c8a4094f0f47fcf5d10f0b6f2bfb3387d" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "303271797a360f8a439203413f13a160f2f5b3b4" + "sha": "b2c32f1c8a4094f0f47fcf5d10f0b6f2bfb3387d" } } ], diff --git a/synth.py b/synth.py index 0e2c96e42..fe1b0838e 100644 --- a/synth.py +++ b/synth.py @@ -45,234 +45,75 @@ ], ) -# Adjust tests to import the clients directly. +# DEFAULT SCOPES and SERVICE_ADDRESS are being used. so let's force them in. s.replace( - "tests/unit/gapic/v1/test_publisher_client_v1.py", - "from google.cloud import pubsub_v1", - "from google.cloud.pubsub_v1.gapic import publisher_client", -) - -s.replace( - "tests/unit/gapic/v1/test_publisher_client_v1.py", " pubsub_v1", " publisher_client" -) - -s.replace( - "tests/unit/gapic/v1/test_subscriber_client_v1.py", - "from google.cloud import pubsub_v1", - "from google.cloud.pubsub_v1.gapic import subscriber_client", -) - -s.replace( - "tests/unit/gapic/v1/test_subscriber_client_v1.py", - " pubsub_v1", - " subscriber_client", -) - -# DEFAULT SCOPES are being used. so let's force them in. -s.replace( - "google/cloud/pubsub_v1/gapic/*er_client.py", - "# The name of the interface for this client. 
This is the key used to", - """# The scopes needed to make gRPC calls to all of the methods defined in + "google/pubsub_v1/services/*er/*client.py", + r"DEFAULT_ENDPOINT = 'pubsub\.googleapis\.com'", + """ + # The scopes needed to make gRPC calls to all of the methods defined in # this service _DEFAULT_SCOPES = ( 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/pubsub', ) - - \g<0>""", -) - -s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - "import google.api_core.gapic_v1.method\n", - "\g<0>import google.api_core.path_template\n", -) - -# Doc strings are formatted poorly -s.replace( - "google/cloud/pubsub_v1/proto/pubsub_pb2.py", - 'DESCRIPTOR = _MESSAGESTORAGEPOLICY,\n\s+__module__.*\n\s+,\n\s+__doc__ = """', - "\g<0>A message storage policy.\n\n\n ", -) - -s.replace( - "google/cloud/pubsub_v1/gapic/subscriber_client.py", - "subscription \(str\): The subscription whose backlog .*\n(.*\n)+?" - "\s+Format is .*", - """subscription (str): The subscription whose backlog the snapshot retains. - Specifically, the created snapshot is guaranteed to retain: \\ - (a) The existing backlog on the subscription. More precisely, this is \\ - defined as the messages in the subscription's backlog that are \\ - unacknowledged upon the successful completion of the \\ - `CreateSnapshot` request; as well as: \\ - (b) Any messages published to the subscription's topic following the \\ - successful completion of the CreateSnapshot request. 
\\ - - Format is ``projects/{project}/subscriptions/{sub}``.""", -) - -s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - "import functools\n", - "import collections\n" - "from copy import deepcopy\n\g<0>" -) - -s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - "import pkg_resources\n", - "\g<0>import six\n" -) - -s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - "class PublisherClient", - """# TODO: remove conditional import after Python 2 support is dropped -if six.PY2: - from collections import Mapping -else: - from collections.abc import Mapping - - -def _merge_dict(d1, d2): - # Modifies d1 in-place to take values from d2 - # if the nested keys from d2 are present in d1. - # https://stackoverflow.com/a/10704003/4488789 - for k, v2 in d2.items(): - v1 = d1.get(k) # returns None if v1 has no such key - if v1 is None: - raise Exception("{} is not recognized by client_config".format(k)) - if isinstance(v1, Mapping) and isinstance(v2, Mapping): - _merge_dict(v1, v2) - else: - d1[k] = v2 - return d1 - \n\n\g<0>""" -) - -s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - "client_config \(dict\): DEPRECATED.", - "client_config (dict):" -) - -s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - "# Raise deprecation warnings .*\n.*\n.*\n.*\n.*\n.*\n", - """default_client_config = deepcopy(publisher_client_config.config) - - if client_config is None: - client_config = default_client_config - else: - client_config = _merge_dict(default_client_config, client_config) - """ -) - -s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - "~google.api_core.page_iterator.PageIterator", - "~google.api_core.page_iterator.GRPCIterator" -) + 'https://www.googleapis.com/auth/pubsub', + ) -s.replace( - "google/cloud/pubsub_v1/gapic/subscriber_client.py", - "~google.api_core.page_iterator.PageIterator", - "~google.api_core.page_iterator.GRPCIterator" -) + SERVICE_ADDRESS = 
"pubsub.googleapis.com:443" + \"""The default address of the service.\""" -# Temporary fixup for 'grpc-google-iam-vi 0.12.4' (before generation). -s.replace( - "google/cloud/pubsub_v1/gapic/transports/*_grpc_transport.py", - "from google.iam.v1 import iam_policy_pb2", - "from google.iam.v1 import iam_policy_pb2_grpc as iam_policy_pb2", + \g<0>""", ) # Monkey patch the streaming_pull() GAPIC method to disable pre-fetching stream # results. s.replace( - "google/cloud/pubsub_v1/gapic/subscriber_client.py", - r"return self\._inner_api_calls\['streaming_pull'\]\(.*", + "google/pubsub_v1/services/subscriber/client.py", + ( + r"# Wrap the RPC method.*\n" + r"\s+# and friendly error.*\n" + r"\s+rpc = self\._transport\._wrapped_methods\[self\._transport\.streaming_pull\]" + ), """ - # Wrappers in api-core should not automatically pre-fetch the first - # stream result, as this breaks the stream when re-opening it. - # https://github.com/googleapis/python-pubsub/issues/93#issuecomment-630762257 - self.transport.streaming_pull._prefetch_first_result_ = False + # Wrappers in api-core should not automatically pre-fetch the first + # stream result, as this breaks the stream when re-opening it. + # https://github.com/googleapis/python-pubsub/issues/93#issuecomment-630762257 + self._transport.streaming_pull._prefetch_first_result_ = False - \g<0>""" + \g<0>""", ) -# Add missing blank line before Attributes: in generated docstrings -# https://github.com/googleapis/protoc-docs-plugin/pull/31 +# Docstrings of *_iam_policy() methods are formatted poorly and must be fixed +# in order to avoid docstring format warnings in docs. s.replace( - "google/cloud/pubsub_v1/proto/pubsub_pb2.py", - "(\s+)Attributes:", - "\n\g<1>Attributes:" -) - -# Fix incomplete docstring examples. 
-s.replace( - "google/cloud/pubsub_v1/gapic/subscriber_client.py", - r"\s+>>> subscription = \{'ack_deadline_seconds': ack_deadline_seconds\}", - textwrap.indent( - """ ->>> subscription_name = 'projects/my-project/subscriptions/my-subscription' ->>> subscription = { -... 'name': subscription_name, -... 'ack_deadline_seconds': ack_deadline_seconds, -... }""", - prefix=" " * 12, - ) + "google/pubsub_v1/services/*er/client.py", + r"(\s+)Args:", + "\n\g<1>Args:" ) - s.replace( - "google/cloud/pubsub_v1/gapic/subscriber_client.py", - r"\s+>>> snapshot = \{'expire_time': expire_time\}", - textwrap.indent( - """ ->>> snapshot_name = 'projects/my-project/snapshots/my-snapshot' ->>> snapshot = { -... 'name': snapshot_name, -... 'expire_time': expire_time, -... }""", - prefix=" " * 12, - ) + "google/pubsub_v1/services/*er/client.py", + r"(\s+)\*\*JSON Example\*\*\s+::", + "\n\g<1>**JSON Example**::\n", ) - s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - r"\s+>>> # TODO: Initialize `topic`:\n\s+>>> topic = \{\}\n", - textwrap.indent( - """ ->>> topic_name = 'projects/my-project/topics/my-topic' ->>> topic_labels = {'source': 'external'} ->>> topic = {'name': topic_name, 'labels': topic_labels} -""", - prefix=" " * 12, - ), + "google/pubsub_v1/services/*er/client.py", + r"(\s+)\*\*YAML Example\*\*\s+::", + "\n\g<1>**YAML Example**::\n", ) - s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - r"\s+>>> # TODO: Initialize `update_mask`:\n\s+>>> update_mask = \{\}\n", - textwrap.indent( - """ ->>> paths_element = 'labels' ->>> paths = [paths_element] ->>> update_mask = {'paths': paths} -""", - prefix=" " * 12, - ), + "google/pubsub_v1/services/*er/client.py", + r"(\s+)For a description of IAM and its features, see", + "\n\g<0>", ) # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- templated_files = 
gcp.CommonTemplates().py_library( - unit_cov_level=97, + microgenerator=True, + samples=True, cov_level=99, system_test_external_dependencies=["psutil"], - samples=True, ) -s.move(templated_files) +s.move(templated_files, excludes=[".coveragerc"]) # ---------------------------------------------------------------------------- # Samples templates diff --git a/tests/system.py b/tests/system.py index 1694c5022..bbedd9a11 100644 --- a/tests/system.py +++ b/tests/system.py @@ -32,18 +32,19 @@ from google.cloud.pubsub_v1 import exceptions from google.cloud.pubsub_v1 import futures from google.cloud.pubsub_v1 import types +from google.pubsub_v1 import types as gapic_types from test_utils.system import unique_resource_id -@pytest.fixture(scope=u"module") +@pytest.fixture(scope="module") def project(): _, default_project = google.auth.default() yield default_project -@pytest.fixture(scope=u"module") +@pytest.fixture(scope="module") def publisher(): yield pubsub_v1.PublisherClient() @@ -71,15 +72,15 @@ def cleanup(): yield registry # Perform all clean up. - for to_call, argument in registry: - to_call(argument) + for to_call, args, kwargs in registry: + to_call(*args, **kwargs) def test_publish_messages(publisher, topic_path, cleanup): # Make sure the topic gets deleted. - cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) - publisher.create_topic(topic_path) + publisher.create_topic(name=topic_path) futures = [ publisher.publish( @@ -95,7 +96,7 @@ def test_publish_messages(publisher, topic_path, cleanup): def test_publish_large_messages(publisher, topic_path, cleanup): # Make sure the topic gets deleted. 
- cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) # Each message should be smaller than 10**7 bytes (the server side limit for # PublishRequest), but all messages combined in a PublishRequest should @@ -110,7 +111,7 @@ def test_publish_large_messages(publisher, topic_path, cleanup): max_latency=2.0, # so that autocommit happens after publishing all messages max_messages=100, ) - publisher.create_topic(topic_path) + publisher.create_topic(name=topic_path) futures = [publisher.publish(topic_path, msg_data, num=str(i)) for i in range(5)] @@ -126,15 +127,17 @@ def test_subscribe_to_messages( publisher, topic_path, subscriber, subscription_path, cleanup ): # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # Create a topic. - publisher.create_topic(topic_path) + publisher.create_topic(name=topic_path) # Subscribe to the topic. This must happen before the messages # are published. - subscriber.create_subscription(subscription_path, topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) # Publish some messages. futures = [ @@ -169,15 +172,17 @@ def test_subscribe_to_messages_async_callbacks( publisher, topic_path, subscriber, subscription_path, cleanup ): # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # Create a topic. 
- publisher.create_topic(topic_path) + publisher.create_topic(name=topic_path) # Subscribe to the topic. This must happen before the messages # are published. - subscriber.create_subscription(subscription_path, topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) # Publish some messages. futures = [ @@ -219,26 +224,27 @@ def test_creating_subscriptions_with_non_default_settings( publisher, subscriber, project, topic_path, subscription_path, cleanup ): # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # create a topic and a subscription, customize the latter's policy - publisher.create_topic(topic_path) - - msg_retention_duration = {"seconds": 911} - expiration_policy = {"ttl": {"seconds": 90210}} - new_subscription = subscriber.create_subscription( - subscription_path, - topic_path, - ack_deadline_seconds=30, - retain_acked_messages=True, - message_retention_duration=msg_retention_duration, - expiration_policy=expiration_policy, - ) + publisher.create_topic(name=topic_path) + + request = { + "name": subscription_path, + "topic": topic_path, + "ack_deadline_seconds": 30, + "retain_acked_messages": True, + "message_retention_duration": {"seconds": 911}, + "expiration_policy": {"ttl": {"seconds": 90210}}, # 1 day, 3810 seconds + } + new_subscription = subscriber.create_subscription(request) # fetch the subscription and check its settings - project_path = subscriber.project_path(project) - subscriptions = subscriber.list_subscriptions(project_path) + project_path = f"projects/{project}" + subscriptions = subscriber.list_subscriptions(project=project_path) subscriptions = [sub for sub in subscriptions if sub.topic == topic_path] assert 
len(subscriptions) == 1 @@ -248,7 +254,9 @@ def test_creating_subscriptions_with_non_default_settings( assert subscription.ack_deadline_seconds == 30 assert subscription.retain_acked_messages assert subscription.message_retention_duration.seconds == 911 - assert subscription.expiration_policy.ttl.seconds == 90210 + assert subscription.expiration_policy.ttl == datetime.timedelta( + days=1, seconds=3810 + ) def test_listing_project_topics(publisher, project, cleanup): @@ -257,11 +265,11 @@ def test_listing_project_topics(publisher, project, cleanup): for i in range(1, 4) ] for topic in topic_paths: - cleanup.append((publisher.delete_topic, topic)) - publisher.create_topic(topic) + cleanup.append((publisher.delete_topic, (), {"topic": topic})) + publisher.create_topic(name=topic) - project_path = publisher.project_path(project) - project_topics = publisher.list_topics(project_path) + project_path = f"projects/{project}" + project_topics = publisher.list_topics(project=project_path) project_topics = set(t.name for t in project_topics) # there might be other topics in the project, thus do a "is subset" check @@ -275,8 +283,8 @@ def test_listing_project_subscriptions(publisher, subscriber, project, cleanup): publisher.topic_path(project, "topic-2" + unique_resource_id(".")), ] for topic in topic_paths: - cleanup.append((publisher.delete_topic, topic)) - publisher.create_topic(topic) + cleanup.append((publisher.delete_topic, (), {"topic": topic})) + publisher.create_topic(name=topic) # create subscriptions subscription_paths = [ @@ -287,12 +295,14 @@ def test_listing_project_subscriptions(publisher, subscriber, project, cleanup): ] for i, subscription in enumerate(subscription_paths): topic = topic_paths[i % 2] - cleanup.append((subscriber.delete_subscription, subscription)) - subscriber.create_subscription(subscription, topic) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription}) + ) + subscriber.create_subscription(name=subscription, 
topic=topic) # retrieve subscriptions and check that the list matches the expected - project_path = subscriber.project_path(project) - subscriptions = subscriber.list_subscriptions(project_path) + project_path = f"projects/{project}" + subscriptions = subscriber.list_subscriptions(project=project_path) subscriptions = set(s.name for s in subscriptions) # there might be other subscriptions in the project, thus do a "is subset" check @@ -306,8 +316,8 @@ def test_listing_topic_subscriptions(publisher, subscriber, project, cleanup): publisher.topic_path(project, "topic-2" + unique_resource_id(".")), ] for topic in topic_paths: - cleanup.append((publisher.delete_topic, topic)) - publisher.create_topic(topic) + cleanup.append((publisher.delete_topic, (), {"topic": topic})) + publisher.create_topic(name=topic) # create subscriptions subscription_paths = [ @@ -318,31 +328,35 @@ def test_listing_topic_subscriptions(publisher, subscriber, project, cleanup): ] for i, subscription in enumerate(subscription_paths): topic = topic_paths[i % 2] - cleanup.append((subscriber.delete_subscription, subscription)) - subscriber.create_subscription(subscription, topic) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription}) + ) + subscriber.create_subscription(name=subscription, topic=topic) # retrieve subscriptions and check that the list matches the expected - subscriptions = publisher.list_topic_subscriptions(topic_paths[0]) - subscriptions = set(subscriptions) + response = publisher.list_topic_subscriptions(topic=topic_paths[0]) + subscriptions = set(response) assert subscriptions == {subscription_paths[0], subscription_paths[2]} def test_managing_topic_iam_policy(publisher, topic_path, cleanup): - cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) # create a topic and customize its policy - publisher.create_topic(topic_path) - topic_policy = publisher.get_iam_policy(topic_path) + 
publisher.create_topic(name=topic_path) + topic_policy = publisher.get_iam_policy(request={"resource": topic_path}) topic_policy.bindings.add(role="roles/pubsub.editor", members=["domain:google.com"]) topic_policy.bindings.add( role="roles/pubsub.viewer", members=["group:cloud-logs@google.com"] ) - new_policy = publisher.set_iam_policy(topic_path, topic_policy) + new_policy = publisher.set_iam_policy( + request={"resource": topic_path, "policy": topic_policy} + ) # fetch the topic policy again and check its values - topic_policy = publisher.get_iam_policy(topic_path) + topic_policy = publisher.get_iam_policy(request={"resource": topic_path}) assert topic_policy.bindings == new_policy.bindings assert len(topic_policy.bindings) == 2 @@ -358,22 +372,26 @@ def test_managing_subscription_iam_policy( publisher, subscriber, topic_path, subscription_path, cleanup ): # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # create a topic and a subscription, customize the latter's policy - publisher.create_topic(topic_path) - subscriber.create_subscription(subscription_path, topic_path) - sub_policy = subscriber.get_iam_policy(subscription_path) + publisher.create_topic(name=topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) + sub_policy = subscriber.get_iam_policy(request={"resource": subscription_path}) sub_policy.bindings.add(role="roles/pubsub.editor", members=["domain:google.com"]) sub_policy.bindings.add( role="roles/pubsub.viewer", members=["group:cloud-logs@google.com"] ) - new_policy = subscriber.set_iam_policy(subscription_path, sub_policy) + new_policy = subscriber.set_iam_policy( + request={"resource": subscription_path, "policy": sub_policy} 
+ ) # fetch the subscription policy again and check its values - sub_policy = subscriber.get_iam_policy(subscription_path) + sub_policy = subscriber.get_iam_policy(request={"resource": subscription_path}) assert sub_policy.bindings == new_policy.bindings assert len(sub_policy.bindings) == 2 @@ -397,17 +415,15 @@ def test_subscriber_not_leaking_open_sockets( # subscriber releases the sockets, too. subscriber = pubsub_v1.SubscriberClient() subscriber_2 = pubsub_v1.SubscriberClient() - cleanup.append((subscriber_2.delete_subscription, subscription_path)) - - def one_arg_close(subscriber): # the cleanup helper expects exactly one argument - subscriber.close() - - cleanup.append((one_arg_close, subscriber_2)) - cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append( + (subscriber_2.delete_subscription, (), {"subscription": subscription_path}) + ) + cleanup.append((subscriber_2.close, (), {})) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) # Create topic before starting to track connection count (any sockets opened # by the publisher client are not counted by this test). - publisher.create_topic(topic_path) + publisher.create_topic(name=topic_path) current_process = psutil.Process() conn_count_start = len(current_process.connections()) @@ -419,14 +435,14 @@ def one_arg_close(subscriber): # the cleanup helper expects exactly one argumen # Publish a few messages, wait for the publish to succeed. publish_futures = [ - publisher.publish(topic_path, u"message {}".format(i).encode()) + publisher.publish(topic_path, "message {}".format(i).encode()) for i in range(1, 4) ] for future in publish_futures: future.result() # Synchronously pull messages. 
- response = subscriber.pull(subscription_path, max_messages=3) + response = subscriber.pull(subscription=subscription_path, max_messages=3) assert len(response.received_messages) == 3 conn_count_end = len(current_process.connections()) @@ -437,15 +453,17 @@ def test_synchronous_pull_no_deadline_error_if_no_messages( publisher, topic_path, subscriber, subscription_path, cleanup ): # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # Create a topic and subscribe to it. - publisher.create_topic(topic_path) - subscriber.create_subscription(subscription_path, topic_path) + publisher.create_topic(name=topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) try: - response = subscriber.pull(subscription_path, max_messages=2) + response = subscriber.pull(subscription=subscription_path, max_messages=2) except core_exceptions.DeadlineExceeded: pytest.fail( "Unexpected DeadlineExceeded error on synchronous pull when no " @@ -460,12 +478,14 @@ def test_streaming_pull_callback_error_propagation( self, publisher, topic_path, subscriber, subscription_path, cleanup ): # Make sure the topic and subscription get deleted. 
- cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # create a topic and subscribe to it - publisher.create_topic(topic_path) - subscriber.create_subscription(subscription_path, topic_path) + publisher.create_topic(name=topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) # publish a messages and wait until published future = publisher.publish(topic_path, b"hello!") @@ -486,17 +506,19 @@ def test_streaming_pull_ack_deadline( self, publisher, subscriber, project, topic_path, subscription_path, cleanup ): # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # Create a topic and a subscription, then subscribe to the topic. This # must happen before the messages are published. - publisher.create_topic(topic_path) + publisher.create_topic(name=topic_path) # Subscribe to the topic. This must happen before the messages # are published. subscriber.create_subscription( - subscription_path, topic_path, ack_deadline_seconds=45 + name=subscription_path, topic=topic_path, ack_deadline_seconds=45 ) # publish some messages and wait for completion @@ -535,12 +557,14 @@ def test_streaming_pull_max_messages( self, publisher, topic_path, subscriber, subscription_path, cleanup ): # Make sure the topic and subscription get deleted. 
- cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # create a topic and subscribe to it - publisher.create_topic(topic_path) - subscriber.create_subscription(subscription_path, topic_path) + publisher.create_topic(name=topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) batch_sizes = (7, 4, 8, 2, 10, 1, 3, 8, 6, 1) # total: 50 _publish_messages(publisher, topic_path, batch_sizes=batch_sizes) @@ -596,12 +620,14 @@ def test_streaming_pull_subscriber_permissions_sufficient( ): # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # create a topic and subscribe to it - publisher.create_topic(topic_path) - subscriber.create_subscription(subscription_path, topic_path) + publisher.create_topic(name=topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) # A service account granting only the pubsub.subscriber role must be used. filename = os.path.join( @@ -631,12 +657,14 @@ def test_publisher_role_can_publish_messages( ): # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # Create a topic and subscribe to it. 
- publisher.create_topic(topic_path) - subscriber.create_subscription(subscription_path, topic_path) + publisher.create_topic(name=topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) # Create a publisher client with only the publisher role only. filename = os.path.join( @@ -646,7 +674,7 @@ def test_publisher_role_can_publish_messages( _publish_messages(publisher_only_client, topic_path, batch_sizes=[2]) - response = subscriber.pull(subscription_path, max_messages=2) + response = subscriber.pull(subscription=subscription_path, max_messages=2) assert len(response.received_messages) == 2 @pytest.mark.skip( @@ -659,14 +687,16 @@ def test_snapshot_seek_subscriber_permissions_sufficient( snapshot_path = "projects/{}/snapshots/{}".format(project, snapshot_name) # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) - cleanup.append((subscriber.delete_snapshot, snapshot_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) + cleanup.append((subscriber.delete_snapshot, (), {"snapshot": snapshot_path})) # Create a topic and subscribe to it. - publisher.create_topic(topic_path) + publisher.create_topic(name=topic_path) subscriber.create_subscription( - subscription_path, topic_path, retain_acked_messages=True + name=subscription_path, topic=topic_path, retain_acked_messages=True ) # A service account granting only the pubsub.subscriber role must be used. @@ -677,20 +707,23 @@ def test_snapshot_seek_subscriber_permissions_sufficient( # Publish two messages and create a snapshot inbetween. 
_publish_messages(publisher, topic_path, batch_sizes=[1]) - response = subscriber.pull(subscription_path, max_messages=10) + response = subscriber.pull(subscription=subscription_path, max_messages=10) assert len(response.received_messages) == 1 - subscriber.create_snapshot(snapshot_path, subscription_path) + subscriber.create_snapshot(name=snapshot_path, subscription=subscription_path) _publish_messages(publisher, topic_path, batch_sizes=[1]) - response = subscriber.pull(subscription_path, max_messages=10) + response = subscriber.pull(subscription=subscription_path, max_messages=10) assert len(response.received_messages) == 1 # A subscriber-only client should be allowed to seek to a snapshot. - subscriber_only_client.seek(subscription_path, snapshot=snapshot_path) + seek_request = gapic_types.SeekRequest( + subscription=subscription_path, snapshot=snapshot_path + ) + subscriber_only_client.seek(seek_request) # We should receive one message again, since we sought back to a snapshot. - response = subscriber.pull(subscription_path, max_messages=10) + response = subscriber.pull(subscription=subscription_path, max_messages=10) assert len(response.received_messages) == 1 def test_viewer_role_can_list_resources( @@ -699,9 +732,9 @@ def test_viewer_role_can_list_resources( project_path = "projects/" + project # Make sure the created topic gets deleted. - cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) - publisher.create_topic(topic_path) + publisher.create_topic(name=topic_path) # A service account granting only the pubsub.viewer role must be used. filename = os.path.join( @@ -712,10 +745,17 @@ def test_viewer_role_can_list_resources( # The following operations should not raise permission denied errors. # NOTE: At least one topic exists. 
- topic = next(iter(viewer_only_publisher.list_topics(project_path))) - next(iter(viewer_only_publisher.list_topic_subscriptions(topic.name)), None) - next(iter(viewer_only_subscriber.list_subscriptions(project_path)), None) - next(iter(viewer_only_subscriber.list_snapshots(project_path)), None) + topic = next(iter(viewer_only_publisher.list_topics(project=project_path))) + + next( + iter(viewer_only_publisher.list_topic_subscriptions(topic=topic.name)), None + ) + + next( + iter(viewer_only_subscriber.list_subscriptions(project=project_path)), None + ) + + next(iter(viewer_only_subscriber.list_snapshots(project=project_path)), None) def test_editor_role_can_create_resources( self, project, publisher, topic_path, subscriber, subscription_path, cleanup @@ -724,9 +764,11 @@ def test_editor_role_can_create_resources( snapshot_path = "projects/{}/snapshots/{}".format(project, snapshot_name) # Make sure the created resources get deleted. - cleanup.append((subscriber.delete_snapshot, snapshot_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) - cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((subscriber.delete_snapshot, (), {"snapshot": snapshot_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) # A service account granting only the pubsub.editor role must be used. filename = os.path.join( @@ -736,9 +778,11 @@ def test_editor_role_can_create_resources( editor_publisher = type(publisher).from_service_account_file(filename) # The following operations should not raise permission denied errors. 
- editor_publisher.create_topic(topic_path) - editor_subscriber.create_subscription(subscription_path, topic_path) - editor_subscriber.create_snapshot(snapshot_path, subscription_path) + editor_publisher.create_topic(name=topic_path) + editor_subscriber.create_subscription(name=subscription_path, topic=topic_path) + editor_subscriber.create_snapshot( + name=snapshot_path, subscription=subscription_path + ) def _publish_messages(publisher, topic_path, batch_sizes): @@ -760,7 +804,7 @@ def _publish_messages(publisher, topic_path, batch_sizes): def _make_messages(count): messages = [ - u"message {}/{}".format(i, count).encode("utf-8") for i in range(1, count + 1) + "message {}/{}".format(i, count).encode("utf-8") for i in range(1, count + 1) ] return messages diff --git a/tests/unit/gapic/pubsub_v1/__init__.py b/tests/unit/gapic/pubsub_v1/__init__.py new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/tests/unit/gapic/pubsub_v1/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/unit/gapic/pubsub_v1/test_publisher.py b/tests/unit/gapic/pubsub_v1/test_publisher.py new file mode 100644 index 000000000..f159c1cfb --- /dev/null +++ b/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -0,0 +1,3106 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + +from google import auth +from google.api_core import client_options +from google.api_core import exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import options_pb2 as options # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.pubsub_v1.services.publisher import PublisherAsyncClient +from google.pubsub_v1.services.publisher import PublisherClient +from google.pubsub_v1.services.publisher import pagers +from google.pubsub_v1.services.publisher import transports +from google.pubsub_v1.types import pubsub + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert PublisherClient._get_default_mtls_endpoint(None) is None + assert PublisherClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ( + PublisherClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + PublisherClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + PublisherClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert PublisherClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [PublisherClient, PublisherAsyncClient]) +def test_publisher_client_from_service_account_file(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client._transport._credentials == creds + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client._transport._credentials == creds + + assert client._transport._host == "pubsub.googleapis.com:443" + + +def test_publisher_client_get_transport_class(): + transport = PublisherClient.get_transport_class() + assert transport == transports.PublisherGrpcTransport + + transport = PublisherClient.get_transport_class("grpc") + assert transport == transports.PublisherGrpcTransport + + +@pytest.mark.parametrize( + 
"client_class,transport_class,transport_name", + [ + (PublisherClient, transports.PublisherGrpcTransport, "grpc"), + ( + PublisherAsyncClient, + transports.PublisherGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + PublisherClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PublisherClient) +) +@mock.patch.object( + PublisherAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PublisherAsyncClient), +) +def test_publisher_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(PublisherClient, "get_transport_class") as gtc: + transport = transport_class(credentials=credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(PublisherClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + api_mtls_endpoint="squid.clam.whelk", + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", and client_cert_source is provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=client_cert_source_callback, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", and default_client_cert_source is provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", but client_cert_source and default_client_cert_source are None. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PublisherClient, transports.PublisherGrpcTransport, "grpc"), + ( + PublisherAsyncClient, + transports.PublisherGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_publisher_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PublisherClient, transports.PublisherGrpcTransport, "grpc"), + ( + PublisherAsyncClient, + transports.PublisherGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_publisher_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_publisher_client_client_options_from_dict(): + with mock.patch( + "google.pubsub_v1.services.publisher.transports.PublisherGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = PublisherClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + api_mtls_endpoint="squid.clam.whelk", + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_create_topic(transport: str = "grpc", request_type=pubsub.Topic): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Topic( + name="name_value", kms_key_name="kms_key_name_value", + ) + + response = client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.Topic() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Topic) + + assert response.name == "name_value" + + assert response.kms_key_name == "kms_key_name_value" + + +def test_create_topic_from_dict(): + test_create_topic(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_topic_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.Topic() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_topic), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Topic(name="name_value", kms_key_name="kms_key_name_value",) + ) + + response = await client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Topic) + + assert response.name == "name_value" + + assert response.kms_key_name == "kms_key_name_value" + + +def test_create_topic_field_headers(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.Topic() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client._transport.create_topic), "__call__") as call: + call.return_value = pubsub.Topic() + + client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_topic_field_headers_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.Topic() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_topic), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) + + await client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_create_topic_flattened(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Topic() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.create_topic(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_create_topic_flattened_error(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_topic( + pubsub.Topic(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_create_topic_flattened_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_topic), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Topic() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_topic(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_create_topic_flattened_error_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_topic( + pubsub.Topic(), name="name_value", + ) + + +def test_update_topic(transport: str = "grpc", request_type=pubsub.UpdateTopicRequest): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.update_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Topic( + name="name_value", kms_key_name="kms_key_name_value", + ) + + response = client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.UpdateTopicRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Topic) + + assert response.name == "name_value" + + assert response.kms_key_name == "kms_key_name_value" + + +def test_update_topic_from_dict(): + test_update_topic(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_topic_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.UpdateTopicRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_topic), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Topic(name="name_value", kms_key_name="kms_key_name_value",) + ) + + response = await client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Topic) + + assert response.name == "name_value" + + assert response.kms_key_name == "kms_key_name_value" + + +def test_update_topic_field_headers(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.UpdateTopicRequest() + request.topic.name = "topic.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.update_topic), "__call__") as call: + call.return_value = pubsub.Topic() + + client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "topic.name=topic.name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_topic_field_headers_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.UpdateTopicRequest() + request.topic.name = "topic.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.update_topic), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) + + await client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "topic.name=topic.name/value",) in kw["metadata"] + + +def test_publish(transport: str = "grpc", request_type=pubsub.PublishRequest): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.publish), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.PublishResponse(message_ids=["message_ids_value"],) + + response = client.publish(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.PublishRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.PublishResponse) + + assert response.message_ids == ["message_ids_value"] + + +def test_publish_from_dict(): + test_publish(request_type=dict) + + +@pytest.mark.asyncio +async def test_publish_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = pubsub.PublishRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._client._transport.publish), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.PublishResponse(message_ids=["message_ids_value"],) + ) + + response = await client.publish(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.PublishResponse) + + assert response.message_ids == ["message_ids_value"] + + +def test_publish_field_headers(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.PublishRequest() + request.topic = "topic/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.publish), "__call__") as call: + call.return_value = pubsub.PublishResponse() + + client.publish(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_publish_field_headers_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = pubsub.PublishRequest() + request.topic = "topic/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._client._transport.publish), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.PublishResponse() + ) + + await client.publish(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + + +def test_publish_flattened(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.publish), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.PublishResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.publish( + topic="topic_value", messages=[pubsub.PubsubMessage(data=b"data_blob")], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].topic == "topic_value" + + assert args[0].messages == [pubsub.PubsubMessage(data=b"data_blob")] + + +def test_publish_flattened_error(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.publish( + pubsub.PublishRequest(), + topic="topic_value", + messages=[pubsub.PubsubMessage(data=b"data_blob")], + ) + + +@pytest.mark.asyncio +async def test_publish_flattened_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._client._transport.publish), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.PublishResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.PublishResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.publish( + topic="topic_value", messages=[pubsub.PubsubMessage(data=b"data_blob")], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].topic == "topic_value" + + assert args[0].messages == [pubsub.PubsubMessage(data=b"data_blob")] + + +@pytest.mark.asyncio +async def test_publish_flattened_error_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.publish( + pubsub.PublishRequest(), + topic="topic_value", + messages=[pubsub.PubsubMessage(data=b"data_blob")], + ) + + +def test_get_topic(transport: str = "grpc", request_type=pubsub.GetTopicRequest): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Topic( + name="name_value", kms_key_name="kms_key_name_value", + ) + + response = client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.GetTopicRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Topic) + + assert response.name == "name_value" + + assert response.kms_key_name == "kms_key_name_value" + + +def test_get_topic_from_dict(): + test_get_topic(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_topic_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.GetTopicRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_topic), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Topic(name="name_value", kms_key_name="kms_key_name_value",) + ) + + response = await client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Topic) + + assert response.name == "name_value" + + assert response.kms_key_name == "kms_key_name_value" + + +def test_get_topic_field_headers(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.GetTopicRequest() + request.topic = "topic/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_topic), "__call__") as call: + call.return_value = pubsub.Topic() + + client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_topic_field_headers_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.GetTopicRequest() + request.topic = "topic/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_topic), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) + + await client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + + +def test_get_topic_flattened(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Topic() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_topic(topic="topic_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].topic == "topic_value" + + +def test_get_topic_flattened_error(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_topic( + pubsub.GetTopicRequest(), topic="topic_value", + ) + + +@pytest.mark.asyncio +async def test_get_topic_flattened_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_topic), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Topic() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_topic(topic="topic_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].topic == "topic_value" + + +@pytest.mark.asyncio +async def test_get_topic_flattened_error_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_topic( + pubsub.GetTopicRequest(), topic="topic_value", + ) + + +def test_list_topics(transport: str = "grpc", request_type=pubsub.ListTopicsRequest): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_topics), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListTopicsResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.ListTopicsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicsPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_topics_from_dict(): + test_list_topics(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_topics_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = pubsub.ListTopicsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_topics), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicsResponse(next_page_token="next_page_token_value",) + ) + + response = await client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_topics_field_headers(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListTopicsRequest() + request.project = "project/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_topics), "__call__") as call: + call.return_value = pubsub.ListTopicsResponse() + + client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "project=project/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_topics_field_headers_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = pubsub.ListTopicsRequest() + request.project = "project/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_topics), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicsResponse() + ) + + await client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "project=project/value",) in kw["metadata"] + + +def test_list_topics_flattened(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_topics), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListTopicsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_topics(project="project_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].project == "project_value" + + +def test_list_topics_flattened_error(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_topics( + pubsub.ListTopicsRequest(), project="project_value", + ) + + +@pytest.mark.asyncio +async def test_list_topics_flattened_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+        type(client._client._transport.list_topics), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = pubsub.ListTopicsResponse()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            pubsub.ListTopicsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_topics(project="project_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].project == "project_value"
+
+
+@pytest.mark.asyncio
+async def test_list_topics_flattened_error_async():
+    client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_topics(
+            pubsub.ListTopicsRequest(), project="project_value",
+        )
+
+
+def test_list_topics_pager():
+    client = PublisherClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.list_topics), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            pubsub.ListTopicsResponse(
+                topics=[pubsub.Topic(), pubsub.Topic(), pubsub.Topic(),],
+                next_page_token="abc",
+            ),
+            pubsub.ListTopicsResponse(topics=[], next_page_token="def",),
+            pubsub.ListTopicsResponse(topics=[pubsub.Topic(),], next_page_token="ghi",),
+            pubsub.ListTopicsResponse(topics=[pubsub.Topic(), pubsub.Topic(),],),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("project", ""),)),
+        )
+        pager = client.list_topics(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, pubsub.Topic) for i in results)
+
+
+def test_list_topics_pages():
+    client = PublisherClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.list_topics), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            pubsub.ListTopicsResponse(
+                topics=[pubsub.Topic(), pubsub.Topic(), pubsub.Topic(),],
+                next_page_token="abc",
+            ),
+            pubsub.ListTopicsResponse(topics=[], next_page_token="def",),
+            pubsub.ListTopicsResponse(topics=[pubsub.Topic(),], next_page_token="ghi",),
+            pubsub.ListTopicsResponse(topics=[pubsub.Topic(), pubsub.Topic(),],),
+            RuntimeError,
+        )
+        pages = list(client.list_topics(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_topics_async_pager():
+    client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.list_topics),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            pubsub.ListTopicsResponse(
+                topics=[pubsub.Topic(), pubsub.Topic(), pubsub.Topic(),],
+                next_page_token="abc",
+            ),
+            pubsub.ListTopicsResponse(topics=[], next_page_token="def",),
+            pubsub.ListTopicsResponse(topics=[pubsub.Topic(),], next_page_token="ghi",),
+            pubsub.ListTopicsResponse(topics=[pubsub.Topic(), pubsub.Topic(),],),
+            RuntimeError,
+        )
+        async_pager = await client.list_topics(request={},)
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, pubsub.Topic) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_topics_async_pages():
+    client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.list_topics),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + pubsub.ListTopicsResponse( + topics=[pubsub.Topic(), pubsub.Topic(), pubsub.Topic(),], + next_page_token="abc", + ), + pubsub.ListTopicsResponse(topics=[], next_page_token="def",), + pubsub.ListTopicsResponse(topics=[pubsub.Topic(),], next_page_token="ghi",), + pubsub.ListTopicsResponse(topics=[pubsub.Topic(), pubsub.Topic(),],), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_topics(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_list_topic_subscriptions( + transport: str = "grpc", request_type=pubsub.ListTopicSubscriptionsRequest +): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_topic_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListTopicSubscriptionsResponse( + subscriptions=["subscriptions_value"], + next_page_token="next_page_token_value", + ) + + response = client.list_topic_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.ListTopicSubscriptionsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListTopicSubscriptionsPager) + + assert response.subscriptions == ["subscriptions_value"] + + assert response.next_page_token == "next_page_token_value" + + +def test_list_topic_subscriptions_from_dict(): + test_list_topic_subscriptions(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_topic_subscriptions_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.ListTopicSubscriptionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_topic_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicSubscriptionsResponse( + subscriptions=["subscriptions_value"], + next_page_token="next_page_token_value", + ) + ) + + response = await client.list_topic_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicSubscriptionsAsyncPager) + + assert response.subscriptions == ["subscriptions_value"] + + assert response.next_page_token == "next_page_token_value" + + +def test_list_topic_subscriptions_field_headers(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = pubsub.ListTopicSubscriptionsRequest() + request.topic = "topic/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_topic_subscriptions), "__call__" + ) as call: + call.return_value = pubsub.ListTopicSubscriptionsResponse() + + client.list_topic_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_topic_subscriptions_field_headers_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListTopicSubscriptionsRequest() + request.topic = "topic/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_topic_subscriptions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicSubscriptionsResponse() + ) + + await client.list_topic_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + + +def test_list_topic_subscriptions_flattened(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._transport.list_topic_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListTopicSubscriptionsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_topic_subscriptions(topic="topic_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].topic == "topic_value" + + +def test_list_topic_subscriptions_flattened_error(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_topic_subscriptions( + pubsub.ListTopicSubscriptionsRequest(), topic="topic_value", + ) + + +@pytest.mark.asyncio +async def test_list_topic_subscriptions_flattened_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_topic_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListTopicSubscriptionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicSubscriptionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_topic_subscriptions(topic="topic_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].topic == "topic_value" + + +@pytest.mark.asyncio +async def test_list_topic_subscriptions_flattened_error_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_topic_subscriptions( + pubsub.ListTopicSubscriptionsRequest(), topic="topic_value", + ) + + +def test_list_topic_subscriptions_pager(): + client = PublisherClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_topic_subscriptions), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[str(), str(), str(),], next_page_token="abc", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[], next_page_token="def", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[str(),], next_page_token="ghi", + ), + pubsub.ListTopicSubscriptionsResponse(subscriptions=[str(), str(),],), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", ""),)), + ) + pager = client.list_topic_subscriptions(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, str) for i in results) + + +def test_list_topic_subscriptions_pages(): + client = PublisherClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_topic_subscriptions), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[str(), str(), str(),], next_page_token="abc", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[], next_page_token="def", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[str(),], next_page_token="ghi", + ), + pubsub.ListTopicSubscriptionsResponse(subscriptions=[str(), str(),],), + RuntimeError, + ) + pages = list(client.list_topic_subscriptions(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_topic_subscriptions_async_pager(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_topic_subscriptions), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[str(), str(), str(),], next_page_token="abc", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[], next_page_token="def", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[str(),], next_page_token="ghi", + ), + pubsub.ListTopicSubscriptionsResponse(subscriptions=[str(), str(),],), + RuntimeError, + ) + async_pager = await client.list_topic_subscriptions(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, str) for i in responses) + + +@pytest.mark.asyncio +async def test_list_topic_subscriptions_async_pages(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.list_topic_subscriptions), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[str(), str(), str(),], next_page_token="abc", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[], next_page_token="def", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[str(),], next_page_token="ghi", + ), + pubsub.ListTopicSubscriptionsResponse(subscriptions=[str(), str(),],), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_topic_subscriptions(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_list_topic_snapshots( + transport: str = "grpc", request_type=pubsub.ListTopicSnapshotsRequest +): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_topic_snapshots), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListTopicSnapshotsResponse( + snapshots=["snapshots_value"], next_page_token="next_page_token_value", + ) + + response = client.list_topic_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.ListTopicSnapshotsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListTopicSnapshotsPager) + + assert response.snapshots == ["snapshots_value"] + + assert response.next_page_token == "next_page_token_value" + + +def test_list_topic_snapshots_from_dict(): + test_list_topic_snapshots(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_topic_snapshots_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.ListTopicSnapshotsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_topic_snapshots), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicSnapshotsResponse( + snapshots=["snapshots_value"], next_page_token="next_page_token_value", + ) + ) + + response = await client.list_topic_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicSnapshotsAsyncPager) + + assert response.snapshots == ["snapshots_value"] + + assert response.next_page_token == "next_page_token_value" + + +def test_list_topic_snapshots_field_headers(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListTopicSnapshotsRequest() + request.topic = "topic/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._transport.list_topic_snapshots), "__call__" + ) as call: + call.return_value = pubsub.ListTopicSnapshotsResponse() + + client.list_topic_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_topic_snapshots_field_headers_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListTopicSnapshotsRequest() + request.topic = "topic/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_topic_snapshots), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicSnapshotsResponse() + ) + + await client.list_topic_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + + +def test_list_topic_snapshots_flattened(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_topic_snapshots), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = pubsub.ListTopicSnapshotsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_topic_snapshots(topic="topic_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].topic == "topic_value" + + +def test_list_topic_snapshots_flattened_error(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_topic_snapshots( + pubsub.ListTopicSnapshotsRequest(), topic="topic_value", + ) + + +@pytest.mark.asyncio +async def test_list_topic_snapshots_flattened_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_topic_snapshots), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListTopicSnapshotsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicSnapshotsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_topic_snapshots(topic="topic_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].topic == "topic_value" + + +@pytest.mark.asyncio +async def test_list_topic_snapshots_flattened_error_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_topic_snapshots( + pubsub.ListTopicSnapshotsRequest(), topic="topic_value", + ) + + +def test_list_topic_snapshots_pager(): + client = PublisherClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_topic_snapshots), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListTopicSnapshotsResponse( + snapshots=[str(), str(), str(),], next_page_token="abc", + ), + pubsub.ListTopicSnapshotsResponse(snapshots=[], next_page_token="def",), + pubsub.ListTopicSnapshotsResponse( + snapshots=[str(),], next_page_token="ghi", + ), + pubsub.ListTopicSnapshotsResponse(snapshots=[str(), str(),],), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", ""),)), + ) + pager = client.list_topic_snapshots(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, str) for i in results) + + +def test_list_topic_snapshots_pages(): + client = PublisherClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_topic_snapshots), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListTopicSnapshotsResponse( + snapshots=[str(), str(), str(),], next_page_token="abc", + ), + pubsub.ListTopicSnapshotsResponse(snapshots=[], next_page_token="def",), + pubsub.ListTopicSnapshotsResponse( + snapshots=[str(),], next_page_token="ghi", + ), + pubsub.ListTopicSnapshotsResponse(snapshots=[str(), str(),],), + RuntimeError, + ) + pages = list(client.list_topic_snapshots(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_topic_snapshots_async_pager(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_topic_snapshots), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListTopicSnapshotsResponse( + snapshots=[str(), str(), str(),], next_page_token="abc", + ), + pubsub.ListTopicSnapshotsResponse(snapshots=[], next_page_token="def",), + pubsub.ListTopicSnapshotsResponse( + snapshots=[str(),], next_page_token="ghi", + ), + pubsub.ListTopicSnapshotsResponse(snapshots=[str(), str(),],), + RuntimeError, + ) + async_pager = await client.list_topic_snapshots(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, str) for i in responses) + + +@pytest.mark.asyncio +async def test_list_topic_snapshots_async_pages(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.list_topic_snapshots), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListTopicSnapshotsResponse( + snapshots=[str(), str(), str(),], next_page_token="abc", + ), + pubsub.ListTopicSnapshotsResponse(snapshots=[], next_page_token="def",), + pubsub.ListTopicSnapshotsResponse( + snapshots=[str(),], next_page_token="ghi", + ), + pubsub.ListTopicSnapshotsResponse(snapshots=[str(), str(),],), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_topic_snapshots(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_delete_topic(transport: str = "grpc", request_type=pubsub.DeleteTopicRequest): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.DeleteTopicRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_topic_from_dict(): + test_delete_topic(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_topic_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.DeleteTopicRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_topic), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_topic_field_headers(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.DeleteTopicRequest() + request.topic = "topic/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_topic), "__call__") as call: + call.return_value = None + + client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_topic_field_headers_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.DeleteTopicRequest() + request.topic = "topic/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_topic), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + + +def test_delete_topic_flattened(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_topic(topic="topic_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].topic == "topic_value" + + +def test_delete_topic_flattened_error(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_topic( + pubsub.DeleteTopicRequest(), topic="topic_value", + ) + + +@pytest.mark.asyncio +async def test_delete_topic_flattened_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_topic), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_topic(topic="topic_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].topic == "topic_value" + + +@pytest.mark.asyncio +async def test_delete_topic_flattened_error_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_topic( + pubsub.DeleteTopicRequest(), topic="topic_value", + ) + + +def test_detach_subscription( + transport: str = "grpc", request_type=pubsub.DetachSubscriptionRequest +): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.detach_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = pubsub.DetachSubscriptionResponse() + + response = client.detach_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.DetachSubscriptionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.DetachSubscriptionResponse) + + +def test_detach_subscription_from_dict(): + test_detach_subscription(request_type=dict) + + +@pytest.mark.asyncio +async def test_detach_subscription_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.DetachSubscriptionRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.detach_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.DetachSubscriptionResponse() + ) + + response = await client.detach_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.DetachSubscriptionResponse) + + +def test_detach_subscription_field_headers(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = pubsub.DetachSubscriptionRequest() + request.subscription = "subscription/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.detach_subscription), "__call__" + ) as call: + call.return_value = pubsub.DetachSubscriptionResponse() + + client.detach_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ + "metadata" + ] + + +@pytest.mark.asyncio +async def test_detach_subscription_field_headers_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.DetachSubscriptionRequest() + request.subscription = "subscription/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.detach_subscription), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.DetachSubscriptionResponse() + ) + + await client.detach_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ + "metadata" + ] + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.PublisherGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.PublisherGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PublisherClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.PublisherGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PublisherClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.PublisherGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + client = PublisherClient(transport=transport) + assert client._transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.PublisherGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.PublisherGrpcAsyncIOTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client._transport, transports.PublisherGrpcTransport,) + + +def test_publisher_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.PublisherTransport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_publisher_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.pubsub_v1.services.publisher.transports.PublisherTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.PublisherTransport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_topic", + "update_topic", + "publish", + "get_topic", + "list_topics", + "list_topic_subscriptions", + "list_topic_snapshots", + "delete_topic", + "detach_subscription", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +def test_publisher_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.pubsub_v1.services.publisher.transports.PublisherTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.PublisherTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + 
quota_project_id="octopus", + ) + + +def test_publisher_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + PublisherClient() + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id=None, + ) + + +def test_publisher_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.PublisherGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id="octopus", + ) + + +def test_publisher_host_no_port(): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="pubsub.googleapis.com" + ), + ) + assert client._transport._host == "pubsub.googleapis.com:443" + + +def test_publisher_host_with_port(): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="pubsub.googleapis.com:8000" + ), + ) + assert client._transport._host == "pubsub.googleapis.com:8000" + + +def test_publisher_grpc_transport_channel(): + channel = grpc.insecure_channel("http://localhost/") + + # Check that if channel is provided, mtls endpoint and client_cert_source + # won't be used. 
+ callback = mock.MagicMock() + transport = transports.PublisherGrpcTransport( + host="squid.clam.whelk", + channel=channel, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=callback, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert not callback.called + + +def test_publisher_grpc_asyncio_transport_channel(): + channel = aio.insecure_channel("http://localhost/") + + # Check that if channel is provided, mtls endpoint and client_cert_source + # won't be used. + callback = mock.MagicMock() + transport = transports.PublisherGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=callback, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert not callback.called + + +@mock.patch("grpc.ssl_channel_credentials", autospec=True) +@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) +def test_publisher_grpc_transport_channel_mtls_with_client_cert_source( + grpc_create_channel, grpc_ssl_channel_cred +): + # Check that if channel is None, but api_mtls_endpoint and client_cert_source + # are provided, then a mTLS channel will be created. 
+ mock_cred = mock.Mock() + + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + transport = transports.PublisherGrpcTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@mock.patch("grpc.ssl_channel_credentials", autospec=True) +@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) +def test_publisher_grpc_asyncio_transport_channel_mtls_with_client_cert_source( + grpc_create_channel, grpc_ssl_channel_cred +): + # Check that if channel is None, but api_mtls_endpoint and client_cert_source + # are provided, then a mTLS channel will be created. 
+ mock_cred = mock.Mock() + + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + transport = transports.PublisherGrpcAsyncIOTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize( + "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] +) +@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) +def test_publisher_grpc_transport_channel_mtls_with_adc( + grpc_create_channel, api_mtls_endpoint +): + # Check that if channel and client_cert_source are None, but api_mtls_endpoint + # is provided, then a mTLS channel will be created with SSL ADC. + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + # Mock google.auth.transport.grpc.SslCredentials class. 
+ mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + mock_cred = mock.Mock() + transport = transports.PublisherGrpcTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint=api_mtls_endpoint, + client_cert_source=None, + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize( + "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] +) +@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) +def test_publisher_grpc_asyncio_transport_channel_mtls_with_adc( + grpc_create_channel, api_mtls_endpoint +): + # Check that if channel and client_cert_source are None, but api_mtls_endpoint + # is provided, then a mTLS channel will be created with SSL ADC. + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + # Mock google.auth.transport.grpc.SslCredentials class. 
+ mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + mock_cred = mock.Mock() + transport = transports.PublisherGrpcAsyncIOTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint=api_mtls_endpoint, + client_cert_source=None, + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_topic_path(): + project = "squid" + topic = "clam" + + expected = "projects/{project}/topics/{topic}".format(project=project, topic=topic,) + actual = PublisherClient.topic_path(project, topic) + assert expected == actual + + +def test_parse_topic_path(): + expected = { + "project": "whelk", + "topic": "octopus", + } + path = PublisherClient.topic_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PublisherClient.parse_topic_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.PublisherTransport, "_prep_wrapped_messages" + ) as prep: + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.PublisherTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = PublisherClient.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_set_iam_policy(transport: str = "grpc"): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.set_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_field_headers(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + call.return_value = policy.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.set_iam_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_set_iam_policy_from_dict(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + call.return_value = policy.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.get_iam_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy.TestIamPermissionsResponse(permissions=["permissions_value"],) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = iam_policy.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() diff --git a/tests/unit/gapic/pubsub_v1/test_subscriber.py b/tests/unit/gapic/pubsub_v1/test_subscriber.py new file mode 100644 index 000000000..be5460b48 --- /dev/null +++ b/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -0,0 +1,4394 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + +from google import auth +from google.api_core import client_options +from google.api_core import exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import options_pb2 as options # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.pubsub_v1.services.subscriber import SubscriberAsyncClient +from google.pubsub_v1.services.subscriber import SubscriberClient +from google.pubsub_v1.services.subscriber import pagers +from google.pubsub_v1.services.subscriber import transports +from google.pubsub_v1.types import pubsub + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SubscriberClient._get_default_mtls_endpoint(None) is None + assert ( + SubscriberClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + SubscriberClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + SubscriberClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SubscriberClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert SubscriberClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [SubscriberClient, SubscriberAsyncClient]) +def test_subscriber_client_from_service_account_file(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client._transport._credentials == creds + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client._transport._credentials == creds + + assert client._transport._host == "pubsub.googleapis.com:443" + + +def test_subscriber_client_get_transport_class(): + transport = SubscriberClient.get_transport_class() + assert transport == transports.SubscriberGrpcTransport + + transport = SubscriberClient.get_transport_class("grpc") + assert transport == transports.SubscriberGrpcTransport + + +@pytest.mark.parametrize( + 
"client_class,transport_class,transport_name", + [ + (SubscriberClient, transports.SubscriberGrpcTransport, "grpc"), + ( + SubscriberAsyncClient, + transports.SubscriberGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + SubscriberClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SubscriberClient) +) +@mock.patch.object( + SubscriberAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SubscriberAsyncClient), +) +def test_subscriber_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SubscriberClient, "get_transport_class") as gtc: + transport = transport_class(credentials=credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SubscriberClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + api_mtls_endpoint="squid.clam.whelk", + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", and client_cert_source is provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=client_cert_source_callback, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", and default_client_cert_source is provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", but client_cert_source and default_client_cert_source are None. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SubscriberClient, transports.SubscriberGrpcTransport, "grpc"), + ( + SubscriberAsyncClient, + transports.SubscriberGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_subscriber_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SubscriberClient, transports.SubscriberGrpcTransport, "grpc"), + ( + SubscriberAsyncClient, + transports.SubscriberGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_subscriber_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_subscriber_client_client_options_from_dict(): + with mock.patch( + "google.pubsub_v1.services.subscriber.transports.SubscriberGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = SubscriberClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + api_mtls_endpoint="squid.clam.whelk", + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_create_subscription(transport: str = "grpc", request_type=pubsub.Subscription): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + ) + + response = client.create_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.Subscription() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Subscription) + + assert response.name == "name_value" + + assert response.topic == "topic_value" + + assert response.ack_deadline_seconds == 2066 + + assert response.retain_acked_messages is True + + assert response.enable_message_ordering is True + + assert response.filter == "filter_value" + + assert response.detached is True + + +def test_create_subscription_from_dict(): + test_create_subscription(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_subscription_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.Subscription() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + ) + ) + + response = await client.create_subscription(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Subscription) + + assert response.name == "name_value" + + assert response.topic == "topic_value" + + assert response.ack_deadline_seconds == 2066 + + assert response.retain_acked_messages is True + + assert response.enable_message_ordering is True + + assert response.filter == "filter_value" + + assert response.detached is True + + +def test_create_subscription_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.Subscription() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_subscription), "__call__" + ) as call: + call.return_value = pubsub.Subscription() + + client.create_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_subscription_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.Subscription() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.create_subscription), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) + + await client.create_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_create_subscription_flattened(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Subscription() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_subscription( + name="name_value", + topic="topic_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ack_deadline_seconds=2066, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].topic == "topic_value" + + assert args[0].push_config == pubsub.PushConfig( + push_endpoint="push_endpoint_value" + ) + + assert args[0].ack_deadline_seconds == 2066 + + +def test_create_subscription_flattened_error(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_subscription( + pubsub.Subscription(), + name="name_value", + topic="topic_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ack_deadline_seconds=2066, + ) + + +@pytest.mark.asyncio +async def test_create_subscription_flattened_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Subscription() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_subscription( + name="name_value", + topic="topic_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ack_deadline_seconds=2066, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].topic == "topic_value" + + assert args[0].push_config == pubsub.PushConfig( + push_endpoint="push_endpoint_value" + ) + + assert args[0].ack_deadline_seconds == 2066 + + +@pytest.mark.asyncio +async def test_create_subscription_flattened_error_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_subscription( + pubsub.Subscription(), + name="name_value", + topic="topic_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ack_deadline_seconds=2066, + ) + + +def test_get_subscription( + transport: str = "grpc", request_type=pubsub.GetSubscriptionRequest +): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + ) + + response = client.get_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.GetSubscriptionRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Subscription) + + assert response.name == "name_value" + + assert response.topic == "topic_value" + + assert response.ack_deadline_seconds == 2066 + + assert response.retain_acked_messages is True + + assert response.enable_message_ordering is True + + assert response.filter == "filter_value" + + assert response.detached is True + + +def test_get_subscription_from_dict(): + test_get_subscription(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_subscription_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.GetSubscriptionRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + ) + ) + + response = await client.get_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Subscription) + + assert response.name == "name_value" + + assert response.topic == "topic_value" + + assert response.ack_deadline_seconds == 2066 + + assert response.retain_acked_messages is True + + assert response.enable_message_ordering is True + + assert response.filter == "filter_value" + + assert response.detached is True + + +def test_get_subscription_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.GetSubscriptionRequest() + request.subscription = "subscription/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_subscription), "__call__" + ) as call: + call.return_value = pubsub.Subscription() + + client.get_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ + "metadata" + ] + + +@pytest.mark.asyncio +async def test_get_subscription_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.GetSubscriptionRequest() + request.subscription = "subscription/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.get_subscription), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) + + await client.get_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ + "metadata" + ] + + +def test_get_subscription_flattened(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Subscription() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_subscription(subscription="subscription_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].subscription == "subscription_value" + + +def test_get_subscription_flattened_error(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_subscription( + pubsub.GetSubscriptionRequest(), subscription="subscription_value", + ) + + +@pytest.mark.asyncio +async def test_get_subscription_flattened_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.get_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Subscription() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_subscription(subscription="subscription_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].subscription == "subscription_value" + + +@pytest.mark.asyncio +async def test_get_subscription_flattened_error_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_subscription( + pubsub.GetSubscriptionRequest(), subscription="subscription_value", + ) + + +def test_update_subscription( + transport: str = "grpc", request_type=pubsub.UpdateSubscriptionRequest +): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.update_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + ) + + response = client.update_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.UpdateSubscriptionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Subscription) + + assert response.name == "name_value" + + assert response.topic == "topic_value" + + assert response.ack_deadline_seconds == 2066 + + assert response.retain_acked_messages is True + + assert response.enable_message_ordering is True + + assert response.filter == "filter_value" + + assert response.detached is True + + +def test_update_subscription_from_dict(): + test_update_subscription(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_subscription_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.UpdateSubscriptionRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + ) + ) + + response = await client.update_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Subscription) + + assert response.name == "name_value" + + assert response.topic == "topic_value" + + assert response.ack_deadline_seconds == 2066 + + assert response.retain_acked_messages is True + + assert response.enable_message_ordering is True + + assert response.filter == "filter_value" + + assert response.detached is True + + +def test_update_subscription_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.UpdateSubscriptionRequest() + request.subscription.name = "subscription.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.update_subscription), "__call__" + ) as call: + call.return_value = pubsub.Subscription() + + client.update_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "subscription.name=subscription.name/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_subscription_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.UpdateSubscriptionRequest() + request.subscription.name = "subscription.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_subscription), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) + + await client.update_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "subscription.name=subscription.name/value", + ) in kw["metadata"] + + +def test_list_subscriptions( + transport: str = "grpc", request_type=pubsub.ListSubscriptionsRequest +): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = pubsub.ListSubscriptionsResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.ListSubscriptionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSubscriptionsPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_subscriptions_from_dict(): + test_list_subscriptions(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_subscriptions_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.ListSubscriptionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListSubscriptionsResponse(next_page_token="next_page_token_value",) + ) + + response = await client.list_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSubscriptionsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_subscriptions_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListSubscriptionsRequest() + request.project = "project/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_subscriptions), "__call__" + ) as call: + call.return_value = pubsub.ListSubscriptionsResponse() + + client.list_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "project=project/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_subscriptions_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListSubscriptionsRequest() + request.project = "project/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_subscriptions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListSubscriptionsResponse() + ) + + await client.list_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "project=project/value",) in kw["metadata"] + + +def test_list_subscriptions_flattened(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListSubscriptionsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_subscriptions(project="project_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].project == "project_value" + + +def test_list_subscriptions_flattened_error(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_subscriptions( + pubsub.ListSubscriptionsRequest(), project="project_value", + ) + + +@pytest.mark.asyncio +async def test_list_subscriptions_flattened_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListSubscriptionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListSubscriptionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.list_subscriptions(project="project_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].project == "project_value" + + +@pytest.mark.asyncio +async def test_list_subscriptions_flattened_error_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_subscriptions( + pubsub.ListSubscriptionsRequest(), project="project_value", + ) + + +def test_list_subscriptions_pager(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_subscriptions), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + pubsub.Subscription(), + ], + next_page_token="abc", + ), + pubsub.ListSubscriptionsResponse(subscriptions=[], next_page_token="def",), + pubsub.ListSubscriptionsResponse( + subscriptions=[pubsub.Subscription(),], next_page_token="ghi", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[pubsub.Subscription(), pubsub.Subscription(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", ""),)), + ) + pager = client.list_subscriptions(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, pubsub.Subscription) for i in results) + + +def test_list_subscriptions_pages(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_subscriptions), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + pubsub.Subscription(), + ], + next_page_token="abc", + ), + pubsub.ListSubscriptionsResponse(subscriptions=[], next_page_token="def",), + pubsub.ListSubscriptionsResponse( + subscriptions=[pubsub.Subscription(),], next_page_token="ghi", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[pubsub.Subscription(), pubsub.Subscription(),], + ), + RuntimeError, + ) + pages = list(client.list_subscriptions(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_subscriptions_async_pager(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_subscriptions), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + pubsub.Subscription(), + ], + next_page_token="abc", + ), + pubsub.ListSubscriptionsResponse(subscriptions=[], next_page_token="def",), + pubsub.ListSubscriptionsResponse( + subscriptions=[pubsub.Subscription(),], next_page_token="ghi", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[pubsub.Subscription(), pubsub.Subscription(),], + ), + RuntimeError, + ) + async_pager = await client.list_subscriptions(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, pubsub.Subscription) for i in responses) + + +@pytest.mark.asyncio +async def test_list_subscriptions_async_pages(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_subscriptions), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + pubsub.Subscription(), + ], + next_page_token="abc", + ), + pubsub.ListSubscriptionsResponse(subscriptions=[], next_page_token="def",), + pubsub.ListSubscriptionsResponse( + subscriptions=[pubsub.Subscription(),], next_page_token="ghi", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[pubsub.Subscription(), pubsub.Subscription(),], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_subscriptions(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_delete_subscription( + transport: str = "grpc", request_type=pubsub.DeleteSubscriptionRequest +): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.DeleteSubscriptionRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_subscription_from_dict(): + test_delete_subscription(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_subscription_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.DeleteSubscriptionRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_subscription_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.DeleteSubscriptionRequest() + request.subscription = "subscription/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_subscription), "__call__" + ) as call: + call.return_value = None + + client.delete_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ + "metadata" + ] + + +@pytest.mark.asyncio +async def test_delete_subscription_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.DeleteSubscriptionRequest() + request.subscription = "subscription/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_subscription), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ + "metadata" + ] + + +def test_delete_subscription_flattened(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_subscription(subscription="subscription_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].subscription == "subscription_value" + + +def test_delete_subscription_flattened_error(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_subscription( + pubsub.DeleteSubscriptionRequest(), subscription="subscription_value", + ) + + +@pytest.mark.asyncio +async def test_delete_subscription_flattened_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_subscription(subscription="subscription_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].subscription == "subscription_value" + + +@pytest.mark.asyncio +async def test_delete_subscription_flattened_error_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_subscription( + pubsub.DeleteSubscriptionRequest(), subscription="subscription_value", + ) + + +def test_modify_ack_deadline( + transport: str = "grpc", request_type=pubsub.ModifyAckDeadlineRequest +): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.modify_ack_deadline), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.modify_ack_deadline(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.ModifyAckDeadlineRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_modify_ack_deadline_from_dict(): + test_modify_ack_deadline(request_type=dict) + + +@pytest.mark.asyncio +async def test_modify_ack_deadline_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.ModifyAckDeadlineRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.modify_ack_deadline), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.modify_ack_deadline(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_modify_ack_deadline_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ModifyAckDeadlineRequest() + request.subscription = "subscription/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.modify_ack_deadline), "__call__" + ) as call: + call.return_value = None + + client.modify_ack_deadline(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ + "metadata" + ] + + +@pytest.mark.asyncio +async def test_modify_ack_deadline_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ModifyAckDeadlineRequest() + request.subscription = "subscription/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.modify_ack_deadline), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.modify_ack_deadline(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ + "metadata" + ] + + +def test_modify_ack_deadline_flattened(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.modify_ack_deadline), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.modify_ack_deadline( + subscription="subscription_value", + ack_ids=["ack_ids_value"], + ack_deadline_seconds=2066, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].subscription == "subscription_value" + + assert args[0].ack_ids == ["ack_ids_value"] + + assert args[0].ack_deadline_seconds == 2066 + + +def test_modify_ack_deadline_flattened_error(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.modify_ack_deadline( + pubsub.ModifyAckDeadlineRequest(), + subscription="subscription_value", + ack_ids=["ack_ids_value"], + ack_deadline_seconds=2066, + ) + + +@pytest.mark.asyncio +async def test_modify_ack_deadline_flattened_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.modify_ack_deadline), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.modify_ack_deadline( + subscription="subscription_value", + ack_ids=["ack_ids_value"], + ack_deadline_seconds=2066, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].subscription == "subscription_value" + + assert args[0].ack_ids == ["ack_ids_value"] + + assert args[0].ack_deadline_seconds == 2066 + + +@pytest.mark.asyncio +async def test_modify_ack_deadline_flattened_error_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.modify_ack_deadline( + pubsub.ModifyAckDeadlineRequest(), + subscription="subscription_value", + ack_ids=["ack_ids_value"], + ack_deadline_seconds=2066, + ) + + +def test_acknowledge(transport: str = "grpc", request_type=pubsub.AcknowledgeRequest): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.acknowledge), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.acknowledge(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.AcknowledgeRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_acknowledge_from_dict(): + test_acknowledge(request_type=dict) + + +@pytest.mark.asyncio +async def test_acknowledge_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.AcknowledgeRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.acknowledge), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.acknowledge(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_acknowledge_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.AcknowledgeRequest() + request.subscription = "subscription/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.acknowledge), "__call__") as call: + call.return_value = None + + client.acknowledge(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ + "metadata" + ] + + +@pytest.mark.asyncio +async def test_acknowledge_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.AcknowledgeRequest() + request.subscription = "subscription/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.acknowledge), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.acknowledge(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ + "metadata" + ] + + +def test_acknowledge_flattened(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.acknowledge), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.acknowledge( + subscription="subscription_value", ack_ids=["ack_ids_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].subscription == "subscription_value" + + assert args[0].ack_ids == ["ack_ids_value"] + + +def test_acknowledge_flattened_error(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.acknowledge( + pubsub.AcknowledgeRequest(), + subscription="subscription_value", + ack_ids=["ack_ids_value"], + ) + + +@pytest.mark.asyncio +async def test_acknowledge_flattened_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.acknowledge), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.acknowledge( + subscription="subscription_value", ack_ids=["ack_ids_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].subscription == "subscription_value" + + assert args[0].ack_ids == ["ack_ids_value"] + + +@pytest.mark.asyncio +async def test_acknowledge_flattened_error_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.acknowledge( + pubsub.AcknowledgeRequest(), + subscription="subscription_value", + ack_ids=["ack_ids_value"], + ) + + +def test_pull(transport: str = "grpc", request_type=pubsub.PullRequest): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.pull), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.PullResponse() + + response = client.pull(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.PullRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.PullResponse) + + +def test_pull_from_dict(): + test_pull(request_type=dict) + + +@pytest.mark.asyncio +async def test_pull_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.PullRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._client._transport.pull), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PullResponse()) + + response = await client.pull(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.PullResponse) + + +def test_pull_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.PullRequest() + request.subscription = "subscription/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.pull), "__call__") as call: + call.return_value = pubsub.PullResponse() + + client.pull(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ + "metadata" + ] + + +@pytest.mark.asyncio +async def test_pull_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.PullRequest() + request.subscription = "subscription/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._client._transport.pull), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PullResponse()) + + await client.pull(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ + "metadata" + ] + + +def test_pull_flattened(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.pull), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.PullResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.pull( + subscription="subscription_value", + return_immediately=True, + max_messages=1277, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].subscription == "subscription_value" + + assert args[0].return_immediately == True + + assert args[0].max_messages == 1277 + + +def test_pull_flattened_error(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.pull( + pubsub.PullRequest(), + subscription="subscription_value", + return_immediately=True, + max_messages=1277, + ) + + +@pytest.mark.asyncio +async def test_pull_flattened_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._client._transport.pull), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.PullResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PullResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.pull( + subscription="subscription_value", + return_immediately=True, + max_messages=1277, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].subscription == "subscription_value" + + assert args[0].return_immediately == True + + assert args[0].max_messages == 1277 + + +@pytest.mark.asyncio +async def test_pull_flattened_error_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.pull( + pubsub.PullRequest(), + subscription="subscription_value", + return_immediately=True, + max_messages=1277, + ) + + +def test_streaming_pull( + transport: str = "grpc", request_type=pubsub.StreamingPullRequest +): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.streaming_pull), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = iter([pubsub.StreamingPullResponse()]) + + response = client.streaming_pull(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert next(args[0]) == request + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, pubsub.StreamingPullResponse) + + +def test_streaming_pull_from_dict(): + test_streaming_pull(request_type=dict) + + +@pytest.mark.asyncio +async def test_streaming_pull_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.StreamingPullRequest() + + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.streaming_pull), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[pubsub.StreamingPullResponse()] + ) + + response = await client.streaming_pull(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert next(args[0]) == request + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, pubsub.StreamingPullResponse) + + +def test_modify_push_config( + transport: str = "grpc", request_type=pubsub.ModifyPushConfigRequest +): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.modify_push_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.modify_push_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.ModifyPushConfigRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_modify_push_config_from_dict(): + test_modify_push_config(request_type=dict) + + +@pytest.mark.asyncio +async def test_modify_push_config_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = pubsub.ModifyPushConfigRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.modify_push_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.modify_push_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_modify_push_config_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ModifyPushConfigRequest() + request.subscription = "subscription/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.modify_push_config), "__call__" + ) as call: + call.return_value = None + + client.modify_push_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ + "metadata" + ] + + +@pytest.mark.asyncio +async def test_modify_push_config_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ModifyPushConfigRequest() + request.subscription = "subscription/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.modify_push_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.modify_push_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ + "metadata" + ] + + +def test_modify_push_config_flattened(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.modify_push_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.modify_push_config( + subscription="subscription_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].subscription == "subscription_value" + + assert args[0].push_config == pubsub.PushConfig( + push_endpoint="push_endpoint_value" + ) + + +def test_modify_push_config_flattened_error(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.modify_push_config( + pubsub.ModifyPushConfigRequest(), + subscription="subscription_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ) + + +@pytest.mark.asyncio +async def test_modify_push_config_flattened_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.modify_push_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.modify_push_config( + subscription="subscription_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].subscription == "subscription_value" + + assert args[0].push_config == pubsub.PushConfig( + push_endpoint="push_endpoint_value" + ) + + +@pytest.mark.asyncio +async def test_modify_push_config_flattened_error_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.modify_push_config( + pubsub.ModifyPushConfigRequest(), + subscription="subscription_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ) + + +def test_get_snapshot(transport: str = "grpc", request_type=pubsub.GetSnapshotRequest): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot(name="name_value", topic="topic_value",) + + response = client.get_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.GetSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) + + assert response.name == "name_value" + + assert response.topic == "topic_value" + + +def test_get_snapshot_from_dict(): + test_get_snapshot(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_snapshot_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.GetSnapshotRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Snapshot(name="name_value", topic="topic_value",) + ) + + response = await client.get_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) + + assert response.name == "name_value" + + assert response.topic == "topic_value" + + +def test_get_snapshot_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.GetSnapshotRequest() + request.snapshot = "snapshot/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_snapshot), "__call__") as call: + call.return_value = pubsub.Snapshot() + + client.get_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "snapshot=snapshot/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_snapshot_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.GetSnapshotRequest() + request.snapshot = "snapshot/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.get_snapshot), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) + + await client.get_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "snapshot=snapshot/value",) in kw["metadata"] + + +def test_get_snapshot_flattened(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_snapshot(snapshot="snapshot_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].snapshot == "snapshot_value" + + +def test_get_snapshot_flattened_error(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_snapshot( + pubsub.GetSnapshotRequest(), snapshot="snapshot_value", + ) + + +@pytest.mark.asyncio +async def test_get_snapshot_flattened_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.get_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_snapshot(snapshot="snapshot_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].snapshot == "snapshot_value" + + +@pytest.mark.asyncio +async def test_get_snapshot_flattened_error_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_snapshot( + pubsub.GetSnapshotRequest(), snapshot="snapshot_value", + ) + + +def test_list_snapshots( + transport: str = "grpc", request_type=pubsub.ListSnapshotsRequest +): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_snapshots), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListSnapshotsResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.ListSnapshotsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSnapshotsPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_snapshots_from_dict(): + test_list_snapshots(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_snapshots_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.ListSnapshotsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_snapshots), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListSnapshotsResponse(next_page_token="next_page_token_value",) + ) + + response = await client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSnapshotsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_snapshots_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListSnapshotsRequest() + request.project = "project/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client._transport.list_snapshots), "__call__") as call: + call.return_value = pubsub.ListSnapshotsResponse() + + client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "project=project/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_snapshots_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListSnapshotsRequest() + request.project = "project/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_snapshots), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListSnapshotsResponse() + ) + + await client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "project=project/value",) in kw["metadata"] + + +def test_list_snapshots_flattened(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_snapshots), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListSnapshotsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.list_snapshots(project="project_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].project == "project_value" + + +def test_list_snapshots_flattened_error(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_snapshots( + pubsub.ListSnapshotsRequest(), project="project_value", + ) + + +@pytest.mark.asyncio +async def test_list_snapshots_flattened_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_snapshots), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListSnapshotsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListSnapshotsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_snapshots(project="project_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].project == "project_value" + + +@pytest.mark.asyncio +async def test_list_snapshots_flattened_error_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        await client.list_snapshots(
+            pubsub.ListSnapshotsRequest(), project="project_value",
+        )
+
+
+def test_list_snapshots_pager():
+    client = SubscriberClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.list_snapshots), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            pubsub.ListSnapshotsResponse(
+                snapshots=[pubsub.Snapshot(), pubsub.Snapshot(), pubsub.Snapshot(),],
+                next_page_token="abc",
+            ),
+            pubsub.ListSnapshotsResponse(snapshots=[], next_page_token="def",),
+            pubsub.ListSnapshotsResponse(
+                snapshots=[pubsub.Snapshot(),], next_page_token="ghi",
+            ),
+            pubsub.ListSnapshotsResponse(
+                snapshots=[pubsub.Snapshot(), pubsub.Snapshot(),],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("project", ""),)),
+        )
+        pager = client.list_snapshots(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, pubsub.Snapshot) for i in results)
+
+
+def test_list_snapshots_pages():
+    client = SubscriberClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.list_snapshots), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            pubsub.ListSnapshotsResponse(
+                snapshots=[pubsub.Snapshot(), pubsub.Snapshot(), pubsub.Snapshot(),],
+                next_page_token="abc",
+            ),
+            pubsub.ListSnapshotsResponse(snapshots=[], next_page_token="def",),
+            pubsub.ListSnapshotsResponse(
+                snapshots=[pubsub.Snapshot(),], next_page_token="ghi",
+            ),
+            pubsub.ListSnapshotsResponse(
+                snapshots=[pubsub.Snapshot(), pubsub.Snapshot(),],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_snapshots(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_snapshots_async_pager():
+    client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.list_snapshots),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            pubsub.ListSnapshotsResponse(
+                snapshots=[pubsub.Snapshot(), pubsub.Snapshot(), pubsub.Snapshot(),],
+                next_page_token="abc",
+            ),
+            pubsub.ListSnapshotsResponse(snapshots=[], next_page_token="def",),
+            pubsub.ListSnapshotsResponse(
+                snapshots=[pubsub.Snapshot(),], next_page_token="ghi",
+            ),
+            pubsub.ListSnapshotsResponse(
+                snapshots=[pubsub.Snapshot(), pubsub.Snapshot(),],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_snapshots(request={},)
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, pubsub.Snapshot) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_snapshots_async_pages():
+    client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client._client._transport.list_snapshots), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListSnapshotsResponse( + snapshots=[pubsub.Snapshot(), pubsub.Snapshot(), pubsub.Snapshot(),], + next_page_token="abc", + ), + pubsub.ListSnapshotsResponse(snapshots=[], next_page_token="def",), + pubsub.ListSnapshotsResponse( + snapshots=[pubsub.Snapshot(),], next_page_token="ghi", + ), + pubsub.ListSnapshotsResponse( + snapshots=[pubsub.Snapshot(), pubsub.Snapshot(),], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_snapshots(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_create_snapshot( + transport: str = "grpc", request_type=pubsub.CreateSnapshotRequest +): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot(name="name_value", topic="topic_value",) + + response = client.create_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.CreateSnapshotRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Snapshot) + + assert response.name == "name_value" + + assert response.topic == "topic_value" + + +def test_create_snapshot_from_dict(): + test_create_snapshot(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_snapshot_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.CreateSnapshotRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Snapshot(name="name_value", topic="topic_value",) + ) + + response = await client.create_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) + + assert response.name == "name_value" + + assert response.topic == "topic_value" + + +def test_create_snapshot_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.CreateSnapshotRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_snapshot), "__call__") as call: + call.return_value = pubsub.Snapshot() + + client.create_snapshot(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_snapshot_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.CreateSnapshotRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_snapshot), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) + + await client.create_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_create_snapshot_flattened(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_snapshot( + name="name_value", subscription="subscription_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].subscription == "subscription_value" + + +def test_create_snapshot_flattened_error(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_snapshot( + pubsub.CreateSnapshotRequest(), + name="name_value", + subscription="subscription_value", + ) + + +@pytest.mark.asyncio +async def test_create_snapshot_flattened_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_snapshot( + name="name_value", subscription="subscription_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].subscription == "subscription_value" + + +@pytest.mark.asyncio +async def test_create_snapshot_flattened_error_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_snapshot( + pubsub.CreateSnapshotRequest(), + name="name_value", + subscription="subscription_value", + ) + + +def test_update_snapshot( + transport: str = "grpc", request_type=pubsub.UpdateSnapshotRequest +): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.update_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot(name="name_value", topic="topic_value",) + + response = client.update_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.UpdateSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) + + assert response.name == "name_value" + + assert response.topic == "topic_value" + + +def test_update_snapshot_from_dict(): + test_update_snapshot(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_snapshot_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.UpdateSnapshotRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Snapshot(name="name_value", topic="topic_value",) + ) + + response = await client.update_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) + + assert response.name == "name_value" + + assert response.topic == "topic_value" + + +def test_update_snapshot_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.UpdateSnapshotRequest() + request.snapshot.name = "snapshot.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.update_snapshot), "__call__") as call: + call.return_value = pubsub.Snapshot() + + client.update_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "snapshot.name=snapshot.name/value",) in kw[ + "metadata" + ] + + +@pytest.mark.asyncio +async def test_update_snapshot_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.UpdateSnapshotRequest() + request.snapshot.name = "snapshot.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.update_snapshot), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) + + await client.update_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "snapshot.name=snapshot.name/value",) in kw[ + "metadata" + ] + + +def test_delete_snapshot( + transport: str = "grpc", request_type=pubsub.DeleteSnapshotRequest +): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.DeleteSnapshotRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_snapshot_from_dict(): + test_delete_snapshot(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_snapshot_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = pubsub.DeleteSnapshotRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_snapshot_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.DeleteSnapshotRequest() + request.snapshot = "snapshot/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_snapshot), "__call__") as call: + call.return_value = None + + client.delete_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "snapshot=snapshot/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_snapshot_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.DeleteSnapshotRequest() + request.snapshot = "snapshot/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.delete_snapshot), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "snapshot=snapshot/value",) in kw["metadata"] + + +def test_delete_snapshot_flattened(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_snapshot(snapshot="snapshot_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].snapshot == "snapshot_value" + + +def test_delete_snapshot_flattened_error(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_snapshot( + pubsub.DeleteSnapshotRequest(), snapshot="snapshot_value", + ) + + +@pytest.mark.asyncio +async def test_delete_snapshot_flattened_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.delete_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_snapshot(snapshot="snapshot_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].snapshot == "snapshot_value" + + +@pytest.mark.asyncio +async def test_delete_snapshot_flattened_error_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_snapshot( + pubsub.DeleteSnapshotRequest(), snapshot="snapshot_value", + ) + + +def test_seek(transport: str = "grpc", request_type=pubsub.SeekRequest): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.seek), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.SeekResponse() + + response = client.seek(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.SeekRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.SeekResponse) + + +def test_seek_from_dict(): + test_seek(request_type=dict) + + +@pytest.mark.asyncio +async def test_seek_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.SeekRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._client._transport.seek), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.SeekResponse()) + + response = await client.seek(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.SeekResponse) + + +def test_seek_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.SeekRequest() + request.subscription = "subscription/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.seek), "__call__") as call: + call.return_value = pubsub.SeekResponse() + + client.seek(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ + "metadata" + ] + + +@pytest.mark.asyncio +async def test_seek_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.SeekRequest() + request.subscription = "subscription/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._client._transport.seek), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.SeekResponse()) + + await client.seek(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ + "metadata" + ] + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SubscriberGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SubscriberGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SubscriberClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.SubscriberGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SubscriberClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SubscriberGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + client = SubscriberClient(transport=transport) + assert client._transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.SubscriberGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SubscriberGrpcAsyncIOTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client._transport, transports.SubscriberGrpcTransport,) + + +def test_subscriber_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.SubscriberTransport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_subscriber_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.pubsub_v1.services.subscriber.transports.SubscriberTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.SubscriberTransport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_subscription", + "get_subscription", + "update_subscription", + "list_subscriptions", + "delete_subscription", + "modify_ack_deadline", + "acknowledge", + "pull", + "streaming_pull", + "modify_push_config", + "get_snapshot", + "list_snapshots", + "create_snapshot", + "update_snapshot", + "delete_snapshot", + "seek", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +def test_subscriber_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.pubsub_v1.services.subscriber.transports.SubscriberTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.SubscriberTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id="octopus", + ) + + +def test_subscriber_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + SubscriberClient() + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id=None, + ) + + +def test_subscriber_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.SubscriberGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id="octopus", + ) + + +def test_subscriber_host_no_port(): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="pubsub.googleapis.com" + ), + ) + assert client._transport._host == "pubsub.googleapis.com:443" + + +def test_subscriber_host_with_port(): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="pubsub.googleapis.com:8000" + ), + ) + assert client._transport._host == "pubsub.googleapis.com:8000" + + +def test_subscriber_grpc_transport_channel(): + channel = grpc.insecure_channel("http://localhost/") + + # Check that if channel is provided, mtls endpoint and client_cert_source + # won't be used. + callback = mock.MagicMock() + transport = transports.SubscriberGrpcTransport( + host="squid.clam.whelk", + channel=channel, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=callback, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert not callback.called + + +def test_subscriber_grpc_asyncio_transport_channel(): + channel = aio.insecure_channel("http://localhost/") + + # Check that if channel is provided, mtls endpoint and client_cert_source + # won't be used. 
+ callback = mock.MagicMock() + transport = transports.SubscriberGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=callback, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert not callback.called + + +@mock.patch("grpc.ssl_channel_credentials", autospec=True) +@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) +def test_subscriber_grpc_transport_channel_mtls_with_client_cert_source( + grpc_create_channel, grpc_ssl_channel_cred +): + # Check that if channel is None, but api_mtls_endpoint and client_cert_source + # are provided, then a mTLS channel will be created. + mock_cred = mock.Mock() + + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + transport = transports.SubscriberGrpcTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@mock.patch("grpc.ssl_channel_credentials", autospec=True) +@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) +def test_subscriber_grpc_asyncio_transport_channel_mtls_with_client_cert_source( + grpc_create_channel, grpc_ssl_channel_cred +): + # Check that if channel is None, but api_mtls_endpoint and client_cert_source + # are provided, then a mTLS channel 
will be created. + mock_cred = mock.Mock() + + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + transport = transports.SubscriberGrpcAsyncIOTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize( + "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] +) +@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) +def test_subscriber_grpc_transport_channel_mtls_with_adc( + grpc_create_channel, api_mtls_endpoint +): + # Check that if channel and client_cert_source are None, but api_mtls_endpoint + # is provided, then a mTLS channel will be created with SSL ADC. + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + # Mock google.auth.transport.grpc.SslCredentials class. 
+ mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + mock_cred = mock.Mock() + transport = transports.SubscriberGrpcTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint=api_mtls_endpoint, + client_cert_source=None, + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize( + "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] +) +@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) +def test_subscriber_grpc_asyncio_transport_channel_mtls_with_adc( + grpc_create_channel, api_mtls_endpoint +): + # Check that if channel and client_cert_source are None, but api_mtls_endpoint + # is provided, then a mTLS channel will be created with SSL ADC. + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + # Mock google.auth.transport.grpc.SslCredentials class. 
+ mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + mock_cred = mock.Mock() + transport = transports.SubscriberGrpcAsyncIOTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint=api_mtls_endpoint, + client_cert_source=None, + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_snapshot_path(): + project = "squid" + snapshot = "clam" + + expected = "projects/{project}/snapshots/{snapshot}".format( + project=project, snapshot=snapshot, + ) + actual = SubscriberClient.snapshot_path(project, snapshot) + assert expected == actual + + +def test_parse_snapshot_path(): + expected = { + "project": "whelk", + "snapshot": "octopus", + } + path = SubscriberClient.snapshot_path(**expected) + + # Check that the path construction is reversible. + actual = SubscriberClient.parse_snapshot_path(path) + assert expected == actual + + +def test_subscription_path(): + project = "squid" + subscription = "clam" + + expected = "projects/{project}/subscriptions/{subscription}".format( + project=project, subscription=subscription, + ) + actual = SubscriberClient.subscription_path(project, subscription) + assert expected == actual + + +def test_parse_subscription_path(): + expected = { + "project": "whelk", + "subscription": "octopus", + } + path = SubscriberClient.subscription_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SubscriberClient.parse_subscription_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.SubscriberTransport, "_prep_wrapped_messages" + ) as prep: + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.SubscriberTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = SubscriberClient.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_set_iam_policy(transport: str = "grpc"): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.set_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + call.return_value = policy.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.set_iam_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_set_iam_policy_from_dict(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + call.return_value = policy.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.get_iam_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy.TestIamPermissionsResponse(permissions=["permissions_value"],) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = iam_policy.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() diff --git a/tests/unit/gapic/v1/test_publisher_client_v1.py b/tests/unit/gapic/v1/test_publisher_client_v1.py deleted file mode 100644 index ad4f38d8b..000000000 --- a/tests/unit/gapic/v1/test_publisher_client_v1.py +++ /dev/null @@ -1,560 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Unit tests.""" - -import mock -import pytest - -from google.cloud.pubsub_v1.gapic import publisher_client -from google.cloud.pubsub_v1.proto import pubsub_pb2 -from google.iam.v1 import iam_policy_pb2 -from google.iam.v1 import policy_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestPublisherClient(object): - def test_create_topic(self): - # Setup Expected Response - name_2 = "name2-1052831874" - kms_key_name = "kmsKeyName2094986649" - expected_response = {"name": name_2, "kms_key_name": kms_key_name} - expected_response = pubsub_pb2.Topic(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - name = client.topic_path("[PROJECT]", "[TOPIC]") - - response = client.create_topic(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = 
pubsub_pb2.Topic(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_topic_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - name = client.topic_path("[PROJECT]", "[TOPIC]") - - with pytest.raises(CustomException): - client.create_topic(name) - - def test_update_topic(self): - # Setup Expected Response - name = "name3373707" - kms_key_name = "kmsKeyName2094986649" - expected_response = {"name": name, "kms_key_name": kms_key_name} - expected_response = pubsub_pb2.Topic(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - topic = {} - update_mask = {} - - response = client.update_topic(topic, update_mask) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.UpdateTopicRequest( - topic=topic, update_mask=update_mask - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_topic_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - topic = {} - update_mask = {} - - with pytest.raises(CustomException): - client.update_topic(topic, update_mask) - - def test_publish(self): - # Setup Expected Response - message_ids_element = 
"messageIdsElement-744837059" - message_ids = [message_ids_element] - expected_response = {"message_ids": message_ids} - expected_response = pubsub_pb2.PublishResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - data = b"-86" - messages_element = {"data": data} - messages = [messages_element] - - response = client.publish(topic, messages) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.PublishRequest(topic=topic, messages=messages) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_publish_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - data = b"-86" - messages_element = {"data": data} - messages = [messages_element] - - with pytest.raises(CustomException): - client.publish(topic, messages) - - def test_get_topic(self): - # Setup Expected Response - name = "name3373707" - kms_key_name = "kmsKeyName2094986649" - expected_response = {"name": name, "kms_key_name": kms_key_name} - expected_response = pubsub_pb2.Topic(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - topic = 
client.topic_path("[PROJECT]", "[TOPIC]") - - response = client.get_topic(topic) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.GetTopicRequest(topic=topic) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_topic_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - with pytest.raises(CustomException): - client.get_topic(topic) - - def test_list_topics(self): - # Setup Expected Response - next_page_token = "" - topics_element = {} - topics = [topics_element] - expected_response = {"next_page_token": next_page_token, "topics": topics} - expected_response = pubsub_pb2.ListTopicsResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - project = client.project_path("[PROJECT]") - - paged_list_response = client.list_topics(project) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.topics[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.ListTopicsRequest(project=project) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_topics_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = 
publisher_client.PublisherClient() - - # Setup request - project = client.project_path("[PROJECT]") - - paged_list_response = client.list_topics(project) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_list_topic_subscriptions(self): - # Setup Expected Response - next_page_token = "" - subscriptions_element = "subscriptionsElement1698708147" - subscriptions = [subscriptions_element] - expected_response = { - "next_page_token": next_page_token, - "subscriptions": subscriptions, - } - expected_response = pubsub_pb2.ListTopicSubscriptionsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - paged_list_response = client.list_topic_subscriptions(topic) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.subscriptions[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.ListTopicSubscriptionsRequest(topic=topic) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_topic_subscriptions_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - paged_list_response = client.list_topic_subscriptions(topic) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_list_topic_snapshots(self): - # Setup Expected Response - next_page_token = "" - snapshots_element = "snapshotsElement1339034092" - snapshots 
= [snapshots_element] - expected_response = {"next_page_token": next_page_token, "snapshots": snapshots} - expected_response = pubsub_pb2.ListTopicSnapshotsResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - paged_list_response = client.list_topic_snapshots(topic) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.snapshots[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.ListTopicSnapshotsRequest(topic=topic) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_topic_snapshots_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - paged_list_response = client.list_topic_snapshots(topic) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_delete_topic(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - client.delete_topic(topic) - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.DeleteTopicRequest(topic=topic) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_topic_exception(self): - # Mock the API 
response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - with pytest.raises(CustomException): - client.delete_topic(topic) - - def test_set_iam_policy(self): - # Setup Expected Response - version = 351608024 - etag = b"21" - expected_response = {"version": version, "etag": etag} - expected_response = policy_pb2.Policy(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - resource = "resource-341064690" - policy = {} - - response = client.set_iam_policy(resource, policy) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.SetIamPolicyRequest( - resource=resource, policy=policy - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_set_iam_policy_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - resource = "resource-341064690" - policy = {} - - with pytest.raises(CustomException): - client.set_iam_policy(resource, policy) - - def test_get_iam_policy(self): - # Setup Expected Response - version = 351608024 - etag = b"21" - expected_response = {"version": version, "etag": etag} - expected_response = policy_pb2.Policy(**expected_response) - - # Mock the API response - channel = 
ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - resource = "resource-341064690" - - response = client.get_iam_policy(resource) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_iam_policy_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - resource = "resource-341064690" - - with pytest.raises(CustomException): - client.get_iam_policy(resource) - - def test_test_iam_permissions(self): - # Setup Expected Response - expected_response = {} - expected_response = iam_policy_pb2.TestIamPermissionsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - resource = "resource-341064690" - permissions = [] - - response = client.test_iam_permissions(resource, permissions) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_test_iam_permissions_exception(self): - # Mock the API response - channel = 
ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - resource = "resource-341064690" - permissions = [] - - with pytest.raises(CustomException): - client.test_iam_permissions(resource, permissions) - - def test_detach_subscription(self): - # Setup Expected Response - expected_response = {} - expected_response = pubsub_pb2.DetachSubscriptionResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - response = client.detach_subscription(subscription) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.DetachSubscriptionRequest( - subscription=subscription - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_detach_subscription_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - with pytest.raises(CustomException): - client.detach_subscription(subscription) diff --git a/tests/unit/gapic/v1/test_subscriber_client_v1.py b/tests/unit/gapic/v1/test_subscriber_client_v1.py deleted file mode 100644 index b059214d7..000000000 --- a/tests/unit/gapic/v1/test_subscriber_client_v1.py +++ /dev/null @@ -1,892 +0,0 @@ -# -*- 
coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Unit tests.""" - -import mock -import pytest - -from google.cloud.pubsub_v1.gapic import subscriber_client -from google.cloud.pubsub_v1.proto import pubsub_pb2 -from google.iam.v1 import iam_policy_pb2 -from google.iam.v1 import policy_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import timestamp_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - def stream_stream( - self, method, request_serializer=None, response_deserializer=None - ): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - 
-class TestSubscriberClient(object): - def test_create_subscription(self): - # Setup Expected Response - name_2 = "name2-1052831874" - topic_2 = "topic2-1139259102" - ack_deadline_seconds = 2135351438 - retain_acked_messages = False - enable_message_ordering = True - filter_ = "filter-1274492040" - detached = True - expected_response = { - "name": name_2, - "topic": topic_2, - "ack_deadline_seconds": ack_deadline_seconds, - "retain_acked_messages": retain_acked_messages, - "enable_message_ordering": enable_message_ordering, - "filter": filter_, - "detached": detached, - } - expected_response = pubsub_pb2.Subscription(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - name = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - response = client.create_subscription(name, topic) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.Subscription(name=name, topic=topic) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_subscription_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - name = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - with pytest.raises(CustomException): - client.create_subscription(name, topic) - - def test_get_subscription(self): - # Setup Expected Response - name = "name3373707" - topic = "topic110546223" - 
ack_deadline_seconds = 2135351438 - retain_acked_messages = False - enable_message_ordering = True - filter_ = "filter-1274492040" - detached = True - expected_response = { - "name": name, - "topic": topic, - "ack_deadline_seconds": ack_deadline_seconds, - "retain_acked_messages": retain_acked_messages, - "enable_message_ordering": enable_message_ordering, - "filter": filter_, - "detached": detached, - } - expected_response = pubsub_pb2.Subscription(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - response = client.get_subscription(subscription) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.GetSubscriptionRequest(subscription=subscription) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_subscription_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - with pytest.raises(CustomException): - client.get_subscription(subscription) - - def test_update_subscription(self): - # Setup Expected Response - name = "name3373707" - topic = "topic110546223" - ack_deadline_seconds_2 = 921632575 - retain_acked_messages = False - enable_message_ordering = True - filter_ = "filter-1274492040" - detached = True - expected_response = { - "name": name, - "topic": topic, - "ack_deadline_seconds": 
ack_deadline_seconds_2, - "retain_acked_messages": retain_acked_messages, - "enable_message_ordering": enable_message_ordering, - "filter": filter_, - "detached": detached, - } - expected_response = pubsub_pb2.Subscription(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - ack_deadline_seconds = 42 - subscription = {"ack_deadline_seconds": ack_deadline_seconds} - paths_element = "ack_deadline_seconds" - paths = [paths_element] - update_mask = {"paths": paths} - - response = client.update_subscription(subscription, update_mask) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.UpdateSubscriptionRequest( - subscription=subscription, update_mask=update_mask - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_subscription_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - ack_deadline_seconds = 42 - subscription = {"ack_deadline_seconds": ack_deadline_seconds} - paths_element = "ack_deadline_seconds" - paths = [paths_element] - update_mask = {"paths": paths} - - with pytest.raises(CustomException): - client.update_subscription(subscription, update_mask) - - def test_list_subscriptions(self): - # Setup Expected Response - next_page_token = "" - subscriptions_element = {} - subscriptions = [subscriptions_element] - expected_response = { - "next_page_token": next_page_token, - "subscriptions": subscriptions, - } - expected_response = 
pubsub_pb2.ListSubscriptionsResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - project = client.project_path("[PROJECT]") - - paged_list_response = client.list_subscriptions(project) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.subscriptions[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.ListSubscriptionsRequest(project=project) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_subscriptions_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - project = client.project_path("[PROJECT]") - - paged_list_response = client.list_subscriptions(project) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_delete_subscription(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - client.delete_subscription(subscription) - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.DeleteSubscriptionRequest( - subscription=subscription - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_subscription_exception(self): - # Mock the API response - channel = 
ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - with pytest.raises(CustomException): - client.delete_subscription(subscription) - - def test_get_snapshot(self): - # Setup Expected Response - name = "name3373707" - topic = "topic110546223" - expected_response = {"name": name, "topic": topic} - expected_response = pubsub_pb2.Snapshot(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - snapshot = client.snapshot_path("[PROJECT]", "[SNAPSHOT]") - - response = client.get_snapshot(snapshot) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.GetSnapshotRequest(snapshot=snapshot) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_snapshot_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - snapshot = client.snapshot_path("[PROJECT]", "[SNAPSHOT]") - - with pytest.raises(CustomException): - client.get_snapshot(snapshot) - - def test_modify_ack_deadline(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup 
Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - ack_ids = [] - ack_deadline_seconds = 2135351438 - - client.modify_ack_deadline(subscription, ack_ids, ack_deadline_seconds) - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.ModifyAckDeadlineRequest( - subscription=subscription, - ack_ids=ack_ids, - ack_deadline_seconds=ack_deadline_seconds, - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_modify_ack_deadline_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - ack_ids = [] - ack_deadline_seconds = 2135351438 - - with pytest.raises(CustomException): - client.modify_ack_deadline(subscription, ack_ids, ack_deadline_seconds) - - def test_acknowledge(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - ack_ids = [] - - client.acknowledge(subscription, ack_ids) - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.AcknowledgeRequest( - subscription=subscription, ack_ids=ack_ids - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_acknowledge_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = 
subscriber_client.SubscriberClient() - - # Setup request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - ack_ids = [] - - with pytest.raises(CustomException): - client.acknowledge(subscription, ack_ids) - - def test_pull(self): - # Setup Expected Response - expected_response = {} - expected_response = pubsub_pb2.PullResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - max_messages = 496131527 - - response = client.pull(subscription, max_messages) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.PullRequest( - subscription=subscription, max_messages=max_messages - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_pull_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - max_messages = 496131527 - - with pytest.raises(CustomException): - client.pull(subscription, max_messages) - - def test_streaming_pull(self): - # Setup Expected Response - expected_response = {} - expected_response = pubsub_pb2.StreamingPullResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[iter([expected_response])]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client 
= subscriber_client.SubscriberClient() - - # Setup Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - stream_ack_deadline_seconds = 1875467245 - request = { - "subscription": subscription, - "stream_ack_deadline_seconds": stream_ack_deadline_seconds, - } - request = pubsub_pb2.StreamingPullRequest(**request) - requests = [request] - - response = client.streaming_pull(requests) - resources = list(response) - assert len(resources) == 1 - assert expected_response == resources[0] - - assert len(channel.requests) == 1 - actual_requests = channel.requests[0][1] - assert len(actual_requests) == 1 - actual_request = list(actual_requests)[0] - assert request == actual_request - - def test_streaming_pull_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - stream_ack_deadline_seconds = 1875467245 - request = { - "subscription": subscription, - "stream_ack_deadline_seconds": stream_ack_deadline_seconds, - } - - request = pubsub_pb2.StreamingPullRequest(**request) - requests = [request] - - with pytest.raises(CustomException): - client.streaming_pull(requests) - - def test_modify_push_config(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - push_config = {} - - client.modify_push_config(subscription, push_config) - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.ModifyPushConfigRequest( - subscription=subscription, push_config=push_config - ) 
- actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_modify_push_config_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - push_config = {} - - with pytest.raises(CustomException): - client.modify_push_config(subscription, push_config) - - def test_list_snapshots(self): - # Setup Expected Response - next_page_token = "" - snapshots_element = {} - snapshots = [snapshots_element] - expected_response = {"next_page_token": next_page_token, "snapshots": snapshots} - expected_response = pubsub_pb2.ListSnapshotsResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - project = client.project_path("[PROJECT]") - - paged_list_response = client.list_snapshots(project) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.snapshots[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.ListSnapshotsRequest(project=project) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_snapshots_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - project = client.project_path("[PROJECT]") - - 
paged_list_response = client.list_snapshots(project) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_create_snapshot(self): - # Setup Expected Response - name_2 = "name2-1052831874" - topic = "topic110546223" - expected_response = {"name": name_2, "topic": topic} - expected_response = pubsub_pb2.Snapshot(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - name = client.snapshot_path("[PROJECT]", "[SNAPSHOT]") - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - response = client.create_snapshot(name, subscription) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.CreateSnapshotRequest( - name=name, subscription=subscription - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_snapshot_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - name = client.snapshot_path("[PROJECT]", "[SNAPSHOT]") - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - with pytest.raises(CustomException): - client.create_snapshot(name, subscription) - - def test_update_snapshot(self): - # Setup Expected Response - name = "name3373707" - topic = "topic110546223" - expected_response = {"name": name, "topic": topic} - expected_response = pubsub_pb2.Snapshot(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = 
mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - seconds = 123456 - expire_time = {"seconds": seconds} - snapshot = {"expire_time": expire_time} - paths_element = "expire_time" - paths = [paths_element] - update_mask = {"paths": paths} - - response = client.update_snapshot(snapshot, update_mask) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.UpdateSnapshotRequest( - snapshot=snapshot, update_mask=update_mask - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_snapshot_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - seconds = 123456 - expire_time = {"seconds": seconds} - snapshot = {"expire_time": expire_time} - paths_element = "expire_time" - paths = [paths_element] - update_mask = {"paths": paths} - - with pytest.raises(CustomException): - client.update_snapshot(snapshot, update_mask) - - def test_delete_snapshot(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - snapshot = client.snapshot_path("[PROJECT]", "[SNAPSHOT]") - - client.delete_snapshot(snapshot) - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.DeleteSnapshotRequest(snapshot=snapshot) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_snapshot_exception(self): - # Mock the API response - channel = 
ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - snapshot = client.snapshot_path("[PROJECT]", "[SNAPSHOT]") - - with pytest.raises(CustomException): - client.delete_snapshot(snapshot) - - def test_seek(self): - # Setup Expected Response - expected_response = {} - expected_response = pubsub_pb2.SeekResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - response = client.seek(subscription) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.SeekRequest(subscription=subscription) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_seek_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - with pytest.raises(CustomException): - client.seek(subscription) - - def test_set_iam_policy(self): - # Setup Expected Response - version = 351608024 - etag = b"21" - expected_response = {"version": version, "etag": etag} - expected_response = policy_pb2.Policy(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = 
mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - resource = "resource-341064690" - policy = {} - - response = client.set_iam_policy(resource, policy) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.SetIamPolicyRequest( - resource=resource, policy=policy - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_set_iam_policy_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - resource = "resource-341064690" - policy = {} - - with pytest.raises(CustomException): - client.set_iam_policy(resource, policy) - - def test_get_iam_policy(self): - # Setup Expected Response - version = 351608024 - etag = b"21" - expected_response = {"version": version, "etag": etag} - expected_response = policy_pb2.Policy(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - resource = "resource-341064690" - - response = client.get_iam_policy(resource) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_iam_policy_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = 
mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - resource = "resource-341064690" - - with pytest.raises(CustomException): - client.get_iam_policy(resource) - - def test_test_iam_permissions(self): - # Setup Expected Response - expected_response = {} - expected_response = iam_policy_pb2.TestIamPermissionsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - resource = "resource-341064690" - permissions = [] - - response = client.test_iam_permissions(resource, permissions) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_test_iam_permissions_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - resource = "resource-341064690" - permissions = [] - - with pytest.raises(CustomException): - client.test_iam_permissions(resource, permissions) diff --git a/tests/unit/pubsub_v1/publisher/batch/test_base.py b/tests/unit/pubsub_v1/publisher/batch/test_base.py index f10b54ee5..b03dd99de 100644 --- a/tests/unit/pubsub_v1/publisher/batch/test_base.py +++ b/tests/unit/pubsub_v1/publisher/batch/test_base.py @@ -21,6 +21,7 @@ from google.cloud.pubsub_v1 import types from 
google.cloud.pubsub_v1.publisher._batch.base import BatchStatus from google.cloud.pubsub_v1.publisher._batch.thread import Batch +from google.pubsub_v1 import types as gapic_types def create_batch(status=None, settings=types.BatchSettings()): @@ -44,5 +45,5 @@ def create_batch(status=None, settings=types.BatchSettings()): def test_len(): batch = create_batch(status=BatchStatus.ACCEPTING_MESSAGES) assert len(batch) == 0 - batch.publish(types.PubsubMessage(data=b"foo")) + batch.publish(gapic_types.PubsubMessage(data=b"foo")) assert len(batch) == 1 diff --git a/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/tests/unit/pubsub_v1/publisher/batch/test_thread.py index e9d2b09c0..cd634f8f8 100644 --- a/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -20,6 +20,7 @@ import pytest import google.api_core.exceptions +from google.api_core import gapic_v1 from google.auth import credentials from google.cloud.pubsub_v1 import publisher from google.cloud.pubsub_v1 import types @@ -28,6 +29,7 @@ from google.cloud.pubsub_v1.publisher._batch.base import BatchCancellationReason from google.cloud.pubsub_v1.publisher._batch import thread from google.cloud.pubsub_v1.publisher._batch.thread import Batch +from google.pubsub_v1 import types as gapic_types def create_client(): @@ -39,6 +41,7 @@ def create_batch( topic="topic_name", batch_done_callback=None, commit_when_full=True, + commit_retry=gapic_v1.method.DEFAULT, **batch_settings ): """Return a batch object suitable for testing. @@ -49,6 +52,8 @@ def create_batch( the batch is done, either with a success or a failure flag. commit_when_full (bool): Whether to commit the batch when the batch has reached byte-size or number-of-messages limits. + commit_retry (Optional[google.api_core.retry.Retry]): The retry settings + for the batch commit call. batch_settings (Mapping[str, str]): Arguments passed on to the :class:``~.pubsub_v1.types.BatchSettings`` constructor. 
@@ -63,6 +68,7 @@ def create_batch( settings, batch_done_callback=batch_done_callback, commit_when_full=commit_when_full, + commit_retry=commit_retry, ) @@ -116,7 +122,7 @@ def test_blocking__commit(): ) # Set up the underlying API publish method to return a PublishResponse. - publish_response = types.PublishResponse(message_ids=["a", "b"]) + publish_response = gapic_types.PublishResponse(message_ids=["a", "b"]) patch = mock.patch.object( type(batch.client.api), "publish", return_value=publish_response ) @@ -126,11 +132,12 @@ def test_blocking__commit(): # Establish that the underlying API call was made with expected # arguments. publish.assert_called_once_with( - "topic_name", - [ - types.PubsubMessage(data=b"This is my message."), - types.PubsubMessage(data=b"This is another message."), + topic="topic_name", + messages=[ + gapic_types.PubsubMessage(data=b"This is my message."), + gapic_types.PubsubMessage(data=b"This is another message."), ], + retry=gapic_v1.method.DEFAULT, ) # Establish that all of the futures are done, and that they have the @@ -141,15 +148,36 @@ def test_blocking__commit(): assert futures[1].result() == "b" +def test_blocking__commit_custom_retry(): + batch = create_batch(commit_retry=mock.sentinel.custom_retry) + batch.publish({"data": b"This is my message."}) + + # Set up the underlying API publish method to return a PublishResponse. + publish_response = gapic_types.PublishResponse(message_ids=["a"]) + patch = mock.patch.object( + type(batch.client.api), "publish", return_value=publish_response + ) + with patch as publish: + batch._commit() + + # Establish that the underlying API call was made with expected + # arguments. 
+ publish.assert_called_once_with( + topic="topic_name", + messages=[gapic_types.PubsubMessage(data=b"This is my message.")], + retry=mock.sentinel.custom_retry, + ) + + def test_client_api_publish_not_blocking_additional_publish_calls(): batch = create_batch(max_messages=1) api_publish_called = threading.Event() - def api_publish_delay(_, messages): + def api_publish_delay(topic="", messages=(), retry=None): api_publish_called.set() time.sleep(1.0) message_ids = [str(i) for i in range(len(messages))] - return types.PublishResponse(message_ids=message_ids) + return gapic_types.PublishResponse(message_ids=message_ids) api_publish_patch = mock.patch.object( type(batch.client.api), "publish", side_effect=api_publish_delay @@ -210,7 +238,7 @@ def test_blocking__commit_wrong_messageid_length(): ) # Set up a PublishResponse that only returns one message ID. - publish_response = types.PublishResponse(message_ids=["a"]) + publish_response = gapic_types.PublishResponse(message_ids=["a"]) patch = mock.patch.object( type(batch.client.api), "publish", return_value=publish_response ) @@ -264,9 +292,9 @@ def test_block__commmit_retry_error(): def test_publish_updating_batch_size(): batch = create_batch(topic="topic_foo") messages = ( - types.PubsubMessage(data=b"foobarbaz"), - types.PubsubMessage(data=b"spameggs"), - types.PubsubMessage(data=b"1335020400"), + gapic_types.PubsubMessage(data=b"foobarbaz"), + gapic_types.PubsubMessage(data=b"spameggs"), + gapic_types.PubsubMessage(data=b"1335020400"), ) # Publish each of the messages, which should save them to the batch. @@ -278,9 +306,9 @@ def test_publish_updating_batch_size(): # The size should have been incremented by the sum of the size # contributions of each message to the PublishRequest. 
- base_request_size = types.PublishRequest(topic="topic_foo").ByteSize() + base_request_size = gapic_types.PublishRequest(topic="topic_foo")._pb.ByteSize() expected_request_size = base_request_size + sum( - types.PublishRequest(messages=[msg]).ByteSize() for msg in messages + gapic_types.PublishRequest(messages=[msg])._pb.ByteSize() for msg in messages ) assert batch.size == expected_request_size @@ -289,7 +317,7 @@ def test_publish_updating_batch_size(): def test_publish(): batch = create_batch() - message = types.PubsubMessage() + message = gapic_types.PubsubMessage() future = batch.publish(message) assert len(batch.messages) == 1 @@ -299,7 +327,7 @@ def test_publish(): def test_publish_max_messages_zero(): batch = create_batch(topic="topic_foo", max_messages=0) - message = types.PubsubMessage(data=b"foobarbaz") + message = gapic_types.PubsubMessage(data=b"foobarbaz") with mock.patch.object(batch, "commit") as commit: future = batch.publish(message) @@ -312,8 +340,8 @@ def test_publish_max_messages_zero(): def test_publish_max_messages_enforced(): batch = create_batch(topic="topic_foo", max_messages=1) - message = types.PubsubMessage(data=b"foobarbaz") - message2 = types.PubsubMessage(data=b"foobarbaz2") + message = gapic_types.PubsubMessage(data=b"foobarbaz") + message2 = gapic_types.PubsubMessage(data=b"foobarbaz2") future = batch.publish(message) future2 = batch.publish(message2) @@ -327,8 +355,8 @@ def test_publish_max_messages_enforced(): def test_publish_max_bytes_enforced(): batch = create_batch(topic="topic_foo", max_bytes=15) - message = types.PubsubMessage(data=b"foobarbaz") - message2 = types.PubsubMessage(data=b"foobarbaz2") + message = gapic_types.PubsubMessage(data=b"foobarbaz") + message2 = gapic_types.PubsubMessage(data=b"foobarbaz2") future = batch.publish(message) future2 = batch.publish(message2) @@ -343,9 +371,9 @@ def test_publish_exceed_max_messages(): max_messages = 4 batch = create_batch(max_messages=max_messages) messages = ( - 
types.PubsubMessage(data=b"foobarbaz"), - types.PubsubMessage(data=b"spameggs"), - types.PubsubMessage(data=b"1335020400"), + gapic_types.PubsubMessage(data=b"foobarbaz"), + gapic_types.PubsubMessage(data=b"spameggs"), + gapic_types.PubsubMessage(data=b"1335020400"), ) # Publish each of the messages, which should save them to the batch. @@ -359,7 +387,7 @@ def test_publish_exceed_max_messages(): # When a fourth message is published, commit should be called. # No future will be returned in this case. - future = batch.publish(types.PubsubMessage(data=b"last one")) + future = batch.publish(gapic_types.PubsubMessage(data=b"last one")) commit.assert_called_once_with() assert future is None @@ -374,11 +402,11 @@ def test_publish_single_message_size_exceeds_server_size_limit(): max_bytes=1000 * 1000, # way larger than (mocked) server side limit ) - big_message = types.PubsubMessage(data=b"x" * 984) + big_message = gapic_types.PubsubMessage(data=b"x" * 984) - request_size = types.PublishRequest( + request_size = gapic_types.PublishRequest( topic="topic_foo", messages=[big_message] - ).ByteSize() + )._pb.ByteSize() assert request_size == 1001 # sanity check, just above the (mocked) server limit with pytest.raises(exceptions.MessageTooLargeError): @@ -390,13 +418,15 @@ def test_publish_total_messages_size_exceeds_server_size_limit(): batch = create_batch(topic="topic_foo", max_messages=10, max_bytes=1500) messages = ( - types.PubsubMessage(data=b"x" * 500), - types.PubsubMessage(data=b"x" * 600), + gapic_types.PubsubMessage(data=b"x" * 500), + gapic_types.PubsubMessage(data=b"x" * 600), ) # Sanity check - request size is still below BatchSettings.max_bytes, # but it exceeds the server-side size limit. 
- request_size = types.PublishRequest(topic="topic_foo", messages=messages).ByteSize() + request_size = gapic_types.PublishRequest( + topic="topic_foo", messages=messages + )._pb.ByteSize() assert 1000 < request_size < 1500 with mock.patch.object(batch, "commit") as fake_commit: @@ -412,7 +442,7 @@ def test_publish_dict(): future = batch.publish({"data": b"foobarbaz", "attributes": {"spam": "eggs"}}) # There should be one message on the batch. - expected_message = types.PubsubMessage( + expected_message = gapic_types.PubsubMessage( data=b"foobarbaz", attributes={"spam": "eggs"} ) assert batch.messages == [expected_message] @@ -440,9 +470,9 @@ def test_do_not_commit_when_full_when_flag_is_off(): # Set commit_when_full flag to False batch = create_batch(max_messages=max_messages, commit_when_full=False) messages = ( - types.PubsubMessage(data=b"foobarbaz"), - types.PubsubMessage(data=b"spameggs"), - types.PubsubMessage(data=b"1335020400"), + gapic_types.PubsubMessage(data=b"foobarbaz"), + gapic_types.PubsubMessage(data=b"spameggs"), + gapic_types.PubsubMessage(data=b"1335020400"), ) with mock.patch.object(batch, "commit") as commit: @@ -451,7 +481,7 @@ def test_do_not_commit_when_full_when_flag_is_off(): assert len(futures) == 3 # When a fourth message is published, commit should not be called. - future = batch.publish(types.PubsubMessage(data=b"last one")) + future = batch.publish(gapic_types.PubsubMessage(data=b"last one")) assert commit.call_count == 0 assert future is None @@ -471,11 +501,11 @@ def test_batch_done_callback_called_on_success(): batch = create_batch(batch_done_callback=batch_done_callback_tracker) # Ensure messages exist. - message = types.PubsubMessage(data=b"foobarbaz") + message = gapic_types.PubsubMessage(data=b"foobarbaz") batch.publish(message) # One response for one published message. 
- publish_response = types.PublishResponse(message_ids=["a"]) + publish_response = gapic_types.PublishResponse(message_ids=["a"]) with mock.patch.object( type(batch.client.api), "publish", return_value=publish_response @@ -491,11 +521,11 @@ def test_batch_done_callback_called_on_publish_failure(): batch = create_batch(batch_done_callback=batch_done_callback_tracker) # Ensure messages exist. - message = types.PubsubMessage(data=b"foobarbaz") + message = gapic_types.PubsubMessage(data=b"foobarbaz") batch.publish(message) # One response for one published message. - publish_response = types.PublishResponse(message_ids=["a"]) + publish_response = gapic_types.PublishResponse(message_ids=["a"]) # Induce publish error. error = google.api_core.exceptions.InternalServerError("uh oh") @@ -517,11 +547,11 @@ def test_batch_done_callback_called_on_publish_response_invalid(): batch = create_batch(batch_done_callback=batch_done_callback_tracker) # Ensure messages exist. - message = types.PubsubMessage(data=b"foobarbaz") + message = gapic_types.PubsubMessage(data=b"foobarbaz") batch.publish(message) # No message ids returned in successful publish response -> invalid. 
- publish_response = types.PublishResponse(message_ids=[]) + publish_response = gapic_types.PublishResponse(message_ids=[]) with mock.patch.object( type(batch.client.api), "publish", return_value=publish_response diff --git a/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py b/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py index 08e1954e6..de5dd0523 100644 --- a/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py +++ b/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py @@ -18,14 +18,14 @@ from google.auth import credentials from google.cloud.pubsub_v1 import publisher -from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher._sequencer import ordered_sequencer +from google.pubsub_v1 import types as gapic_types _ORDERING_KEY = "ordering_key_1" def create_message(): - return types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) + return gapic_types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) def create_client(): @@ -172,6 +172,18 @@ def test_basic_publish(): batch.publish.assert_called_once_with(message) +def test_publish_custom_retry(): + client = create_client() + message = create_message() + sequencer = create_ordered_sequencer(client) + + sequencer.publish(message, retry=mock.sentinel.custom_retry) + + assert sequencer._ordered_batches # batch exists + batch = sequencer._ordered_batches[0] + assert batch._commit_retry is mock.sentinel.custom_retry + + def test_publish_batch_full(): client = create_client() message = create_message() diff --git a/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py b/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py index 22e24ed06..b8aff0d2c 100644 --- a/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py +++ b/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py @@ -17,13 +17,13 @@ from google.auth import credentials from google.cloud.pubsub_v1 import 
publisher -from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher._batch import base from google.cloud.pubsub_v1.publisher._sequencer import unordered_sequencer +from google.pubsub_v1 import types as gapic_types def create_message(): - return types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) + return gapic_types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) def create_client(): @@ -89,6 +89,17 @@ def test_basic_publish(): batch.publish.assert_called_once_with(message) +def test_publish_custom_retry(): + client = create_client() + message = create_message() + sequencer = unordered_sequencer.UnorderedSequencer(client, "topic_name") + + sequencer.publish(message, retry=mock.sentinel.custom_retry) + + assert sequencer._current_batch is not None + assert sequencer._current_batch._commit_retry is mock.sentinel.custom_retry + + def test_publish_batch_full(): client = create_client() message = create_message() diff --git a/tests/unit/pubsub_v1/publisher/test_flow_controller.py b/tests/unit/pubsub_v1/publisher/test_flow_controller.py index 26a61663b..54484520d 100644 --- a/tests/unit/pubsub_v1/publisher/test_flow_controller.py +++ b/tests/unit/pubsub_v1/publisher/test_flow_controller.py @@ -23,6 +23,7 @@ from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher.flow_controller import FlowController +from google.pubsub_v1 import types as grpc_types def _run_in_daemon( @@ -66,7 +67,7 @@ def test_no_overflow_no_error(): # there should be no errors for data in (b"foo", b"bar", b"baz"): - msg = types.PubsubMessage(data=data) + msg = grpc_types.PubsubMessage(data=data) flow_controller.add(msg) @@ -79,8 +80,8 @@ def test_overflow_no_error_on_ignore(): flow_controller = FlowController(settings) # there should be no overflow errors - flow_controller.add(types.PubsubMessage(data=b"foo")) - flow_controller.add(types.PubsubMessage(data=b"bar")) + 
flow_controller.add(grpc_types.PubsubMessage(data=b"foo")) + flow_controller.add(grpc_types.PubsubMessage(data=b"bar")) def test_message_count_overflow_error(): @@ -91,9 +92,9 @@ def test_message_count_overflow_error(): ) flow_controller = FlowController(settings) - flow_controller.add(types.PubsubMessage(data=b"foo")) + flow_controller.add(grpc_types.PubsubMessage(data=b"foo")) with pytest.raises(exceptions.FlowControlLimitError) as error: - flow_controller.add(types.PubsubMessage(data=b"bar")) + flow_controller.add(grpc_types.PubsubMessage(data=b"bar")) assert "messages: 2 / 1" in str(error.value) @@ -109,14 +110,14 @@ def test_byte_size_overflow_error(): # Since the message data itself occupies 100 bytes, it means that both # messages combined will exceed the imposed byte limit of 199, but a single # message will not (the message size overhead is way lower than data size). - msg1 = types.PubsubMessage(data=b"x" * 100) - msg2 = types.PubsubMessage(data=b"y" * 100) + msg1 = grpc_types.PubsubMessage(data=b"x" * 100) + msg2 = grpc_types.PubsubMessage(data=b"y" * 100) flow_controller.add(msg1) with pytest.raises(exceptions.FlowControlLimitError) as error: flow_controller.add(msg2) - total_size = msg1.ByteSize() + msg2.ByteSize() + total_size = msg1._pb.ByteSize() + msg2._pb.ByteSize() expected_info = "bytes: {} / 199".format(total_size) assert expected_info in str(error.value) @@ -129,9 +130,9 @@ def test_no_error_on_moderate_message_flow(): ) flow_controller = FlowController(settings) - msg1 = types.PubsubMessage(data=b"x" * 100) - msg2 = types.PubsubMessage(data=b"y" * 100) - msg3 = types.PubsubMessage(data=b"z" * 100) + msg1 = grpc_types.PubsubMessage(data=b"x" * 100) + msg2 = grpc_types.PubsubMessage(data=b"y" * 100) + msg3 = grpc_types.PubsubMessage(data=b"z" * 100) # The flow control settings will accept two in-flight messages, but not three. # If releasing messages works correctly, the sequence below will not raise errors. 
@@ -151,14 +152,14 @@ def test_rejected_messages_do_not_increase_total_load(): ) flow_controller = FlowController(settings) - msg1 = types.PubsubMessage(data=b"x" * 100) - msg2 = types.PubsubMessage(data=b"y" * 100) + msg1 = grpc_types.PubsubMessage(data=b"x" * 100) + msg2 = grpc_types.PubsubMessage(data=b"y" * 100) flow_controller.add(msg1) for _ in range(5): with pytest.raises(exceptions.FlowControlLimitError): - flow_controller.add(types.PubsubMessage(data=b"z" * 100)) + flow_controller.add(grpc_types.PubsubMessage(data=b"z" * 100)) # After releasing a message we should again be able to add another one, despite # previously trying to add a lot of other messages. @@ -174,9 +175,9 @@ def test_incorrectly_releasing_too_many_messages(): ) flow_controller = FlowController(settings) - msg1 = types.PubsubMessage(data=b"x" * 100) - msg2 = types.PubsubMessage(data=b"y" * 100) - msg3 = types.PubsubMessage(data=b"z" * 100) + msg1 = grpc_types.PubsubMessage(data=b"x" * 100) + msg2 = grpc_types.PubsubMessage(data=b"y" * 100) + msg3 = grpc_types.PubsubMessage(data=b"z" * 100) # Releasing a message that would make the load negative should result in a warning. 
with warnings.catch_warnings(record=True) as warned: @@ -196,7 +197,7 @@ def test_incorrectly_releasing_too_many_messages(): error_msg = str(error.value) assert "messages: 2 / 1" in error_msg - total_size = msg2.ByteSize() + msg3.ByteSize() + total_size = msg2._pb.ByteSize() + msg3._pb.ByteSize() expected_size_info = "bytes: {} / 150".format(total_size) assert expected_size_info in error_msg @@ -209,10 +210,10 @@ def test_blocking_on_overflow_until_free_capacity(): ) flow_controller = FlowController(settings) - msg1 = types.PubsubMessage(data=b"x" * 100) - msg2 = types.PubsubMessage(data=b"y" * 100) - msg3 = types.PubsubMessage(data=b"z" * 100) - msg4 = types.PubsubMessage(data=b"w" * 100) + msg1 = grpc_types.PubsubMessage(data=b"x" * 100) + msg2 = grpc_types.PubsubMessage(data=b"y" * 100) + msg3 = grpc_types.PubsubMessage(data=b"z" * 100) + msg4 = grpc_types.PubsubMessage(data=b"w" * 100) # If there is a concurrency bug in FlowController, we do not want to block # the main thread running the tests, thus we delegate all add/release @@ -286,7 +287,7 @@ def test_error_if_mesage_would_block_indefinitely(): ) flow_controller = FlowController(settings) - msg = types.PubsubMessage(data=b"xyz") + msg = grpc_types.PubsubMessage(data=b"xyz") adding_done = threading.Event() error_event = threading.Event() @@ -303,7 +304,7 @@ def test_error_if_mesage_would_block_indefinitely(): error_msg = str(error_info.value) assert "would block forever" in error_msg assert "messages: 1 / 0" in error_msg - assert "bytes: {} / 1".format(msg.ByteSize()) in error_msg + assert "bytes: {} / 1".format(msg._pb.ByteSize()) in error_msg def test_threads_posting_large_messages_do_not_starve(): @@ -314,7 +315,7 @@ def test_threads_posting_large_messages_do_not_starve(): ) flow_controller = FlowController(settings) - large_msg = types.PubsubMessage(data=b"x" * 100) # close to entire byte limit + large_msg = grpc_types.PubsubMessage(data=b"x" * 100) # close to entire byte limit adding_initial_done = 
threading.Event() adding_large_done = threading.Event() @@ -325,14 +326,14 @@ def test_threads_posting_large_messages_do_not_starve(): # Occupy some of the flow capacity, then try to add a large message. Releasing # enough messages should eventually allow the large message to come through, even # if more messages are added after it (those should wait for the large message). - initial_messages = [types.PubsubMessage(data=b"x" * 10)] * 5 + initial_messages = [grpc_types.PubsubMessage(data=b"x" * 10)] * 5 _run_in_daemon(flow_controller, "add", initial_messages, adding_initial_done) assert adding_initial_done.wait(timeout=0.1) _run_in_daemon(flow_controller, "add", [large_msg], adding_large_done) # Continuously keep adding more messages after the large one. - messages = [types.PubsubMessage(data=b"x" * 10)] * 10 + messages = [grpc_types.PubsubMessage(data=b"x" * 10)] * 10 _run_in_daemon(flow_controller, "add", messages, adding_busy_done, action_pause=0.1) # At the same time, gradually keep releasing the messages - the freeed up @@ -372,8 +373,8 @@ def test_warning_on_internal_reservation_stats_error_when_unblocking(): ) flow_controller = FlowController(settings) - msg1 = types.PubsubMessage(data=b"x" * 100) - msg2 = types.PubsubMessage(data=b"y" * 100) + msg1 = grpc_types.PubsubMessage(data=b"x" * 100) + msg2 = grpc_types.PubsubMessage(data=b"y" * 100) # If there is a concurrency bug in FlowController, we do not want to block # the main thread running the tests, thus we delegate all add/release diff --git a/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/tests/unit/pubsub_v1/publisher/test_publisher_client.py index b58ed133f..bcdbb2f34 100644 --- a/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -15,19 +15,39 @@ from __future__ import absolute_import from __future__ import division +import inspect + from google.auth import credentials import mock import pytest import time -from 
google.cloud.pubsub_v1.gapic import publisher_client +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.cloud.pubsub_v1 import publisher from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher._sequencer import ordered_sequencer +from google.pubsub_v1 import types as gapic_types +from google.pubsub_v1.services.publisher import client as publisher_client +from google.pubsub_v1.services.publisher.transports.grpc import PublisherGrpcTransport + + +def _assert_retries_equal(retry, retry2): + # Retry instances cannot be directly compared, because their predicates are + # different instances of the same function. We thus manually compare their other + # attributes, and then heuristically compare their predicates. + for attr in ("_deadline", "_initial", "_maximum", "_multiplier"): + assert getattr(retry, attr) == getattr(retry2, attr) + + pred = retry._predicate + pred2 = retry2._predicate + assert inspect.getsource(pred) == inspect.getsource(pred2) + assert inspect.getclosurevars(pred) == inspect.getclosurevars(pred2) + def test_init(): creds = mock.Mock(spec=credentials.Credentials) @@ -42,13 +62,13 @@ def test_init(): def test_init_w_custom_transport(): - transport = object() + transport = PublisherGrpcTransport() client = publisher.Client(transport=transport) # A plain client should have an `api` (the underlying GAPIC) and a # batch settings object, which should have the defaults. 
assert isinstance(client.api, publisher_client.PublisherClient) - assert client.api.transport is transport + assert client.api._transport is transport assert client.batch_settings.max_bytes == 1 * 1000 * 1000 assert client.batch_settings.max_latency == 0.01 assert client.batch_settings.max_messages == 100 @@ -59,7 +79,7 @@ def test_init_w_api_endpoint(): client = publisher.Client(client_options=client_options) assert isinstance(client.api, publisher_client.PublisherClient) - assert (client.api.transport._channel._channel.target()).decode( + assert (client.api._transport.grpc_channel._channel.target()).decode( "utf-8" ) == "testendpoint.google.com" @@ -69,7 +89,7 @@ def test_init_w_unicode_api_endpoint(): client = publisher.Client(client_options=client_options) assert isinstance(client.api, publisher_client.PublisherClient) - assert (client.api.transport._channel._channel.target()).decode( + assert (client.api._transport.grpc_channel._channel.target()).decode( "utf-8" ) == "testendpoint.google.com" @@ -78,7 +98,7 @@ def test_init_w_empty_client_options(): client = publisher.Client(client_options={}) assert isinstance(client.api, publisher_client.PublisherClient) - assert (client.api.transport._channel._channel.target()).decode( + assert (client.api._transport.grpc_channel._channel.target()).decode( "utf-8" ) == publisher_client.PublisherClient.SERVICE_ADDRESS @@ -93,7 +113,7 @@ def test_init_emulator(monkeypatch): # # Sadly, there seems to be no good way to do this without poking at # the private API of gRPC. 
- channel = client.api.transport.publish._channel + channel = client.api._transport.publish._channel assert channel.target().decode("utf8") == "/foo/bar/" @@ -109,19 +129,6 @@ def test_message_ordering_enabled(): assert client._enable_message_ordering -def test_message_ordering_changes_retry_deadline(): - creds = mock.Mock(spec=credentials.Credentials) - - client = publisher.Client(credentials=creds) - assert client.api._method_configs["Publish"].retry._deadline == 60 - - client = publisher.Client( - publisher_options=types.PublisherOptions(enable_message_ordering=True), - credentials=creds, - ) - assert client.api._method_configs["Publish"].retry._deadline == 2 ** 32 / 1000 - - def test_publish(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) @@ -150,8 +157,10 @@ def test_publish(): # Check mock. batch.publish.assert_has_calls( [ - mock.call(types.PubsubMessage(data=b"spam")), - mock.call(types.PubsubMessage(data=b"foo", attributes={"bar": "baz"})), + mock.call(gapic_types.PubsubMessage(data=b"spam")), + mock.call( + gapic_types.PubsubMessage(data=b"foo", attributes={"bar": "baz"}) + ), ] ) @@ -207,6 +216,46 @@ def test_publish_empty_ordering_key_when_message_ordering_enabled(): assert client.publish(topic, b"bytestring body", ordering_key="") is not None +def test_publish_with_ordering_key_uses_extended_retry_deadline(): + creds = mock.Mock(spec=credentials.Credentials) + client = publisher.Client( + credentials=creds, + publisher_options=types.PublisherOptions(enable_message_ordering=True), + ) + + # Use mocks in lieu of the actual batch class. + batch = mock.Mock(spec=client._batch_class) + future = mock.sentinel.future + future.add_done_callback = mock.Mock(spec=["__call__"]) + batch.publish.return_value = future + + topic = "topic/path" + client._set_batch(topic, batch) + + # Actually mock the batch class now. 
+ batch_class = mock.Mock(spec=(), return_value=batch) + client._set_batch_class(batch_class) + + # Publish a message with custom retry settings. + custom_retry = retries.Retry( + initial=1, + maximum=20, + multiplier=3.3, + deadline=999, + predicate=retries.if_exception_type(TimeoutError, KeyboardInterrupt), + ) + future = client.publish(topic, b"foo", ordering_key="first", retry=custom_retry) + assert future is mock.sentinel.future + + # Check the retry settings used for the batch. + batch_class.assert_called_once() + _, kwargs = batch_class.call_args + + batch_commit_retry = kwargs["commit_retry"] + expected_retry = custom_retry.with_deadline(2.0 ** 32) + _assert_retries_equal(batch_commit_retry, expected_retry) + + def test_publish_attrs_bytestring(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) @@ -225,7 +274,7 @@ def test_publish_attrs_bytestring(): # The attributes should have been sent as text. batch.publish.assert_called_once_with( - types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) + gapic_types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) ) @@ -262,8 +311,9 @@ def test_publish_new_batch_needed(): settings=client.batch_settings, batch_done_callback=None, commit_when_full=True, + commit_retry=gapic_v1.method.DEFAULT, ) - message_pb = types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) + message_pb = gapic_types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) batch1.publish.assert_called_once_with(message_pb) batch2.publish.assert_called_once_with(message_pb) @@ -302,13 +352,21 @@ def test_gapic_instance_method(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) - ct = mock.Mock() - client.api._inner_api_calls["create_topic"] = ct + transport_mock = mock.Mock(create_topic=mock.sentinel) + fake_create_topic_rpc = mock.Mock() + transport_mock._wrapped_methods = { + transport_mock.create_topic: fake_create_topic_rpc + } + patcher = 
mock.patch.object(client.api, "_transport", new=transport_mock) + + topic = gapic_types.Topic(name="projects/foo/topics/bar") + + with patcher: + client.create_topic(topic) - client.create_topic("projects/foo/topics/bar") - assert ct.call_count == 1 - _, args, _ = ct.mock_calls[0] - assert args[0] == types.Topic(name="projects/foo/topics/bar") + assert fake_create_topic_rpc.call_count == 1 + _, args, _ = fake_create_topic_rpc.mock_calls[0] + assert args[0] == gapic_types.Topic(name="projects/foo/topics/bar") def test_gapic_class_method_on_class(): @@ -444,9 +502,9 @@ def test_publish_with_ordering_key(): # Check mock. batch.publish.assert_has_calls( [ - mock.call(types.PubsubMessage(data=b"spam", ordering_key="k1")), + mock.call(gapic_types.PubsubMessage(data=b"spam", ordering_key="k1")), mock.call( - types.PubsubMessage( + gapic_types.PubsubMessage( data=b"foo", attributes={"bar": "baz"}, ordering_key="k1" ) ), diff --git a/tests/unit/pubsub_v1/subscriber/test_dispatcher.py b/tests/unit/pubsub_v1/subscriber/test_dispatcher.py index 43822e96e..288e4bd18 100644 --- a/tests/unit/pubsub_v1/subscriber/test_dispatcher.py +++ b/tests/unit/pubsub_v1/subscriber/test_dispatcher.py @@ -15,11 +15,11 @@ import collections import threading -from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber._protocol import dispatcher from google.cloud.pubsub_v1.subscriber._protocol import helper_threads from google.cloud.pubsub_v1.subscriber._protocol import requests from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager +from google.pubsub_v1 import types as gapic_types import mock from six.moves import queue @@ -76,7 +76,7 @@ def test_ack(): dispatcher_.ack(items) manager.send.assert_called_once_with( - types.StreamingPullRequest(ack_ids=["ack_id_string"]) + gapic_types.StreamingPullRequest(ack_ids=["ack_id_string"]) ) manager.leaser.remove.assert_called_once_with(items) @@ -98,7 +98,7 @@ def test_ack_no_time(): dispatcher_.ack(items) 
manager.send.assert_called_once_with( - types.StreamingPullRequest(ack_ids=["ack_id_string"]) + gapic_types.StreamingPullRequest(ack_ids=["ack_id_string"]) ) manager.ack_histogram.add.assert_not_called() @@ -127,7 +127,7 @@ def test_ack_splitting_large_payload(): for call in calls: message = call.args[0] - assert message.ByteSize() <= 524288 # server-side limit (2**19) + assert message._pb.ByteSize() <= 524288 # server-side limit (2**19) sent_ack_ids.update(message.ack_ids) assert set(sent_ack_ids) == all_ack_ids # all messages should have been ACK-ed @@ -195,7 +195,7 @@ def test_nack(): dispatcher_.nack(items) manager.send.assert_called_once_with( - types.StreamingPullRequest( + gapic_types.StreamingPullRequest( modify_deadline_ack_ids=["ack_id_string"], modify_deadline_seconds=[0] ) ) @@ -211,7 +211,7 @@ def test_modify_ack_deadline(): dispatcher_.modify_ack_deadline(items) manager.send.assert_called_once_with( - types.StreamingPullRequest( + gapic_types.StreamingPullRequest( modify_deadline_ack_ids=["ack_id_string"], modify_deadline_seconds=[60] ) ) @@ -238,7 +238,7 @@ def test_modify_ack_deadline_splitting_large_payload(): for call in calls: message = call.args[0] - assert message.ByteSize() <= 524288 # server-side limit (2**19) + assert message._pb.ByteSize() <= 524288 # server-side limit (2**19) sent_ack_ids.update(message.modify_deadline_ack_ids) assert set(sent_ack_ids) == all_ack_ids # all messages should have been MODACK-ed diff --git a/tests/unit/pubsub_v1/subscriber/test_message.py b/tests/unit/pubsub_v1/subscriber/test_message.py index 0c8a6d181..09f796480 100644 --- a/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/tests/unit/pubsub_v1/subscriber/test_message.py @@ -21,9 +21,9 @@ from google.protobuf import timestamp_pb2 from google.api_core import datetime_helpers -from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import message from google.cloud.pubsub_v1.subscriber._protocol import requests +from google.pubsub_v1 
import types as gapic_types RECEIVED = datetime.datetime(2012, 4, 21, 15, 0, tzinfo=pytz.utc) @@ -36,16 +36,19 @@ def create_message(data, ack_id="ACKID", delivery_attempt=0, ordering_key="", **attrs): with mock.patch.object(time, "time") as time_: time_.return_value = RECEIVED_SECONDS - msg = message.Message( - message=types.PubsubMessage( - attributes=attrs, - data=data, - message_id="message_id", - publish_time=timestamp_pb2.Timestamp( - seconds=PUBLISHED_SECONDS, nanos=PUBLISHED_MICROS * 1000 - ), - ordering_key=ordering_key, + gapic_pubsub_message = gapic_types.PubsubMessage( + attributes=attrs, + data=data, + message_id="message_id", + publish_time=timestamp_pb2.Timestamp( + seconds=PUBLISHED_SECONDS, nanos=PUBLISHED_MICROS * 1000 ), + ordering_key=ordering_key, + ) + msg = message.Message( + # The code under test uses a raw protobuf PubsubMessage, i.e. w/o additional + # Python class wrappers, hence the "_pb" + message=gapic_pubsub_message._pb, ack_id=ack_id, delivery_attempt=delivery_attempt, request_queue=queue.Queue(), diff --git a/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py b/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py index 946e2598a..6fd83d13a 100644 --- a/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py +++ b/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py @@ -14,14 +14,14 @@ from six.moves import queue -from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import message from google.cloud.pubsub_v1.subscriber._protocol import messages_on_hold +from google.pubsub_v1 import types as gapic_types def make_message(ack_id, ordering_key): - proto_msg = types.PubsubMessage(data=b"Q", ordering_key=ordering_key) - return message.Message(proto_msg, ack_id, 0, queue.Queue()) + proto_msg = gapic_types.PubsubMessage(data=b"Q", ordering_key=ordering_key) + return message.Message(proto_msg._pb, ack_id, 0, queue.Queue()) def test_init(): diff --git 
a/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 3f2881df6..d1bac4335 100644 --- a/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -23,7 +23,6 @@ from google.api_core import bidi from google.api_core import exceptions from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.gapic import subscriber_client_config from google.cloud.pubsub_v1.subscriber import client from google.cloud.pubsub_v1.subscriber import message from google.cloud.pubsub_v1.subscriber import scheduler @@ -33,6 +32,7 @@ from google.cloud.pubsub_v1.subscriber._protocol import messages_on_hold from google.cloud.pubsub_v1.subscriber._protocol import requests from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager +from google.pubsub_v1 import types as gapic_types import grpc @@ -363,7 +363,7 @@ def test_send_unary(): manager._UNARY_REQUESTS = True manager.send( - types.StreamingPullRequest( + gapic_types.StreamingPullRequest( ack_ids=["ack_id1", "ack_id2"], modify_deadline_ack_ids=["ack_id3", "ack_id4", "ack_id5"], modify_deadline_seconds=[10, 20, 20], @@ -395,7 +395,7 @@ def test_send_unary_empty(): manager = make_manager() manager._UNARY_REQUESTS = True - manager.send(types.StreamingPullRequest()) + manager.send(gapic_types.StreamingPullRequest()) manager._client.acknowledge.assert_not_called() manager._client.modify_ack_deadline.assert_not_called() @@ -410,7 +410,7 @@ def test_send_unary_api_call_error(caplog): error = exceptions.GoogleAPICallError("The front fell off") manager._client.acknowledge.side_effect = error - manager.send(types.StreamingPullRequest(ack_ids=["ack_id1", "ack_id2"])) + manager.send(gapic_types.StreamingPullRequest(ack_ids=["ack_id1", "ack_id2"])) assert "The front fell off" in caplog.text @@ -427,7 +427,7 @@ def test_send_unary_retry_error(caplog): 
manager._client.acknowledge.side_effect = error with pytest.raises(exceptions.RetryError): - manager.send(types.StreamingPullRequest(ack_ids=["ack_id1", "ack_id2"])) + manager.send(gapic_types.StreamingPullRequest(ack_ids=["ack_id1", "ack_id2"])) assert "RetryError while sending unary RPC" in caplog.text assert "signaled streaming pull manager shutdown" in caplog.text @@ -450,7 +450,7 @@ def test_heartbeat(): manager.heartbeat() - manager._rpc.send.assert_called_once_with(types.StreamingPullRequest()) + manager._rpc.send.assert_called_once_with(gapic_types.StreamingPullRequest()) def test_heartbeat_inactive(): @@ -661,7 +661,7 @@ def test__get_initial_request(): initial_request = manager._get_initial_request(123) - assert isinstance(initial_request, types.StreamingPullRequest) + assert isinstance(initial_request, gapic_types.StreamingPullRequest) assert initial_request.subscription == "subscription-name" assert initial_request.stream_ack_deadline_seconds == 123 assert initial_request.modify_deadline_ack_ids == ["1", "2"] @@ -674,7 +674,7 @@ def test__get_initial_request_wo_leaser(): initial_request = manager._get_initial_request(123) - assert isinstance(initial_request, types.StreamingPullRequest) + assert isinstance(initial_request, gapic_types.StreamingPullRequest) assert initial_request.subscription == "subscription-name" assert initial_request.stream_ack_deadline_seconds == 123 assert initial_request.modify_deadline_ack_ids == [] @@ -686,14 +686,15 @@ def test__on_response_delivery_attempt(): manager._callback = mock.sentinel.callback # Set up the messages. 
- response = types.StreamingPullResponse( + response = gapic_types.StreamingPullResponse( received_messages=[ - types.ReceivedMessage( - ack_id="fack", message=types.PubsubMessage(data=b"foo", message_id="1") + gapic_types.ReceivedMessage( + ack_id="fack", + message=gapic_types.PubsubMessage(data=b"foo", message_id="1"), ), - types.ReceivedMessage( + gapic_types.ReceivedMessage( ack_id="back", - message=types.PubsubMessage(data=b"bar", message_id="2"), + message=gapic_types.PubsubMessage(data=b"bar", message_id="2"), delivery_attempt=6, ), ] @@ -717,13 +718,15 @@ def test__on_response_no_leaser_overload(): manager._callback = mock.sentinel.callback # Set up the messages. - response = types.StreamingPullResponse( + response = gapic_types.StreamingPullResponse( received_messages=[ - types.ReceivedMessage( - ack_id="fack", message=types.PubsubMessage(data=b"foo", message_id="1") + gapic_types.ReceivedMessage( + ack_id="fack", + message=gapic_types.PubsubMessage(data=b"foo", message_id="1"), ), - types.ReceivedMessage( - ack_id="back", message=types.PubsubMessage(data=b"bar", message_id="2") + gapic_types.ReceivedMessage( + ack_id="back", + message=gapic_types.PubsubMessage(data=b"bar", message_id="2"), ), ] ) @@ -754,16 +757,19 @@ def test__on_response_with_leaser_overload(): manager._callback = mock.sentinel.callback # Set up the messages. 
- response = types.StreamingPullResponse( + response = gapic_types.StreamingPullResponse( received_messages=[ - types.ReceivedMessage( - ack_id="fack", message=types.PubsubMessage(data=b"foo", message_id="1") + gapic_types.ReceivedMessage( + ack_id="fack", + message=gapic_types.PubsubMessage(data=b"foo", message_id="1"), ), - types.ReceivedMessage( - ack_id="back", message=types.PubsubMessage(data=b"bar", message_id="2") + gapic_types.ReceivedMessage( + ack_id="back", + message=gapic_types.PubsubMessage(data=b"bar", message_id="2"), ), - types.ReceivedMessage( - ack_id="zack", message=types.PubsubMessage(data=b"baz", message_id="3") + gapic_types.ReceivedMessage( + ack_id="zack", + message=gapic_types.PubsubMessage(data=b"baz", message_id="3"), ), ] ) @@ -825,23 +831,23 @@ def test__on_response_with_ordering_keys(): manager._callback = mock.sentinel.callback # Set up the messages. - response = types.StreamingPullResponse( + response = gapic_types.StreamingPullResponse( received_messages=[ - types.ReceivedMessage( + gapic_types.ReceivedMessage( ack_id="fack", - message=types.PubsubMessage( + message=gapic_types.PubsubMessage( data=b"foo", message_id="1", ordering_key="" ), ), - types.ReceivedMessage( + gapic_types.ReceivedMessage( ack_id="back", - message=types.PubsubMessage( + message=gapic_types.PubsubMessage( data=b"bar", message_id="2", ordering_key="key1" ), ), - types.ReceivedMessage( + gapic_types.ReceivedMessage( ack_id="zack", - message=types.PubsubMessage( + message=gapic_types.PubsubMessage( data=b"baz", message_id="3", ordering_key="key1" ), ), @@ -901,20 +907,6 @@ def test__on_response_with_ordering_keys(): assert manager._messages_on_hold.get() is None -def test_retryable_stream_errors(): - # Make sure the config matches our hard-coded tuple of exceptions. 
- interfaces = subscriber_client_config.config["interfaces"] - retry_codes = interfaces["google.pubsub.v1.Subscriber"]["retry_codes"] - idempotent = retry_codes["idempotent"] - - status_codes = tuple(getattr(grpc.StatusCode, name, None) for name in idempotent) - expected = tuple( - exceptions.exception_class_for_grpc_status(status_code) - for status_code in status_codes - ) - assert set(expected).issubset(set(streaming_pull_manager._RETRYABLE_STREAM_ERRORS)) - - def test__should_recover_true(): manager = make_manager() diff --git a/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 310485279..f75f1dae2 100644 --- a/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -16,9 +16,10 @@ import mock from google.cloud.pubsub_v1 import subscriber -from google.cloud.pubsub_v1.gapic import subscriber_client from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import futures +from google.pubsub_v1.services.subscriber import client as subscriber_client +from google.pubsub_v1.services.subscriber.transports.grpc import SubscriberGrpcTransport def test_init(): @@ -28,10 +29,10 @@ def test_init(): def test_init_w_custom_transport(): - transport = object() + transport = SubscriberGrpcTransport() client = subscriber.Client(transport=transport) assert isinstance(client.api, subscriber_client.SubscriberClient) - assert client.api.transport is transport + assert client.api._transport is transport def test_init_w_api_endpoint(): @@ -39,7 +40,7 @@ def test_init_w_api_endpoint(): client = subscriber.Client(client_options=client_options) assert isinstance(client.api, subscriber_client.SubscriberClient) - assert (client.api.transport._channel._channel.target()).decode( + assert (client.api._transport.grpc_channel._channel.target()).decode( "utf-8" ) == "testendpoint.google.com" @@ -49,7 +50,7 @@ def 
test_init_w_unicode_api_endpoint(): client = subscriber.Client(client_options=client_options) assert isinstance(client.api, subscriber_client.SubscriberClient) - assert (client.api.transport._channel._channel.target()).decode( + assert (client.api._transport.grpc_channel._channel.target()).decode( "utf-8" ) == "testendpoint.google.com" @@ -58,7 +59,7 @@ def test_init_w_empty_client_options(): client = subscriber.Client(client_options={}) assert isinstance(client.api, subscriber_client.SubscriberClient) - assert (client.api.transport._channel._channel.target()).decode( + assert (client.api._transport.grpc_channel._channel.target()).decode( "utf-8" ) == subscriber_client.SubscriberClient.SERVICE_ADDRESS @@ -73,7 +74,7 @@ def test_init_emulator(monkeypatch): # # Sadly, there seems to be no good way to do this without poking at # the private API of gRPC. - channel = client.api.transport.pull._channel + channel = client.api._transport.pull._channel assert channel.target().decode("utf8") == "/baz/bacon/" @@ -138,31 +139,32 @@ def test_subscribe_options(manager_open): def test_close(): - mock_transport = mock.NonCallableMock() - client = subscriber.Client(transport=mock_transport) + client = subscriber.Client() + patcher = mock.patch.object(client.api._transport.grpc_channel, "close") - client.close() + with patcher as patched_close: + client.close() - mock_transport.channel.close.assert_called() + patched_close.assert_called() def test_closes_channel_as_context_manager(): - mock_transport = mock.NonCallableMock() - client = subscriber.Client(transport=mock_transport) + client = subscriber.Client() + patcher = mock.patch.object(client.api._transport.grpc_channel, "close") - with client: - pass + with patcher as patched_close: + with client: + pass - mock_transport.channel.close.assert_called() + patched_close.assert_called() def test_streaming_pull_gapic_monkeypatch(): - transport = mock.NonCallableMock(spec=["streaming_pull"]) - transport.streaming_pull = 
mock.Mock(spec=[]) - client = subscriber.Client(transport=transport) + client = subscriber.Client() - client.streaming_pull(requests=iter([])) + with mock.patch("google.api_core.gapic_v1.method.wrap_method"): + client.streaming_pull(requests=iter([])) - assert client.api.transport is transport + transport = client.api._transport assert hasattr(transport.streaming_pull, "_prefetch_first_result_") assert not transport.streaming_pull._prefetch_first_result_