From e7cb216e437d727f8bbf188869fa7ff547b33a1c Mon Sep 17 00:00:00 2001
From: Yoshi Automation Bot
Date: Tue, 13 Oct 2020 14:54:34 -0700
Subject: [PATCH] fix!: avoid collision with built-in functions by renaming type property to type_ (#53)

BREAKING CHANGE: type is renamed to type_ to avoid conflict with built-in
functions (introduced in googleapis/gapic-generator-python#595)

* changes without context

  autosynth cannot find the source of changes triggered by earlier changes in
  this repository, or by version upgrades to tools such as linters.

* chore(python): use BUILD_SPECIFIC_GCLOUD_PROJECT for samples

  https://github.com/googleapis/python-talent/blob/ef045e8eb348db36d7a2a611e6f26b11530d273b/samples/snippets/noxfile_config.py#L27-L32

  `BUILD_SPECIFIC_GCLOUD_PROJECT` is an alternate project used for sample tests
  that do poorly with concurrent runs on the same project.

Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com>
Source-Date: Wed Sep 30 13:06:03 2020 -0600
Source-Repo: googleapis/synthtool
Source-Sha: 9b0da5204ab90bcc36f8cd4e5689eff1a54cc3e4
Source-Link: https://github.com/googleapis/synthtool/commit/9b0da5204ab90bcc36f8cd4e5689eff1a54cc3e4

Co-authored-by: Takashi Matsuo
---
 .../.kokoro/samples/python3.6/common.cfg      |   6 +
 .../.kokoro/samples/python3.7/common.cfg      |   6 +
 .../.kokoro/samples/python3.8/common.cfg      |   6 +
 .../services/data_transfer_service/client.py  |  18 +--
 .../types/datatransfer.py                     |   4 +-
 .../samples/AUTHORING_GUIDE.md                |   1 +
 .../samples/CONTRIBUTING.md                   |   1 +
 .../synth.metadata                            | 109 +++++++++++++++++-
 8 files changed, 136 insertions(+), 15 deletions(-)
 create mode 100644 packages/google-cloud-bigquery-datatransfer/samples/AUTHORING_GUIDE.md
 create mode 100644 packages/google-cloud-bigquery-datatransfer/samples/CONTRIBUTING.md

diff --git a/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.6/common.cfg b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.6/common.cfg
index 84052e6fa8a4..d92ddf8df477 100644
--- a/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.6/common.cfg
+++ b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.6/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
     value: "py-3.6"
 }
 
+# Declare build specific Cloud project.
+env_vars: {
+    key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+    value: "python-docs-samples-tests-py36"
+}
+
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
     value: "github/python-bigquery-datatransfer/.kokoro/test-samples.sh"
diff --git a/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.7/common.cfg b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.7/common.cfg
index 147291a2ee59..8c221a6e556a 100644
--- a/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.7/common.cfg
+++ b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.7/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
     value: "py-3.7"
 }
 
+# Declare build specific Cloud project.
+env_vars: {
+    key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+    value: "python-docs-samples-tests-py37"
+}
+
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
     value: "github/python-bigquery-datatransfer/.kokoro/test-samples.sh"
diff --git a/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.8/common.cfg b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.8/common.cfg
index b447948a038a..fa5c7d2f21cf 100644
--- a/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.8/common.cfg
+++ b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.8/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
     value: "py-3.8"
 }
 
+# Declare build specific Cloud project.
+env_vars: {
+    key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+    value: "python-docs-samples-tests-py38"
+}
+
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
     value: "github/python-bigquery-datatransfer/.kokoro/test-samples.sh"
diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py
index f4cd6199644b..f0f518e0fb6f 100644
--- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py
+++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py
@@ -19,10 +19,10 @@
 from distutils import util
 import os
 import re
-from typing import Callable, Dict, Sequence, Tuple, Type, Union
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
 import pkg_resources
 
-import google.api_core.client_options as ClientOptions  # type: ignore
+from google.api_core import client_options as client_options_lib  # type: ignore
 from google.api_core import exceptions  # type: ignore
 from google.api_core import gapic_v1  # type: ignore
 from google.api_core import retry as retries  # type: ignore
@@ -161,9 +161,9 @@ def parse_transfer_config_path(path: str) -> Dict[str, str]:
     def __init__(
         self,
         *,
-        credentials: credentials.Credentials = None,
-        transport: Union[str, DataTransferServiceTransport] = None,
-        client_options: ClientOptions = None,
+        credentials: Optional[credentials.Credentials] = None,
+        transport: Union[str, DataTransferServiceTransport, None] = None,
+        client_options: Optional[client_options_lib.ClientOptions] = None,
         client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
     ) -> None:
         """Instantiate the data transfer service client.
@@ -177,8 +177,8 @@ def __init__(
             transport (Union[str, ~.DataTransferServiceTransport]): The
                 transport to use. If set to None, a transport is chosen
                 automatically.
-            client_options (ClientOptions): Custom options for the client. It
-                won't take effect if a ``transport`` instance is provided.
+            client_options (client_options_lib.ClientOptions): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
             (1) The ``api_endpoint`` property can be used to override the
                 default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
                 environment variable can also be used to override the endpoint:
@@ -204,9 +204,9 @@ def __init__(
                 creation failed for any reason.
""" if isinstance(client_options, dict): - client_options = ClientOptions.from_dict(client_options) + client_options = client_options_lib.from_dict(client_options) if client_options is None: - client_options = ClientOptions.ClientOptions() + client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. use_client_cert = bool( diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py index 2fab069945e2..c14fbbb358e0 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py @@ -70,7 +70,7 @@ class DataSourceParameter(proto.Message): Parameter display name in the user interface. description (str): Parameter description. - type (~.datatransfer.DataSourceParameter.Type): + type_ (~.datatransfer.DataSourceParameter.Type): Parameter type. required (bool): Is parameter required. @@ -122,7 +122,7 @@ class Type(proto.Enum): description = proto.Field(proto.STRING, number=3) - type = proto.Field(proto.ENUM, number=4, enum=Type,) + type_ = proto.Field(proto.ENUM, number=4, enum=Type,) required = proto.Field(proto.BOOL, number=5) diff --git a/packages/google-cloud-bigquery-datatransfer/samples/AUTHORING_GUIDE.md b/packages/google-cloud-bigquery-datatransfer/samples/AUTHORING_GUIDE.md new file mode 100644 index 000000000000..55c97b32f4c1 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/samples/AUTHORING_GUIDE.md @@ -0,0 +1 @@ +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/samples/CONTRIBUTING.md b/packages/google-cloud-bigquery-datatransfer/samples/CONTRIBUTING.md new file mode 100644 index 000000000000..34c882b6f1a3 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/samples/CONTRIBUTING.md @@ -0,0 +1 @@ +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/synth.metadata b/packages/google-cloud-bigquery-datatransfer/synth.metadata index 20b58a931e2c..a036d5945dea 100644 --- a/packages/google-cloud-bigquery-datatransfer/synth.metadata +++ b/packages/google-cloud-bigquery-datatransfer/synth.metadata @@ -3,22 +3,30 @@ { "git": { "name": ".", - "remote": "git@github.com:plamut/python-bigquery-datatransfer.git", - "sha": "41256eec1994fbff48894c7055e6440b4e636628" + "remote": "https://github.com/googleapis/python-bigquery-datatransfer.git", + "sha": "cc2b9ff311dfa6ec9d181d4c2a4c952a609f5dec" + } + }, + { + "git": { + "name": "googleapis", + "remote": "https://github.com/googleapis/googleapis.git", + "sha": "eabe7c0fde64b1451df6ea171b2009238b0df07c", + "internalRef": "335110052" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "da29da32b3a988457b49ae290112b74f14b713cc" + "sha": "9b0da5204ab90bcc36f8cd4e5689eff1a54cc3e4" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "da29da32b3a988457b49ae290112b74f14b713cc" + "sha": "9b0da5204ab90bcc36f8cd4e5689eff1a54cc3e4" } } ], @@ -32,5 +40,98 @@ "generator": 
"bazel" } } + ], + "generatedFiles": [ + ".flake8", + ".github/CONTRIBUTING.md", + ".github/ISSUE_TEMPLATE/bug_report.md", + ".github/ISSUE_TEMPLATE/feature_request.md", + ".github/ISSUE_TEMPLATE/support_request.md", + ".github/PULL_REQUEST_TEMPLATE.md", + ".github/release-please.yml", + ".github/snippet-bot.yml", + ".gitignore", + ".kokoro/build.sh", + ".kokoro/continuous/common.cfg", + ".kokoro/continuous/continuous.cfg", + ".kokoro/docker/docs/Dockerfile", + ".kokoro/docker/docs/fetch_gpg_keys.sh", + ".kokoro/docs/common.cfg", + ".kokoro/docs/docs-presubmit.cfg", + ".kokoro/docs/docs.cfg", + ".kokoro/populate-secrets.sh", + ".kokoro/presubmit/common.cfg", + ".kokoro/presubmit/presubmit.cfg", + ".kokoro/publish-docs.sh", + ".kokoro/release.sh", + ".kokoro/release/common.cfg", + ".kokoro/release/release.cfg", + ".kokoro/samples/lint/common.cfg", + ".kokoro/samples/lint/continuous.cfg", + ".kokoro/samples/lint/periodic.cfg", + ".kokoro/samples/lint/presubmit.cfg", + ".kokoro/samples/python3.6/common.cfg", + ".kokoro/samples/python3.6/continuous.cfg", + ".kokoro/samples/python3.6/periodic.cfg", + ".kokoro/samples/python3.6/presubmit.cfg", + ".kokoro/samples/python3.7/common.cfg", + ".kokoro/samples/python3.7/continuous.cfg", + ".kokoro/samples/python3.7/periodic.cfg", + ".kokoro/samples/python3.7/presubmit.cfg", + ".kokoro/samples/python3.8/common.cfg", + ".kokoro/samples/python3.8/continuous.cfg", + ".kokoro/samples/python3.8/periodic.cfg", + ".kokoro/samples/python3.8/presubmit.cfg", + ".kokoro/test-samples.sh", + ".kokoro/trampoline.sh", + ".kokoro/trampoline_v2.sh", + ".trampolinerc", + "CODE_OF_CONDUCT.md", + "CONTRIBUTING.rst", + "LICENSE", + "MANIFEST.in", + "docs/_static/custom.css", + "docs/_templates/layout.html", + "docs/bigquery_datatransfer_v1/services.rst", + "docs/bigquery_datatransfer_v1/types.rst", + "docs/conf.py", + "docs/multiprocessing.rst", + "google/cloud/bigquery_datatransfer/__init__.py", + "google/cloud/bigquery_datatransfer/py.typed", + "google/cloud/bigquery_datatransfer_v1/__init__.py", + "google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto", + "google/cloud/bigquery_datatransfer_v1/proto/transfer.proto", + "google/cloud/bigquery_datatransfer_v1/py.typed", + "google/cloud/bigquery_datatransfer_v1/services/__init__.py", + "google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/__init__.py", + "google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py", + "google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py", + "google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/pagers.py", + "google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/__init__.py", + "google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py", + "google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py", + "google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py", + "google/cloud/bigquery_datatransfer_v1/types/__init__.py", + "google/cloud/bigquery_datatransfer_v1/types/datatransfer.py", + "google/cloud/bigquery_datatransfer_v1/types/transfer.py", + "mypy.ini", + "noxfile.py", + "renovate.json", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/noxfile.py", + "samples/snippets/noxfile.py", + "scripts/decrypt-secrets.sh", + "scripts/fixup_bigquery_datatransfer_v1_keywords.py", + "scripts/readme-gen/readme_gen.py", + 
"scripts/readme-gen/templates/README.tmpl.rst", + "scripts/readme-gen/templates/auth.tmpl.rst", + "scripts/readme-gen/templates/auth_api_key.tmpl.rst", + "scripts/readme-gen/templates/install_deps.tmpl.rst", + "scripts/readme-gen/templates/install_portaudio.tmpl.rst", + "setup.cfg", + "testing/.gitignore", + "tests/unit/gapic/bigquery_datatransfer_v1/__init__.py", + "tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py" ] } \ No newline at end of file