diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client.py index 6db2c2fa444b..0ab63dab9375 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client.py @@ -1031,6 +1031,100 @@ def schedule_transfer_runs( request, retry=retry, timeout=timeout, metadata=metadata ) + def start_manual_transfer_runs( + self, + parent=None, + requested_time_range=None, + requested_run_time=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Start manual transfer runs to be executed now with schedule\_time equal + to current time. The transfer runs can be created for a time range where + the run\_time is between start\_time (inclusive) and end\_time + (exclusive), or for a specific run\_time. + + Example: + >>> from google.cloud import bigquery_datatransfer_v1 + >>> + >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() + >>> + >>> response = client.start_manual_transfer_runs() + + Args: + parent (str): Transfer configuration name in the form: + ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. + requested_time_range (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.TimeRange]): Time range for the transfer runs that should be started. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.bigquery_datatransfer_v1.types.TimeRange` + requested_run_time (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.Timestamp]): Specific run\_time for a transfer run to be started. The + requested\_run\_time must not be in the future. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.bigquery_datatransfer_v1.types.Timestamp` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "start_manual_transfer_runs" not in self._inner_api_calls: + self._inner_api_calls[ + "start_manual_transfer_runs" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.start_manual_transfer_runs, + default_retry=self._method_configs["StartManualTransferRuns"].retry, + default_timeout=self._method_configs["StartManualTransferRuns"].timeout, + client_info=self._client_info, + ) + + # Sanity check: We have some fields which are mutually exclusive; + # raise ValueError if more than one is sent. + google.api_core.protobuf_helpers.check_oneof( + requested_time_range=requested_time_range, + requested_run_time=requested_run_time, + ) + + request = datatransfer_pb2.StartManualTransferRunsRequest( + parent=parent, + requested_time_range=requested_time_range, + requested_run_time=requested_run_time, + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["start_manual_transfer_runs"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + def get_transfer_run( self, name, @@ -1465,97 +1559,3 @@ def check_valid_creds( return self._inner_api_calls["check_valid_creds"]( request, retry=retry, timeout=timeout, metadata=metadata ) - - def start_manual_transfer_runs( - self, - parent=None, - requested_time_range=None, - requested_run_time=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Start manual transfer runs to be executed now with schedule\_time equal - to current time. The transfer runs can be created for a time range where - the run\_time is between start\_time (inclusive) and end\_time - (exclusive), or for a specific run\_time. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> response = client.start_manual_transfer_runs() - - Args: - parent (str): Transfer configuration name in the form: - ``projects/{project_id}/transferConfigs/{config_id}`` or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. - requested_time_range (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.TimeRange]): Time range for the transfer runs that should be started. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.bigquery_datatransfer_v1.types.TimeRange` - requested_run_time (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.Timestamp]): Specific run\_time for a transfer run to be started. The - requested\_run\_time must not be in the future. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.bigquery_datatransfer_v1.types.Timestamp` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. 
- - Returns: - A :class:`~google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "start_manual_transfer_runs" not in self._inner_api_calls: - self._inner_api_calls[ - "start_manual_transfer_runs" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.start_manual_transfer_runs, - default_retry=self._method_configs["StartManualTransferRuns"].retry, - default_timeout=self._method_configs["StartManualTransferRuns"].timeout, - client_info=self._client_info, - ) - - # Sanity check: We have some fields which are mutually exclusive; - # raise ValueError if more than one is sent. - google.api_core.protobuf_helpers.check_oneof( - requested_time_range=requested_time_range, - requested_run_time=requested_run_time, - ) - - request = datatransfer_pb2.StartManualTransferRunsRequest( - parent=parent, - requested_time_range=requested_time_range, - requested_run_time=requested_run_time, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["start_manual_transfer_runs"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client_config.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client_config.py index 28a9494f22ef..10ac8aaa4e3f 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client_config.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client_config.py @@ -18,71 +18,71 @@ }, "methods": { "GetDataSource": { - "timeout_millis": 20000, + "timeout_millis": 60000, "retry_codes_name": "idempotent", "retry_params_name": "default", }, "ListDataSources": { - "timeout_millis": 20000, + "timeout_millis": 60000, "retry_codes_name": "idempotent", "retry_params_name": "default", }, "CreateTransferConfig": { - "timeout_millis": 30000, + "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "UpdateTransferConfig": { - "timeout_millis": 30000, + "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "DeleteTransferConfig": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "GetTransferConfig": { - "timeout_millis": 30000, + "timeout_millis": 60000, "retry_codes_name": "idempotent", "retry_params_name": "default", }, "ListTransferConfigs": { - "timeout_millis": 30000, + "timeout_millis": 60000, "retry_codes_name": "idempotent", "retry_params_name": "default", }, "ScheduleTransferRuns": { - "timeout_millis": 30000, + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + 
"StartManualTransferRuns": { + "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "GetTransferRun": { - "timeout_millis": 30000, + "timeout_millis": 60000, "retry_codes_name": "idempotent", "retry_params_name": "default", }, "DeleteTransferRun": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "ListTransferRuns": { - "timeout_millis": 30000, + "timeout_millis": 60000, "retry_codes_name": "idempotent", "retry_params_name": "default", }, "ListTransferLogs": { - "timeout_millis": 30000, + "timeout_millis": 60000, "retry_codes_name": "idempotent", "retry_params_name": "default", }, "CheckValidCreds": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "StartManualTransferRuns": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/transports/data_transfer_service_grpc_transport.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/transports/data_transfer_service_grpc_transport.py index 840fcbeb10d5..c480f976efb3 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/transports/data_transfer_service_grpc_transport.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/transports/data_transfer_service_grpc_transport.py @@ -226,6 +226,22 @@ def schedule_transfer_runs(self): """ return self._stubs["data_transfer_service_stub"].ScheduleTransferRuns + @property + def start_manual_transfer_runs(self): + """Return the gRPC stub for :meth:`DataTransferServiceClient.start_manual_transfer_runs`. + + Start manual transfer runs to be executed now with schedule\_time equal + to current time. The transfer runs can be created for a time range where + the run\_time is between start\_time (inclusive) and end\_time + (exclusive), or for a specific run\_time. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["data_transfer_service_stub"].StartManualTransferRuns + @property def get_transfer_run(self): """Return the gRPC stub for :meth:`DataTransferServiceClient.get_transfer_run`. @@ -295,19 +311,3 @@ def check_valid_creds(self): deserialized response object. """ return self._stubs["data_transfer_service_stub"].CheckValidCreds - - @property - def start_manual_transfer_runs(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.start_manual_transfer_runs`. - - Start manual transfer runs to be executed now with schedule\_time equal - to current time. The transfer runs can be created for a time range where - the run\_time is between start\_time (inclusive) and end\_time - (exclusive), or for a specific run\_time. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["data_transfer_service_stub"].StartManualTransferRuns diff --git a/packages/google-cloud-bigquery-datatransfer/synth.metadata b/packages/google-cloud-bigquery-datatransfer/synth.metadata index fe9079b160b7..fb350264fdc7 100644 --- a/packages/google-cloud-bigquery-datatransfer/synth.metadata +++ b/packages/google-cloud-bigquery-datatransfer/synth.metadata @@ -1,27 +1,41 @@ { - "updateTime": "2020-02-05T09:47:23.342266Z", + "updateTime": "2020-02-20T22:57:36.119377Z", "sources": [ { "generator": { "name": "artman", - "version": "0.44.4", - "dockerImage": "googleapis/artman@sha256:19e945954fc960a4bdfee6cb34695898ab21a8cf0bac063ee39b91f00a1faec8" + "version": "0.45.0", + "dockerImage": "googleapis/artman@sha256:6aec9c34db0e4be221cdaf6faba27bdc07cfea846808b3d3b964dfce3a9a0f9b" + } + }, + { + "git": { + "name": ".", + "remote": "https://github.com/googleapis/python-bigquery-datatransfer.git", + "sha": "7786f170e7055fc4ff73dbeff02f8d152e1e7da8" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "a8ed9d921fdddc61d8467bfd7c1668f0ad90435c", - "internalRef": "293257997", - "log": "a8ed9d921fdddc61d8467bfd7c1668f0ad90435c\nfix: set Ruby module name for OrgPolicy\n\nPiperOrigin-RevId: 293257997\n\n6c7d28509bd8315de8af0889688ee20099594269\nredis: v1beta1 add UpgradeInstance and connect_mode field to Instance\n\nPiperOrigin-RevId: 293242878\n\nae0abed4fcb4c21f5cb67a82349a049524c4ef68\nredis: v1 add connect_mode field to Instance\n\nPiperOrigin-RevId: 293241914\n\n3f7a0d29b28ee9365771da2b66edf7fa2b4e9c56\nAdds service config definition for bigqueryreservation v1beta1\n\nPiperOrigin-RevId: 293234418\n\n0c88168d5ed6fe353a8cf8cbdc6bf084f6bb66a5\naddition of BUILD & configuration for accessapproval v1\n\nPiperOrigin-RevId: 293219198\n\n39bedc2e30f4778ce81193f6ba1fec56107bcfc4\naccessapproval: v1 publish protos\n\nPiperOrigin-RevId: 293167048\n\n69d9945330a5721cd679f17331a78850e2618226\nAdd file-level `Session` resource definition\n\nPiperOrigin-RevId: 293080182\n\nf6a1a6b417f39694275ca286110bc3c1ca4db0dc\nAdd file-level `Session` resource definition\n\nPiperOrigin-RevId: 293080178\n\n29d40b78e3dc1579b0b209463fbcb76e5767f72a\nExpose managedidentities/v1beta1/ API for client library usage.\n\nPiperOrigin-RevId: 292979741\n\na22129a1fb6e18056d576dfb7717aef74b63734a\nExpose managedidentities/v1/ API for client library usage.\n\nPiperOrigin-RevId: 292968186\n\nb5cbe4a4ba64ab19e6627573ff52057a1657773d\nSecurityCenter v1p1beta1: move file-level option on top to workaround protobuf.js bug.\n\nPiperOrigin-RevId: 292647187\n\nb224b317bf20c6a4fbc5030b4a969c3147f27ad3\nAdds API definitions for bigqueryreservation v1beta1.\n\nPiperOrigin-RevId: 292634722\n\nc1468702f9b17e20dd59007c0804a089b83197d2\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 292626173\n\nffdfa4f55ab2f0afc11d0eb68f125ccbd5e404bd\nvision: v1p3beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292605599\n\n78f61482cd028fc1d9892aa5d89d768666a954cd\nvision: v1p1beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292605125\n\n60bb5a294a604fd1778c7ec87b265d13a7106171\nvision: v1p2beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292604980\n\n3bcf7aa79d45eb9ec29ab9036e9359ea325a7fc3\nvision: v1p4beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292604656\n\n2717b8a1c762b26911b45ecc2e4ee01d98401b28\nFix dataproc artman client library generation.\n\nPiperOrigin-RevId: 
292555664\n\n7ac66d9be8a7d7de4f13566d8663978c9ee9dcd7\nAdd Dataproc Autoscaling API to V1.\n\nPiperOrigin-RevId: 292450564\n\n5d932b2c1be3a6ef487d094e3cf5c0673d0241dd\n- Improve documentation\n- Add a client_id field to StreamingPullRequest\n\nPiperOrigin-RevId: 292434036\n\neaff9fa8edec3e914995ce832b087039c5417ea7\nmonitoring: v3 publish annotations and client retry config\n\nPiperOrigin-RevId: 292425288\n\n70958bab8c5353870d31a23fb2c40305b050d3fe\nBigQuery Storage Read API v1 clients.\n\nPiperOrigin-RevId: 292407644\n\n7a15e7fe78ff4b6d5c9606a3264559e5bde341d1\nUpdate backend proto for Google Cloud Endpoints\n\nPiperOrigin-RevId: 292391607\n\n3ca2c014e24eb5111c8e7248b1e1eb833977c83d\nbazel: Add --flaky_test_attempts=3 argument to prevent CI failures caused by flaky tests\n\nPiperOrigin-RevId: 292382559\n\n9933347c1f677e81e19a844c2ef95bfceaf694fe\nbazel:Integrate latest protoc-java-resource-names-plugin changes (fix for PyYAML dependency in bazel rules)\n\nPiperOrigin-RevId: 292376626\n\nb835ab9d2f62c88561392aa26074c0b849fb0bd3\nasset: v1p2beta1 add client config annotations\n\n* remove unintentionally exposed RPCs\n* remove messages relevant to removed RPCs\n\nPiperOrigin-RevId: 292369593\n\n" + "sha": "3eaaaf8626ce5b0c0bc7eee05e143beffa373b01", + "internalRef": "296274723", + "log": "3eaaaf8626ce5b0c0bc7eee05e143beffa373b01\nAdd BUILD.bazel for v1 secretmanager.googleapis.com\n\nPiperOrigin-RevId: 296274723\n\ne76149c3d992337f85eeb45643106aacae7ede82\nMove securitycenter v1 to use generate from annotations.\n\nPiperOrigin-RevId: 296266862\n\n203740c78ac69ee07c3bf6be7408048751f618f8\nAdd StackdriverLoggingConfig field to Cloud Tasks v2 API.\n\nPiperOrigin-RevId: 296256388\n\ne4117d5e9ed8bbca28da4a60a94947ca51cb2083\nCreate a Bazel BUILD file for the google.actions.type export.\n\nPiperOrigin-RevId: 296212567\n\na9639a0a9854fd6e1be08bba1ac3897f4f16cb2f\nAdd secretmanager.googleapis.com v1 protos\n\nPiperOrigin-RevId: 295983266\n\nce4f4c21d9dd2bfab18873a80449b9d9851efde8\nasset: v1p1beta1 remove SearchResources and SearchIamPolicies\n\nPiperOrigin-RevId: 295861722\n\ncb61d6c2d070b589980c779b68ffca617f789116\nasset: v1p1beta1 remove SearchResources and SearchIamPolicies\n\nPiperOrigin-RevId: 295855449\n\nab2685d8d3a0e191dc8aef83df36773c07cb3d06\nfix: Dataproc v1 - AutoscalingPolicy annotation\n\nThis adds the second resource name pattern to the\nAutoscalingPolicy resource.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 295738415\n\n8a1020bf6828f6e3c84c3014f2c51cb62b739140\nUpdate cloud asset api v1p4beta1.\n\nPiperOrigin-RevId: 295286165\n\n5cfa105206e77670369e4b2225597386aba32985\nAdd service control related proto build rule.\n\nPiperOrigin-RevId: 295262088\n\nee4dddf805072004ab19ac94df2ce669046eec26\nmonitoring v3: Add prefix \"https://cloud.google.com/\" into the link for global access\ncl 295167522, get ride of synth.py hacks\n\nPiperOrigin-RevId: 295238095\n\nd9835e922ea79eed8497db270d2f9f85099a519c\nUpdate some minor docs changes about user event proto\n\nPiperOrigin-RevId: 295185610\n\n5f311e416e69c170243de722023b22f3df89ec1c\nfix: use correct PHP package name in gapic configuration\n\nPiperOrigin-RevId: 295161330\n\n6cdd74dcdb071694da6a6b5a206e3a320b62dd11\npubsub: v1 add client config annotations and retry config\n\nPiperOrigin-RevId: 295158776\n\n5169f46d9f792e2934d9fa25c36d0515b4fd0024\nAdded cloud asset api v1p4beta1.\n\nPiperOrigin-RevId: 295026522\n\n56b55aa8818cd0a532a7d779f6ef337ba809ccbd\nFix: Resource annotations for CreateTimeSeriesRequest and ListTimeSeriesRequest 
should refer to valid resources. TimeSeries is not a named resource.\n\nPiperOrigin-RevId: 294931650\n\n0646bc775203077226c2c34d3e4d50cc4ec53660\nRemove unnecessary languages from bigquery-related artman configuration files.\n\nPiperOrigin-RevId: 294809380\n\n8b78aa04382e3d4147112ad6d344666771bb1909\nUpdate backend.proto for schemes and protocol\n\nPiperOrigin-RevId: 294788800\n\n80b8f8b3de2359831295e24e5238641a38d8488f\nAdds artman config files for bigquerystorage endpoints v1beta2, v1alpha2, v1\n\nPiperOrigin-RevId: 294763931\n\n2c17ac33b226194041155bb5340c3f34733f1b3a\nAdd parameter to sample generated for UpdateInstance. Related to https://github.com/googleapis/python-redis/issues/4\n\nPiperOrigin-RevId: 294734008\n\nd5e8a8953f2acdfe96fb15e85eb2f33739623957\nMove bigquery datatransfer to gapic v2.\n\nPiperOrigin-RevId: 294703703\n\nefd36705972cfcd7d00ab4c6dfa1135bafacd4ae\nfix: Add two annotations that we missed.\n\nPiperOrigin-RevId: 294664231\n\n8a36b928873ff9c05b43859b9d4ea14cd205df57\nFix: Define the \"bigquery.googleapis.com/Table\" resource in the BigQuery Storage API (v1beta2).\n\nPiperOrigin-RevId: 294459768\n\nc7a3caa2c40c49f034a3c11079dd90eb24987047\nFix: Define the \"bigquery.googleapis.com/Table\" resource in the BigQuery Storage API (v1).\n\nPiperOrigin-RevId: 294456889\n\n5006247aa157e59118833658084345ee59af7c09\nFix: Make deprecated fields optional\nFix: Deprecate SetLoggingServiceRequest.zone in line with the comments\nFeature: Add resource name method signatures where appropriate\n\nPiperOrigin-RevId: 294383128\n\neabba40dac05c5cbe0fca3a35761b17e372036c4\nFix: C# and PHP package/namespace capitalization for BigQuery Storage v1.\n\nPiperOrigin-RevId: 294382444\n\nf8d9a858a7a55eba8009a23aa3f5cc5fe5e88dde\nfix: artman configuration file for bigtable-admin\n\nPiperOrigin-RevId: 294322616\n\n0f29555d1cfcf96add5c0b16b089235afbe9b1a9\nAPI definition for (not-yet-launched) GCS gRPC.\n\nPiperOrigin-RevId: 294321472\n\nfcc86bee0e84dc11e9abbff8d7c3529c0626f390\nfix: Bigtable Admin v2\n\nChange LRO metadata from PartialUpdateInstanceMetadata\nto UpdateInstanceMetadata. 
(Otherwise, it will not build.)\n\nPiperOrigin-RevId: 294264582\n\n6d9361eae2ebb3f42d8c7ce5baf4bab966fee7c0\nrefactor: Add annotations to Bigtable Admin v2.\n\nPiperOrigin-RevId: 294243406\n\nad7616f3fc8e123451c8b3a7987bc91cea9e6913\nFix: Resource type in CreateLogMetricRequest should use logging.googleapis.com.\nFix: ListLogEntries should have a method signature for convenience of calling it.\n\nPiperOrigin-RevId: 294222165\n\n63796fcbb08712676069e20a3e455c9f7aa21026\nFix: Remove extraneous resource definition for cloudkms.googleapis.com/CryptoKey.\n\nPiperOrigin-RevId: 294176658\n\ne7d8a694f4559201e6913f6610069cb08b39274e\nDepend on the latest gapic-generator and resource names plugin.\n\nThis fixes the very old an very annoying bug: https://github.com/googleapis/gapic-generator/pull/3087\n\nPiperOrigin-RevId: 293903652\n\n806b2854a966d55374ee26bb0cef4e30eda17b58\nfix: correct capitalization of Ruby namespaces in SecurityCenter V1p1beta1\n\nPiperOrigin-RevId: 293903613\n\n1b83c92462b14d67a7644e2980f723112472e03a\nPublish annotations and grpc service config for Logging API.\n\nPiperOrigin-RevId: 293893514\n\ne46f761cd6ec15a9e3d5ed4ff321a4bcba8e8585\nGenerate the Bazel build file for recommendengine public api\n\nPiperOrigin-RevId: 293710856\n\n68477017c4173c98addac0373950c6aa9d7b375f\nMake `language_code` optional for UpdateIntentRequest and BatchUpdateIntentsRequest.\n\nThe comments and proto annotations describe this parameter as optional.\n\nPiperOrigin-RevId: 293703548\n\n16f823f578bca4e845a19b88bb9bc5870ea71ab2\nAdd BUILD.bazel files for managedidentities API\n\nPiperOrigin-RevId: 293698246\n\n2f53fd8178c9a9de4ad10fae8dd17a7ba36133f2\nAdd v1p1beta1 config file\n\nPiperOrigin-RevId: 293696729\n\n052b274138fce2be80f97b6dcb83ab343c7c8812\nAdd source field for user event and add field behavior annotations\n\nPiperOrigin-RevId: 293693115\n\n1e89732b2d69151b1b3418fff3d4cc0434f0dded\ndatacatalog: v1beta1 add three new RPCs to gapic v1beta1 config\n\nPiperOrigin-RevId: 293692823\n\n9c8bd09bbdc7c4160a44f1fbab279b73cd7a2337\nchange the name of AccessApproval service to AccessApprovalAdmin\n\nPiperOrigin-RevId: 293690934\n\n2e23b8fbc45f5d9e200572ca662fe1271bcd6760\nAdd ListEntryGroups method, add http bindings to support entry group tagging, and update some comments.\n\nPiperOrigin-RevId: 293666452\n\n0275e38a4ca03a13d3f47a9613aac8c8b0d3f1f2\nAdd proto_package field to managedidentities API. 
It is needed for APIs that still depend on artman generation.\n\nPiperOrigin-RevId: 293643323\n\n4cdfe8278cb6f308106580d70648001c9146e759\nRegenerating public protos for Data Catalog to add new Custom Type Entry feature.\n\nPiperOrigin-RevId: 293614782\n\n45d2a569ab526a1fad3720f95eefb1c7330eaada\nEnable client generation for v1 ManagedIdentities API.\n\nPiperOrigin-RevId: 293515675\n\n2c17086b77e6f3bcf04a1f65758dfb0c3da1568f\nAdd the Actions on Google common types (//google/actions/type/*).\n\nPiperOrigin-RevId: 293478245\n\n781aadb932e64a12fb6ead7cd842698d99588433\nDialogflow weekly v2/v2beta1 library update:\n- Documentation updates\nImportant updates are also posted at\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 293443396\n\ne2602608c9138c2fca24162720e67f9307c30b95\nDialogflow weekly v2/v2beta1 library update:\n- Documentation updates\nImportant updates are also posted at\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 293442964\n\nc8aef82028d06b7992278fa9294c18570dc86c3d\nAdd cc_proto_library and cc_grpc_library targets for Bigtable protos.\n\nAlso fix indentation of cc_grpc_library targets in Spanner and IAM protos.\n\nPiperOrigin-RevId: 293440538\n\ne2faab04f4cb7f9755072330866689b1943a16e9\ncloudtasks: v2 replace non-standard retry params in gapic config v2\n\nPiperOrigin-RevId: 293424055\n\ndfb4097ea628a8470292c6590a4313aee0c675bd\nerrorreporting: v1beta1 add legacy artman config for php\n\nPiperOrigin-RevId: 293423790\n\nb18aed55b45bfe5b62476292c72759e6c3e573c6\nasset: v1p1beta1 updated comment for `page_size` limit.\n\nPiperOrigin-RevId: 293421386\n\nc9ef36b7956d9859a2fc86ad35fcaa16958ab44f\nbazel: Refactor CI build scripts\n\nPiperOrigin-RevId: 293387911\n\n" + } + }, + { + "git": { + "name": "synthtool", + "remote": "rpc://devrel/cloud/libraries/tools/autosynth", + "sha": "706a38c26db42299845396cdae55db635c38794a" } }, { "template": { "name": "python_split_library", "origin": "synthtool.gcp", - "version": "2019.10.17" + "version": "2020.2.4" } } ], diff --git a/packages/google-cloud-bigquery-datatransfer/tests/system/gapic/v1/test_system_data_transfer_service_v1.py b/packages/google-cloud-bigquery-datatransfer/tests/system/gapic/v1/test_system_data_transfer_service_v1.py index 24fc2182dd2f..039871d51212 100644 --- a/packages/google-cloud-bigquery-datatransfer/tests/system/gapic/v1/test_system_data_transfer_service_v1.py +++ b/packages/google-cloud-bigquery-datatransfer/tests/system/gapic/v1/test_system_data_transfer_service_v1.py @@ -26,5 +26,5 @@ def test_list_data_sources(self): project_id = os.environ["PROJECT_ID"] client = bigquery_datatransfer_v1.DataTransferServiceClient() - parent = client.location_path(project_id, "us-central1") + parent = client.project_path(project_id) response = client.list_data_sources(parent) diff --git a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/v1/test_data_transfer_service_client_v1.py b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/v1/test_data_transfer_service_client_v1.py index 86fed1b28a85..a24f06476e97 100644 --- a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/v1/test_data_transfer_service_client_v1.py +++ b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/v1/test_data_transfer_service_client_v1.py @@ -480,6 +480,39 @@ def test_schedule_transfer_runs_exception(self): with pytest.raises(CustomException): client.schedule_transfer_runs(parent, start_time, end_time) + def test_start_manual_transfer_runs(self): + # Setup 
Expected Response + expected_response = {} + expected_response = datatransfer_pb2.StartManualTransferRunsResponse( + **expected_response + ) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = bigquery_datatransfer_v1.DataTransferServiceClient() + + response = client.start_manual_transfer_runs() + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = datatransfer_pb2.StartManualTransferRunsRequest() + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_start_manual_transfer_runs_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = bigquery_datatransfer_v1.DataTransferServiceClient() + + with pytest.raises(CustomException): + client.start_manual_transfer_runs() + def test_get_transfer_run(self): # Setup Expected Response name_2 = "name2-1052831874" @@ -696,36 +729,3 @@ def test_check_valid_creds_exception(self): with pytest.raises(CustomException): client.check_valid_creds(name) - - def test_start_manual_transfer_runs(self): - # Setup Expected Response - expected_response = {} - expected_response = datatransfer_pb2.StartManualTransferRunsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - response = client.start_manual_transfer_runs() - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.StartManualTransferRunsRequest() - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_start_manual_transfer_runs_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - with pytest.raises(CustomException): - client.start_manual_transfer_runs()
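
A minimal usage sketch of the newly added start_manual_transfer_runs method, assuming default application credentials and an existing transfer configuration; the project and config IDs below are hypothetical placeholders, and requested_run_time is set to the current time because the API rejects future run times.

    from google.cloud import bigquery_datatransfer_v1
    from google.protobuf import timestamp_pb2

    client = bigquery_datatransfer_v1.DataTransferServiceClient()

    # Hypothetical transfer config name; substitute a real project and config ID.
    parent = "projects/my-project/transferConfigs/my-config-id"

    # requested_run_time must not be in the future, so use the current time.
    run_time = timestamp_pb2.Timestamp()
    run_time.GetCurrentTime()

    # requested_run_time and requested_time_range are mutually exclusive;
    # passing both raises ValueError via the check_oneof call in the client.
    response = client.start_manual_transfer_runs(
        parent=parent, requested_run_time=run_time
    )
    for run in response.runs:
        print(run.name)

Passing requested_time_range (a TimeRange message or equivalent dict with start_time inclusive and end_time exclusive) instead of requested_run_time would schedule one run per period covered by the range, per the docstring added in this change.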