diff --git a/google/cloud/bigquery_datatransfer/__init__.py b/google/cloud/bigquery_datatransfer/__init__.py index bcde8ce8..2aed6efc 100644 --- a/google/cloud/bigquery_datatransfer/__init__.py +++ b/google/cloud/bigquery_datatransfer/__init__.py @@ -38,6 +38,9 @@ from google.cloud.bigquery_datatransfer_v1.types.datatransfer import ( DeleteTransferRunRequest, ) +from google.cloud.bigquery_datatransfer_v1.types.datatransfer import ( + EnrollDataSourcesRequest, +) from google.cloud.bigquery_datatransfer_v1.types.datatransfer import ( GetDataSourceRequest, ) @@ -91,6 +94,7 @@ from google.cloud.bigquery_datatransfer_v1.types.transfer import TransferConfig from google.cloud.bigquery_datatransfer_v1.types.transfer import TransferMessage from google.cloud.bigquery_datatransfer_v1.types.transfer import TransferRun +from google.cloud.bigquery_datatransfer_v1.types.transfer import UserInfo from google.cloud.bigquery_datatransfer_v1.types.transfer import TransferState from google.cloud.bigquery_datatransfer_v1.types.transfer import TransferType @@ -104,6 +108,7 @@ "DataSourceParameter", "DeleteTransferConfigRequest", "DeleteTransferRunRequest", + "EnrollDataSourcesRequest", "GetDataSourceRequest", "GetTransferConfigRequest", "GetTransferRunRequest", @@ -125,6 +130,7 @@ "TransferConfig", "TransferMessage", "TransferRun", + "UserInfo", "TransferState", "TransferType", ) diff --git a/google/cloud/bigquery_datatransfer_v1/__init__.py b/google/cloud/bigquery_datatransfer_v1/__init__.py index 1c6f0fc8..bb7dd585 100644 --- a/google/cloud/bigquery_datatransfer_v1/__init__.py +++ b/google/cloud/bigquery_datatransfer_v1/__init__.py @@ -24,6 +24,7 @@ from .types.datatransfer import DataSourceParameter from .types.datatransfer import DeleteTransferConfigRequest from .types.datatransfer import DeleteTransferRunRequest +from .types.datatransfer import EnrollDataSourcesRequest from .types.datatransfer import GetDataSourceRequest from .types.datatransfer import GetTransferConfigRequest from .types.datatransfer import GetTransferRunRequest @@ -45,6 +46,7 @@ from .types.transfer import TransferConfig from .types.transfer import TransferMessage from .types.transfer import TransferRun +from .types.transfer import UserInfo from .types.transfer import TransferState from .types.transfer import TransferType @@ -59,6 +61,7 @@ "DeleteTransferConfigRequest", "DeleteTransferRunRequest", "EmailPreferences", + "EnrollDataSourcesRequest", "GetDataSourceRequest", "GetTransferConfigRequest", "GetTransferRunRequest", @@ -81,4 +84,5 @@ "TransferState", "TransferType", "UpdateTransferConfigRequest", + "UserInfo", ) diff --git a/google/cloud/bigquery_datatransfer_v1/gapic_metadata.json b/google/cloud/bigquery_datatransfer_v1/gapic_metadata.json index 75ee9340..3b914fe7 100644 --- a/google/cloud/bigquery_datatransfer_v1/gapic_metadata.json +++ b/google/cloud/bigquery_datatransfer_v1/gapic_metadata.json @@ -30,6 +30,11 @@ "delete_transfer_run" ] }, + "EnrollDataSources": { + "methods": [ + "enroll_data_sources" + ] + }, "GetDataSource": { "methods": [ "get_data_source" @@ -105,6 +110,11 @@ "delete_transfer_run" ] }, + "EnrollDataSources": { + "methods": [ + "enroll_data_sources" + ] + }, "GetDataSource": { "methods": [ "get_data_source" diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py index 76ce9750..f80ceb42 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py 
+++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py @@ -46,10 +46,8 @@ class DataTransferServiceAsyncClient: - """The Google BigQuery Data Transfer Service API enables - BigQuery users to configure the transfer of their data from - other Google Products into BigQuery. This service contains - methods that are end user exposed. It backs up the frontend. + """This API allows users to manage their data transfers into + BigQuery. """ _client: DataTransferServiceClient @@ -196,7 +194,7 @@ async def get_data_source( metadata: Sequence[Tuple[str, str]] = (), ) -> datatransfer.DataSource: r"""Retrieves a supported data source and returns its - settings, which can be used for UI rendering. + settings. Args: request (Union[google.cloud.bigquery_datatransfer_v1.types.GetDataSourceRequest, dict]): @@ -219,9 +217,8 @@ async def get_data_source( Returns: google.cloud.bigquery_datatransfer_v1.types.DataSource: - Represents data source metadata. - Metadata is sufficient to render UI and - request proper OAuth tokens. + Defines the properties and custom + parameters for a data source. """ # Create or coerce a protobuf request object. @@ -281,7 +278,7 @@ async def list_data_sources( metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListDataSourcesAsyncPager: r"""Lists supported data sources and returns their - settings, which can be used for UI rendering. + settings. Args: request (Union[google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesRequest, dict]): @@ -562,8 +559,8 @@ async def delete_transfer_config( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a data transfer configuration, - including any associated transfer runs and logs. + r"""Deletes a data transfer configuration, including any + associated transfer runs and logs. Args: request (Union[google.cloud.bigquery_datatransfer_v1.types.DeleteTransferConfigRequest, dict]): @@ -1134,14 +1131,13 @@ async def list_transfer_runs( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTransferRunsAsyncPager: - r"""Returns information about running and completed jobs. + r"""Returns information about running and completed + transfer runs. Args: request (Union[google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsRequest, dict]): The request object. A request to list data transfer - runs. UI can use this method to show/filter specific - data transfer runs. The data source can use this method - to request all scheduled transfer runs. + runs. parent (:class:`str`): Required. Name of transfer configuration for which transfer runs should be retrieved. Format of transfer @@ -1229,8 +1225,7 @@ async def list_transfer_logs( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTransferLogsAsyncPager: - r"""Returns user facing log messages for the data - transfer run. + r"""Returns log messages for the transfer run. Args: request (Union[google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsRequest, dict]): @@ -1324,11 +1319,6 @@ async def check_valid_creds( ) -> datatransfer.CheckValidCredsResponse: r"""Returns true if valid credentials exist for the given data source and requesting user. - Some data sources doesn't support service account, so we - need to talk to them on behalf of the end user. This API - just checks whether we have OAuth token for the - particular user, which is a pre-requisite before user - can create a transfer config. 
Args: request (Union[google.cloud.bigquery_datatransfer_v1.types.CheckValidCredsRequest, dict]): @@ -1408,6 +1398,57 @@ async def check_valid_creds( # Done; return the response. return response + async def enroll_data_sources( + self, + request: Union[datatransfer.EnrollDataSourcesRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Enroll data sources in a user project. This allows + users to create transfer configurations for these data + sources. They will also appear in the ListDataSources + RPC and as such, will appear in the BigQuery UI + 'https://bigquery.cloud.google.com' (and the documents + can be found at + https://cloud.google.com/bigquery/bigquery-web-ui and + https://cloud.google.com/bigquery/docs/working-with- + transfers). + + Args: + request (Union[google.cloud.bigquery_datatransfer_v1.types.EnrollDataSourcesRequest, dict]): + The request object. A request to enroll a set of data + sources so they are visible in the BigQuery UI's + `Transfer` tab. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + request = datatransfer.EnrollDataSourcesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.enroll_data_sources, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + async def __aenter__(self): return self diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py index c60767da..d3afbdcb 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py +++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py @@ -84,10 +84,8 @@ def get_transport_class( class DataTransferServiceClient(metaclass=DataTransferServiceClientMeta): - """The Google BigQuery Data Transfer Service API enables - BigQuery users to configure the transfer of their data from - other Google Products into BigQuery. This service contains - methods that are end user exposed. It backs up the frontend. + """This API allows users to manage their data transfers into + BigQuery. """ @staticmethod @@ -412,7 +410,7 @@ def get_data_source( metadata: Sequence[Tuple[str, str]] = (), ) -> datatransfer.DataSource: r"""Retrieves a supported data source and returns its - settings, which can be used for UI rendering. + settings. Args: request (Union[google.cloud.bigquery_datatransfer_v1.types.GetDataSourceRequest, dict]): @@ -435,9 +433,8 @@ def get_data_source( Returns: google.cloud.bigquery_datatransfer_v1.types.DataSource: - Represents data source metadata. - Metadata is sufficient to render UI and - request proper OAuth tokens. + Defines the properties and custom + parameters for a data source. """ # Create or coerce a protobuf request object. 
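
Note: the hunks above wire the new `enroll_data_sources` method through both the async and sync clients. A minimal usage sketch of the sync variant (not part of this patch), assuming application-default credentials; the project ID and data source ID are hypothetical placeholders:

```python
# Hedged usage sketch for the new EnrollDataSources RPC; not part of this
# patch. Assumes application-default credentials are configured; the
# project ID and data source ID below are placeholders.
from google.cloud import bigquery_datatransfer_v1

client = bigquery_datatransfer_v1.DataTransferServiceClient()

client.enroll_data_sources(
    request=bigquery_datatransfer_v1.EnrollDataSourcesRequest(
        name="projects/my-project",
        data_source_ids=["scheduled_query"],
    )
)
# The RPC returns Empty, so the method returns None; on success the
# enrolled sources appear in ListDataSources and the BigQuery UI.
```
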
@@ -487,7 +484,7 @@ def list_data_sources( metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListDataSourcesPager: r"""Lists supported data sources and returns their - settings, which can be used for UI rendering. + settings. Args: request (Union[google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesRequest, dict]): @@ -758,8 +755,8 @@ def delete_transfer_config( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a data transfer configuration, - including any associated transfer runs and logs. + r"""Deletes a data transfer configuration, including any + associated transfer runs and logs. Args: request (Union[google.cloud.bigquery_datatransfer_v1.types.DeleteTransferConfigRequest, dict]): @@ -1283,14 +1280,13 @@ def list_transfer_runs( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTransferRunsPager: - r"""Returns information about running and completed jobs. + r"""Returns information about running and completed + transfer runs. Args: request (Union[google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsRequest, dict]): The request object. A request to list data transfer - runs. UI can use this method to show/filter specific - data transfer runs. The data source can use this method - to request all scheduled transfer runs. + runs. parent (str): Required. Name of transfer configuration for which transfer runs should be retrieved. Format of transfer @@ -1368,8 +1364,7 @@ def list_transfer_logs( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTransferLogsPager: - r"""Returns user facing log messages for the data - transfer run. + r"""Returns log messages for the transfer run. Args: request (Union[google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsRequest, dict]): @@ -1453,11 +1448,6 @@ def check_valid_creds( ) -> datatransfer.CheckValidCredsResponse: r"""Returns true if valid credentials exist for the given data source and requesting user. - Some data sources doesn't support service account, so we - need to talk to them on behalf of the end user. This API - just checks whether we have OAuth token for the - particular user, which is a pre-requisite before user - can create a transfer config. Args: request (Union[google.cloud.bigquery_datatransfer_v1.types.CheckValidCredsRequest, dict]): @@ -1527,6 +1517,58 @@ def check_valid_creds( # Done; return the response. return response + def enroll_data_sources( + self, + request: Union[datatransfer.EnrollDataSourcesRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Enroll data sources in a user project. This allows + users to create transfer configurations for these data + sources. They will also appear in the ListDataSources + RPC and as such, will appear in the BigQuery UI + 'https://bigquery.cloud.google.com' (and the documents + can be found at + https://cloud.google.com/bigquery/bigquery-web-ui and + https://cloud.google.com/bigquery/docs/working-with- + transfers). + + Args: + request (Union[google.cloud.bigquery_datatransfer_v1.types.EnrollDataSourcesRequest, dict]): + The request object. A request to enroll a set of data + sources so they are visible in the BigQuery UI's + `Transfer` tab. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.EnrollDataSourcesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datatransfer.EnrollDataSourcesRequest): + request = datatransfer.EnrollDataSourcesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.enroll_data_sources] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + def __enter__(self): return self diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py index 2d662a50..4120fab2 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py +++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py @@ -292,6 +292,9 @@ def _prep_wrapped_messages(self, client_info): default_timeout=20.0, client_info=client_info, ), + self.enroll_data_sources: gapic_v1.method.wrap_method( + self.enroll_data_sources, default_timeout=None, client_info=client_info, + ), } def close(self): @@ -450,5 +453,14 @@ def check_valid_creds( ]: raise NotImplementedError() + @property + def enroll_data_sources( + self, + ) -> Callable[ + [datatransfer.EnrollDataSourcesRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + __all__ = ("DataTransferServiceTransport",) diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py index d00ec104..1d5028f1 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py +++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py @@ -33,10 +33,8 @@ class DataTransferServiceGrpcTransport(DataTransferServiceTransport): """gRPC backend transport for DataTransferService. - The Google BigQuery Data Transfer Service API enables - BigQuery users to configure the transfer of their data from - other Google Products into BigQuery. This service contains - methods that are end user exposed. It backs up the frontend. + This API allows users to manage their data transfers into + BigQuery. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -237,7 +235,7 @@ def get_data_source( r"""Return a callable for the get data source method over gRPC. Retrieves a supported data source and returns its - settings, which can be used for UI rendering. + settings. Returns: Callable[[~.GetDataSourceRequest], @@ -266,7 +264,7 @@ def list_data_sources( r"""Return a callable for the list data sources method over gRPC. Lists supported data sources and returns their - settings, which can be used for UI rendering. + settings. 
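
Note: `base.py` above registers `enroll_data_sources` in `_prep_wrapped_messages` with no default retry or timeout and declares it as an abstract property, and each concrete transport then creates the gRPC callable lazily. An illustrative sketch of that stub-caching pattern (simplified, not the library's code; the real transports also pass the serializer and deserializer):

```python
# Simplified sketch of the transports' lazy stub-caching pattern; the
# real code also passes request_serializer/response_deserializer.
class _StubCacheSketch:
    def __init__(self, grpc_channel):
        self.grpc_channel = grpc_channel
        self._stubs = {}  # one callable per RPC, created on first access

    @property
    def enroll_data_sources(self):
        if "enroll_data_sources" not in self._stubs:
            self._stubs["enroll_data_sources"] = self.grpc_channel.unary_unary(
                "/google.cloud.bigquery.datatransfer.v1.DataTransferService/EnrollDataSources"
            )
        return self._stubs["enroll_data_sources"]
```
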
Returns: Callable[[~.ListDataSourcesRequest], @@ -345,8 +343,8 @@ def delete_transfer_config( ) -> Callable[[datatransfer.DeleteTransferConfigRequest], empty_pb2.Empty]: r"""Return a callable for the delete transfer config method over gRPC. - Deletes a data transfer configuration, - including any associated transfer runs and logs. + Deletes a data transfer configuration, including any + associated transfer runs and logs. Returns: Callable[[~.DeleteTransferConfigRequest], @@ -548,7 +546,8 @@ def list_transfer_runs( ]: r"""Return a callable for the list transfer runs method over gRPC. - Returns information about running and completed jobs. + Returns information about running and completed + transfer runs. Returns: Callable[[~.ListTransferRunsRequest], @@ -576,8 +575,7 @@ def list_transfer_logs( ]: r"""Return a callable for the list transfer logs method over gRPC. - Returns user facing log messages for the data - transfer run. + Returns log messages for the transfer run. Returns: Callable[[~.ListTransferLogsRequest], @@ -607,11 +605,6 @@ def check_valid_creds( Returns true if valid credentials exist for the given data source and requesting user. - Some data sources doesn't support service account, so we - need to talk to them on behalf of the end user. This API - just checks whether we have OAuth token for the - particular user, which is a pre-requisite before user - can create a transfer config. Returns: Callable[[~.CheckValidCredsRequest], @@ -631,6 +624,40 @@ def check_valid_creds( ) return self._stubs["check_valid_creds"] + @property + def enroll_data_sources( + self, + ) -> Callable[[datatransfer.EnrollDataSourcesRequest], empty_pb2.Empty]: + r"""Return a callable for the enroll data sources method over gRPC. + + Enroll data sources in a user project. This allows + users to create transfer configurations for these data + sources. They will also appear in the ListDataSources + RPC and as such, will appear in the BigQuery UI + 'https://bigquery.cloud.google.com' (and the documents + can be found at + https://cloud.google.com/bigquery/bigquery-web-ui and + https://cloud.google.com/bigquery/docs/working-with- + transfers). + + Returns: + Callable[[~.EnrollDataSourcesRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "enroll_data_sources" not in self._stubs: + self._stubs["enroll_data_sources"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/EnrollDataSources", + request_serializer=datatransfer.EnrollDataSourcesRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["enroll_data_sources"] + def close(self): self.grpc_channel.close() diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py index bcc8e7aa..6fafb5e9 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py +++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py @@ -34,10 +34,8 @@ class DataTransferServiceGrpcAsyncIOTransport(DataTransferServiceTransport): """gRPC AsyncIO backend transport for DataTransferService. 
- The Google BigQuery Data Transfer Service API enables - BigQuery users to configure the transfer of their data from - other Google Products into BigQuery. This service contains - methods that are end user exposed. It backs up the frontend. + This API allows users to manage their data transfers into + BigQuery. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -241,7 +239,7 @@ def get_data_source( r"""Return a callable for the get data source method over gRPC. Retrieves a supported data source and returns its - settings, which can be used for UI rendering. + settings. Returns: Callable[[~.GetDataSourceRequest], @@ -271,7 +269,7 @@ def list_data_sources( r"""Return a callable for the list data sources method over gRPC. Lists supported data sources and returns their - settings, which can be used for UI rendering. + settings. Returns: Callable[[~.ListDataSourcesRequest], @@ -356,8 +354,8 @@ def delete_transfer_config( ]: r"""Return a callable for the delete transfer config method over gRPC. - Deletes a data transfer configuration, - including any associated transfer runs and logs. + Deletes a data transfer configuration, including any + associated transfer runs and logs. Returns: Callable[[~.DeleteTransferConfigRequest], @@ -564,7 +562,8 @@ def list_transfer_runs( ]: r"""Return a callable for the list transfer runs method over gRPC. - Returns information about running and completed jobs. + Returns information about running and completed + transfer runs. Returns: Callable[[~.ListTransferRunsRequest], @@ -593,8 +592,7 @@ def list_transfer_logs( ]: r"""Return a callable for the list transfer logs method over gRPC. - Returns user facing log messages for the data - transfer run. + Returns log messages for the transfer run. Returns: Callable[[~.ListTransferLogsRequest], @@ -625,11 +623,6 @@ def check_valid_creds( Returns true if valid credentials exist for the given data source and requesting user. - Some data sources doesn't support service account, so we - need to talk to them on behalf of the end user. This API - just checks whether we have OAuth token for the - particular user, which is a pre-requisite before user - can create a transfer config. Returns: Callable[[~.CheckValidCredsRequest], @@ -649,6 +642,40 @@ def check_valid_creds( ) return self._stubs["check_valid_creds"] + @property + def enroll_data_sources( + self, + ) -> Callable[[datatransfer.EnrollDataSourcesRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the enroll data sources method over gRPC. + + Enroll data sources in a user project. This allows + users to create transfer configurations for these data + sources. They will also appear in the ListDataSources + RPC and as such, will appear in the BigQuery UI + 'https://bigquery.cloud.google.com' (and the documents + can be found at + https://cloud.google.com/bigquery/bigquery-web-ui and + https://cloud.google.com/bigquery/docs/working-with- + transfers). + + Returns: + Callable[[~.EnrollDataSourcesRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "enroll_data_sources" not in self._stubs: + self._stubs["enroll_data_sources"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/EnrollDataSources", + request_serializer=datatransfer.EnrollDataSourcesRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["enroll_data_sources"] + def close(self): return self.grpc_channel.close() diff --git a/google/cloud/bigquery_datatransfer_v1/types/__init__.py b/google/cloud/bigquery_datatransfer_v1/types/__init__.py index b79fc3ff..178c2aa6 100644 --- a/google/cloud/bigquery_datatransfer_v1/types/__init__.py +++ b/google/cloud/bigquery_datatransfer_v1/types/__init__.py @@ -21,6 +21,7 @@ DataSourceParameter, DeleteTransferConfigRequest, DeleteTransferRunRequest, + EnrollDataSourcesRequest, GetDataSourceRequest, GetTransferConfigRequest, GetTransferRunRequest, @@ -44,6 +45,7 @@ TransferConfig, TransferMessage, TransferRun, + UserInfo, TransferState, TransferType, ) @@ -56,6 +58,7 @@ "DataSourceParameter", "DeleteTransferConfigRequest", "DeleteTransferRunRequest", + "EnrollDataSourcesRequest", "GetDataSourceRequest", "GetTransferConfigRequest", "GetTransferRunRequest", @@ -77,6 +80,7 @@ "TransferConfig", "TransferMessage", "TransferRun", + "UserInfo", "TransferState", "TransferType", ) diff --git a/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py b/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py index f47c6426..04a8e5eb 100644 --- a/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py +++ b/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py @@ -48,17 +48,14 @@ "ScheduleTransferRunsResponse", "StartManualTransferRunsRequest", "StartManualTransferRunsResponse", + "EnrollDataSourcesRequest", }, ) class DataSourceParameter(proto.Message): - r"""Represents a data source parameter with validation rules, so - that parameters can be rendered in the UI. These parameters are - given to us by supported data sources, and include all needed - information for rendering and validation. - Thus, whoever uses this api can decide to generate either - generic ui, or custom data source specific forms. + r"""A parameter used to define custom fields in a data source + definition. Attributes: param_id (str): @@ -134,8 +131,8 @@ class Type(proto.Enum): class DataSource(proto.Message): - r"""Represents data source metadata. Metadata is sufficient to - render UI and request proper OAuth tokens. + r"""Defines the properties and custom parameters for a data + source. Attributes: name (str): @@ -532,9 +529,7 @@ def raw_page(self): class ListTransferRunsRequest(proto.Message): - r"""A request to list data transfer runs. UI can use this method - to show/filter specific data transfer runs. The data source can - use this method to request all scheduled transfer runs. + r"""A request to list data transfer runs. Attributes: parent (str): @@ -790,4 +785,21 @@ class StartManualTransferRunsResponse(proto.Message): runs = proto.RepeatedField(proto.MESSAGE, number=1, message=transfer.TransferRun,) +class EnrollDataSourcesRequest(proto.Message): + r"""A request to enroll a set of data sources so they are visible in the + BigQuery UI's ``Transfer`` tab. + + Attributes: + name (str): + The name of the project resource in the form: + ``projects/{project_id}`` + data_source_ids (Sequence[str]): + Data sources that are enrolled. It is + required to provide at least one data source id. 
+ """ + + name = proto.Field(proto.STRING, number=1,) + data_source_ids = proto.RepeatedField(proto.STRING, number=2,) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/bigquery_datatransfer_v1/types/transfer.py b/google/cloud/bigquery_datatransfer_v1/types/transfer.py index 43292214..87acd6a3 100644 --- a/google/cloud/bigquery_datatransfer_v1/types/transfer.py +++ b/google/cloud/bigquery_datatransfer_v1/types/transfer.py @@ -27,6 +27,7 @@ "TransferState", "EmailPreferences", "ScheduleOptions", + "UserInfo", "TransferConfig", "TransferRun", "TransferMessage", @@ -98,6 +99,19 @@ class ScheduleOptions(proto.Message): end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) +class UserInfo(proto.Message): + r"""Information about a user. + + Attributes: + email (str): + E-mail address of the user. + + This field is a member of `oneof`_ ``_email``. + """ + + email = proto.Field(proto.STRING, number=1, optional=True,) + + class TransferConfig(proto.Message): r"""Represents a data transfer configuration. A transfer configuration contains all metadata needed to perform a data transfer. For @@ -133,7 +147,8 @@ class TransferConfig(proto.Message): up a data transfer' section for each data source. For example the parameters for Cloud Storage transfers are listed here: - https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq + https://cloud.google.com/bigquery- + transfer/docs/cloud-storage-transfer#bq schedule (str): Data transfer schedule. If the data source does not support a custom schedule, this should be empty. If it is empty, the @@ -144,8 +159,10 @@ class TransferConfig(proto.Message): ``first sunday of quarter 00:00``. See more explanation about the format here: https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format - NOTE: the granularity should be at least 8 hours, or less - frequent. + + NOTE: The minimum interval time between recurring transfers + depends on the data source; refer to the documentation for + your data source. schedule_options (google.cloud.bigquery_datatransfer_v1.types.ScheduleOptions): Options customizing the data transfer schedule. @@ -184,6 +201,14 @@ class TransferConfig(proto.Message): Email notifications will be sent according to these preferences to the email address of the user who owns this transfer config. + owner_info (google.cloud.bigquery_datatransfer_v1.types.UserInfo): + Output only. Information about the user whose credentials + are used to transfer data. Populated only for + ``transferConfigs.get`` requests. In case the user + information is not available, this field will not be + populated. + + This field is a member of `oneof`_ ``_owner_info``. 
""" name = proto.Field(proto.STRING, number=1,) @@ -206,6 +231,9 @@ class TransferConfig(proto.Message): email_preferences = proto.Field( proto.MESSAGE, number=18, message="EmailPreferences", ) + owner_info = proto.Field( + proto.MESSAGE, number=27, optional=True, message="UserInfo", + ) class TransferRun(proto.Message): diff --git a/scripts/fixup_bigquery_datatransfer_v1_keywords.py b/scripts/fixup_bigquery_datatransfer_v1_keywords.py index 0db416f0..d26de659 100644 --- a/scripts/fixup_bigquery_datatransfer_v1_keywords.py +++ b/scripts/fixup_bigquery_datatransfer_v1_keywords.py @@ -43,6 +43,7 @@ class bigquery_datatransferCallTransformer(cst.CSTTransformer): 'create_transfer_config': ('parent', 'transfer_config', 'authorization_code', 'version_info', 'service_account_name', ), 'delete_transfer_config': ('name', ), 'delete_transfer_run': ('name', ), + 'enroll_data_sources': ('name', 'data_source_ids', ), 'get_data_source': ('name', ), 'get_transfer_config': ('name', ), 'get_transfer_run': ('name', ), diff --git a/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py b/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py index 4e76586a..063a1ba0 100644 --- a/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py +++ b/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py @@ -4472,6 +4472,141 @@ async def test_check_valid_creds_flattened_error_async(): ) +@pytest.mark.parametrize("request_type", [datatransfer.EnrollDataSourcesRequest, dict,]) +def test_enroll_data_sources(request_type, transport: str = "grpc"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enroll_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.enroll_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.EnrollDataSourcesRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_enroll_data_sources_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enroll_data_sources), "__call__" + ) as call: + client.enroll_data_sources() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.EnrollDataSourcesRequest() + + +@pytest.mark.asyncio +async def test_enroll_data_sources_async( + transport: str = "grpc_asyncio", request_type=datatransfer.EnrollDataSourcesRequest +): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enroll_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.enroll_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.EnrollDataSourcesRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_enroll_data_sources_async_from_dict(): + await test_enroll_data_sources_async(request_type=dict) + + +def test_enroll_data_sources_field_headers(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.EnrollDataSourcesRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enroll_data_sources), "__call__" + ) as call: + call.return_value = None + client.enroll_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_enroll_data_sources_field_headers_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.EnrollDataSourcesRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enroll_data_sources), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.enroll_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.DataTransferServiceGrpcTransport( @@ -4585,6 +4720,7 @@ def test_data_transfer_service_base_transport(): "list_transfer_runs", "list_transfer_logs", "check_valid_creds", + "enroll_data_sources", ) for method in methods: with pytest.raises(NotImplementedError):
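
Note: the new tests follow the existing per-RPC pattern — sync and async variants, an empty-call failsafe, routing-header checks, and registration in the base-transport method list. For completeness, an async usage sketch mirroring the sync example earlier (identifiers are placeholders, not from this patch):

```python
# Hedged async usage sketch; project and data source IDs are placeholders.
import asyncio

from google.cloud import bigquery_datatransfer_v1


async def main():
    client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient()
    await client.enroll_data_sources(
        request=bigquery_datatransfer_v1.EnrollDataSourcesRequest(
            name="projects/my-project",
            data_source_ids=["scheduled_query"],
        )
    )


asyncio.run(main())
```
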