From 82acfb80bed977b6316439d2353843225c9784d9 Mon Sep 17 00:00:00 2001
From: Christopher Wilcox
Date: Tue, 29 Jan 2019 13:28:49 -0800
Subject: [PATCH] Add protos as an artifact to library (#7205)

---
 .../proto/datatransfer.proto | 653 ++++++++++++++++++
 .../proto/transfer.proto     | 222 ++++++
 .../synth.metadata           |  12 +-
 .../synth.py                 |   3 +-
 4 files changed, 883 insertions(+), 7 deletions(-)
 create mode 100644 packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto
 create mode 100644 packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer.proto

diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto
new file mode 100644
index 000000000000..548256110ca9
--- /dev/null
+++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto
@@ -0,0 +1,653 @@
+// Copyright 2018 Google Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.cloud.bigquery.datatransfer.v1;
+
+import "google/api/annotations.proto";
+import "google/cloud/bigquery/datatransfer/v1/transfer.proto";
+import "google/protobuf/duration.proto";
+import "google/protobuf/empty.proto";
+import "google/protobuf/field_mask.proto";
+import "google/protobuf/timestamp.proto";
+import "google/protobuf/wrappers.proto";
+
+option csharp_namespace = "Google.Cloud.BigQuery.DataTransfer.V1";
+option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer";
+option java_multiple_files = true;
+option java_outer_classname = "DataTransferProto";
+option java_package = "com.google.cloud.bigquery.datatransfer.v1";
+option php_namespace = "Google\\Cloud\\BigQuery\\DataTransfer\\V1";
+
+
+// The Google BigQuery Data Transfer Service API enables BigQuery users to
+// configure the transfer of their data from other Google products into
+// BigQuery. This service contains methods that are end-user exposed and
+// backs the frontend.
+service DataTransferService {
+  // Retrieves a supported data source and returns its settings,
+  // which can be used for UI rendering.
+  rpc GetDataSource(GetDataSourceRequest) returns (DataSource) {
+    option (google.api.http) = {
+      get: "/v1/{name=projects/*/locations/*/dataSources/*}"
+      additional_bindings {
+        get: "/v1/{name=projects/*/dataSources/*}"
+      }
+    };
+  }
+
+  // Lists supported data sources and returns their settings,
+  // which can be used for UI rendering.
+  rpc ListDataSources(ListDataSourcesRequest) returns (ListDataSourcesResponse) {
+    option (google.api.http) = {
+      get: "/v1/{parent=projects/*/locations/*}/dataSources"
+      additional_bindings {
+        get: "/v1/{parent=projects/*}/dataSources"
+      }
+    };
+  }
+
+  // Creates a new data transfer configuration.
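+  //
+  // A rough, non-normative Python sketch of calling this RPC (it assumes
+  // the generated v1 GAPIC client shipped with this package; the project
+  // id and config values below are placeholders, not part of this API):
+  //
+  //   from google.cloud import bigquery_datatransfer_v1
+  //
+  //   client = bigquery_datatransfer_v1.DataTransferServiceClient()
+  //   parent = 'projects/my-project'  # hypothetical project
+  //   transfer_config = {
+  //       'destination_dataset_id': 'my_dataset',
+  //       'display_name': 'nightly load',
+  //       'data_source_id': 'scheduled_query',
+  //   }
+  //   response = client.create_transfer_config(parent, transfer_config)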
+  rpc CreateTransferConfig(CreateTransferConfigRequest) returns (TransferConfig) {
+    option (google.api.http) = {
+      post: "/v1/{parent=projects/*/locations/*}/transferConfigs"
+      body: "transfer_config"
+      additional_bindings {
+        post: "/v1/{parent=projects/*}/transferConfigs"
+        body: "transfer_config"
+      }
+    };
+  }
+
+  // Updates a data transfer configuration.
+  // All fields must be set, even if they are not updated.
+  rpc UpdateTransferConfig(UpdateTransferConfigRequest) returns (TransferConfig) {
+    option (google.api.http) = {
+      patch: "/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}"
+      body: "transfer_config"
+      additional_bindings {
+        patch: "/v1/{transfer_config.name=projects/*/transferConfigs/*}"
+        body: "transfer_config"
+      }
+    };
+  }
+
+  // Deletes a data transfer configuration,
+  // including any associated transfer runs and logs.
+  rpc DeleteTransferConfig(DeleteTransferConfigRequest) returns (google.protobuf.Empty) {
+    option (google.api.http) = {
+      delete: "/v1/{name=projects/*/locations/*/transferConfigs/*}"
+      additional_bindings {
+        delete: "/v1/{name=projects/*/transferConfigs/*}"
+      }
+    };
+  }
+
+  // Returns information about a data transfer config.
+  rpc GetTransferConfig(GetTransferConfigRequest) returns (TransferConfig) {
+    option (google.api.http) = {
+      get: "/v1/{name=projects/*/locations/*/transferConfigs/*}"
+      additional_bindings {
+        get: "/v1/{name=projects/*/transferConfigs/*}"
+      }
+    };
+  }
+
+  // Returns information about all data transfers in the project.
+  rpc ListTransferConfigs(ListTransferConfigsRequest) returns (ListTransferConfigsResponse) {
+    option (google.api.http) = {
+      get: "/v1/{parent=projects/*/locations/*}/transferConfigs"
+      additional_bindings {
+        get: "/v1/{parent=projects/*}/transferConfigs"
+      }
+    };
+  }
+
+  // Creates transfer runs for a time range [start_time, end_time].
+  // For each date - or whatever granularity the data source supports - in the
+  // range, one transfer run is created.
+  // Note that runs are created per UTC time in the time range.
+  rpc ScheduleTransferRuns(ScheduleTransferRunsRequest) returns (ScheduleTransferRunsResponse) {
+    option (google.api.http) = {
+      post: "/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns"
+      body: "*"
+      additional_bindings {
+        post: "/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns"
+        body: "*"
+      }
+    };
+  }
+
+  // Returns information about a particular transfer run.
+  rpc GetTransferRun(GetTransferRunRequest) returns (TransferRun) {
+    option (google.api.http) = {
+      get: "/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}"
+      additional_bindings {
+        get: "/v1/{name=projects/*/transferConfigs/*/runs/*}"
+      }
+    };
+  }
+
+  // Deletes the specified transfer run.
+  rpc DeleteTransferRun(DeleteTransferRunRequest) returns (google.protobuf.Empty) {
+    option (google.api.http) = {
+      delete: "/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}"
+      additional_bindings {
+        delete: "/v1/{name=projects/*/transferConfigs/*/runs/*}"
+      }
+    };
+  }
+
+  // Returns information about running and completed jobs.
+  rpc ListTransferRuns(ListTransferRunsRequest) returns (ListTransferRunsResponse) {
+    option (google.api.http) = {
+      get: "/v1/{parent=projects/*/locations/*/transferConfigs/*}/runs"
+      additional_bindings {
+        get: "/v1/{parent=projects/*/transferConfigs/*}/runs"
+      }
+    };
+  }
+
+  // Returns user facing log messages for the data transfer run.
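+  //
+  // A minimal Python sketch (again assuming the generated v1 GAPIC client;
+  // the run resource name is illustrative only):
+  //
+  //   from google.cloud import bigquery_datatransfer_v1
+  //
+  //   client = bigquery_datatransfer_v1.DataTransferServiceClient()
+  //   run_name = 'projects/my-project/transferConfigs/123/runs/456'
+  //   for message in client.list_transfer_logs(run_name):
+  //       print(message.severity, message.message_text)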
+  rpc ListTransferLogs(ListTransferLogsRequest) returns (ListTransferLogsResponse) {
+    option (google.api.http) = {
+      get: "/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogs"
+      additional_bindings {
+        get: "/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs"
+      }
+    };
+  }
+
+  // Returns true if valid credentials exist for the given data source and
+  // requesting user.
+  // Some data sources don't support service accounts, so we need to talk to
+  // them on behalf of the end user. This API just checks whether we have an
+  // OAuth token for the particular user, which is a prerequisite before the
+  // user can create a transfer config.
+  rpc CheckValidCreds(CheckValidCredsRequest) returns (CheckValidCredsResponse) {
+    option (google.api.http) = {
+      post: "/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds"
+      body: "*"
+      additional_bindings {
+        post: "/v1/{name=projects/*/dataSources/*}:checkValidCreds"
+        body: "*"
+      }
+    };
+  }
+}
+
+// Represents a data source parameter with validation rules, so that
+// parameters can be rendered in the UI. These parameters are given to us by
+// supported data sources, and include all needed information for rendering
+// and validation.
+// Thus, whoever uses this API can decide to generate either a generic UI
+// or custom data-source-specific forms.
+message DataSourceParameter {
+  // Parameter type.
+  enum Type {
+    // Type unspecified.
+    TYPE_UNSPECIFIED = 0;
+
+    // String parameter.
+    STRING = 1;
+
+    // Integer parameter (64-bits).
+    // Will be serialized to json as string.
+    INTEGER = 2;
+
+    // Double precision floating point parameter.
+    DOUBLE = 3;
+
+    // Boolean parameter.
+    BOOLEAN = 4;
+
+    // Record parameter.
+    RECORD = 5;
+
+    // Page ID for a Google+ Page.
+    PLUS_PAGE = 6;
+  }
+
+  // Parameter identifier.
+  string param_id = 1;
+
+  // Parameter display name in the user interface.
+  string display_name = 2;
+
+  // Parameter description.
+  string description = 3;
+
+  // Parameter type.
+  Type type = 4;
+
+  // Whether the parameter is required.
+  bool required = 5;
+
+  // Whether the parameter can have multiple values.
+  bool repeated = 6;
+
+  // Regular expression which can be used for parameter validation.
+  string validation_regex = 7;
+
+  // All possible values for the parameter.
+  repeated string allowed_values = 8;
+
+  // For integer and double values, specifies the minimum allowed value.
+  google.protobuf.DoubleValue min_value = 9;
+
+  // For integer and double values, specifies the maximum allowed value.
+  google.protobuf.DoubleValue max_value = 10;
+
+  // When the parameter is a record, describes its child fields.
+  repeated DataSourceParameter fields = 11;
+
+  // Description of the requirements for this field, in case the user input
+  // does not fulfill the regex pattern or min/max values.
+  string validation_description = 12;
+
+  // URL to a help document to further explain the naming requirements.
+  string validation_help_url = 13;
+
+  // Cannot be changed after initial creation.
+  bool immutable = 14;
+
+  // If set to true, schema should be taken from the parent with the same
+  // parameter_id. Only applicable when parameter type is RECORD.
+  bool recurse = 15;
+}
+
+// Represents data source metadata. Metadata is sufficient to
+// render UI and request proper OAuth tokens.
+message DataSource {
+  // The type of authorization needed for this data source.
+  enum AuthorizationType {
+    // Type unspecified.
+    AUTHORIZATION_TYPE_UNSPECIFIED = 0;
+
+    // Use OAuth 2 authorization codes that can be exchanged
+    // for a refresh token on the backend.
+    AUTHORIZATION_CODE = 1;
+
+    // Return an authorization code for a given Google+ page that can then be
+    // exchanged for a refresh token on the backend.
+    GOOGLE_PLUS_AUTHORIZATION_CODE = 2;
+  }
+
+  // Represents how the data source supports data auto refresh.
+  enum DataRefreshType {
+    // The data source won't support data auto refresh, which is the default
+    // value.
+    DATA_REFRESH_TYPE_UNSPECIFIED = 0;
+
+    // The data source supports data auto refresh, and runs will be scheduled
+    // for the past few days. Does not allow custom values to be set for each
+    // transfer config.
+    SLIDING_WINDOW = 1;
+
+    // The data source supports data auto refresh, and runs will be scheduled
+    // for the past few days. Allows custom values to be set for each transfer
+    // config.
+    CUSTOM_SLIDING_WINDOW = 2;
+  }
+
+  // Output only. Data source resource name.
+  string name = 1;
+
+  // Data source id.
+  string data_source_id = 2;
+
+  // User friendly data source name.
+  string display_name = 3;
+
+  // User friendly data source description string.
+  string description = 4;
+
+  // Data source client id which should be used to receive the refresh token.
+  // When not supplied, no offline credentials are populated for data transfer.
+  string client_id = 5;
+
+  // API auth scopes for which the refresh token needs to be obtained. Only
+  // valid when `client_id` is specified. Ignored otherwise. These are the
+  // scopes needed by a data source to prepare data and ingest it into
+  // BigQuery, e.g., https://www.googleapis.com/auth/bigquery
+  repeated string scopes = 6;
+
+  // Deprecated. This field has no effect.
+  TransferType transfer_type = 7;
+
+  // Indicates whether the data source supports multiple transfers
+  // to different BigQuery targets.
+  bool supports_multiple_transfers = 8;
+
+  // The number of seconds to wait for an update from the data source
+  // before BigQuery marks the transfer as failed.
+  int32 update_deadline_seconds = 9;
+
+  // Default data transfer schedule.
+  // Examples of valid schedules include:
+  // `1st,3rd monday of month 15:30`,
+  // `every wed,fri of jan,jun 13:15`, and
+  // `first sunday of quarter 00:00`.
+  string default_schedule = 10;
+
+  // Specifies whether the data source supports a user defined schedule, or
+  // operates on the default schedule.
+  // When set to `true`, the user can override the default schedule.
+  bool supports_custom_schedule = 11;
+
+  // Data source parameters.
+  repeated DataSourceParameter parameters = 12;
+
+  // URL for the help document for this data source.
+  string help_url = 13;
+
+  // Indicates the type of authorization.
+  AuthorizationType authorization_type = 14;
+
+  // Specifies whether the data source supports automatic data refresh for the
+  // past few days, and how it's supported.
+  // For some data sources, data might not be complete until a few days later,
+  // so it's useful to refresh data automatically.
+  DataRefreshType data_refresh_type = 15;
+
+  // Default data refresh window in days.
+  // Only meaningful when `data_refresh_type` = `SLIDING_WINDOW`.
+  int32 default_data_refresh_window_days = 16;
+
+  // Disables backfilling and manual run scheduling
+  // for the data source.
+  bool manual_runs_disabled = 17;
+
+  // The minimum interval for the scheduler to schedule runs.
+  google.protobuf.Duration minimum_schedule_interval = 18;
+}
+
+// A request to get data source info.
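+// As an illustrative Python sketch only (assuming the generated v1 GAPIC
+// client; the project and data source ids below are placeholders), the
+// corresponding client call might look like:
+//
+//   from google.cloud import bigquery_datatransfer_v1
+//
+//   client = bigquery_datatransfer_v1.DataTransferServiceClient()
+//   name = 'projects/my-project/dataSources/scheduled_query'
+//   data_source = client.get_data_source(name)
+//   print(data_source.display_name)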
+message GetDataSourceRequest {
+  // The field will contain the name of the resource requested, for example:
+  // `projects/{project_id}/dataSources/{data_source_id}`
+  string name = 1;
+}
+
+// Request to list supported data sources and their data transfer settings.
+message ListDataSourcesRequest {
+  // The BigQuery project id for which data sources should be returned.
+  // Must be in the form: `projects/{project_id}`
+  string parent = 1;
+
+  // Pagination token, which can be used to request a specific page
+  // of `ListDataSourcesRequest` list results. For multiple-page
+  // results, `ListDataSourcesResponse` outputs
+  // a `next_page` token, which can be used as the
+  // `page_token` value to request the next page of list results.
+  string page_token = 3;
+
+  // Page size. The default page size is the maximum value of 1000 results.
+  int32 page_size = 4;
+}
+
+// Returns a list of supported data sources and their metadata.
+message ListDataSourcesResponse {
+  // List of supported data sources and their transfer settings.
+  repeated DataSource data_sources = 1;
+
+  // Output only. The next-page token. For multiple-page list results,
+  // this token can be used as the
+  // `ListDataSourcesRequest.page_token`
+  // to request the next page of list results.
+  string next_page_token = 2;
+}
+
+// A request to create a data transfer configuration. If new credentials are
+// needed for this transfer configuration, an authorization code must be
+// provided. If an authorization code is provided, the transfer configuration
+// will be associated with the user id corresponding to the
+// authorization code. Otherwise, the transfer configuration will be associated
+// with the calling user.
+message CreateTransferConfigRequest {
+  // The BigQuery project id where the transfer configuration should be
+  // created. Must be in the format
+  // /projects/{project_id}/locations/{location_id}.
+  // If the specified location and the location of the destination BigQuery
+  // dataset do not match, the request will fail.
+  string parent = 1;
+
+  // Data transfer configuration to create.
+  TransferConfig transfer_config = 2;
+
+  // Optional OAuth2 authorization code to use with this transfer configuration.
+  // This is required if new credentials are needed, as indicated by
+  // `CheckValidCreds`.
+  // In order to obtain an authorization_code, please make a
+  // request to
+  // https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=&scope=&redirect_uri=
+  //
+  // * client_id should be the OAuth client_id of the BigQuery DTS API for the
+  //   given data source returned by the ListDataSources method.
+  // * data_source_scopes are the scopes returned by the ListDataSources
+  //   method.
+  // * redirect_uri is an optional parameter. If not specified, then the
+  //   authorization code is posted to the opener of the authorization flow
+  //   window. Otherwise it will be sent to the redirect uri. A special value
+  //   of urn:ietf:wg:oauth:2.0:oob means that the authorization code should
+  //   be returned in the title bar of the browser, with the page text
+  //   prompting the user to copy the code and paste it in the application.
+  string authorization_code = 3;
+}
+
+// A request to update a transfer configuration. To update the user id of the
+// transfer configuration, an authorization code needs to be provided.
+message UpdateTransferConfigRequest {
+  // Data transfer configuration to update.
+  TransferConfig transfer_config = 1;
+
+  // Optional OAuth2 authorization code to use with this transfer configuration.
+  // If it is provided, the transfer configuration will be associated with the
+  // authorizing user.
+  // In order to obtain an authorization_code, please make a
+  // request to
+  // https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=&scope=&redirect_uri=
+  //
+  // * client_id should be the OAuth client_id of the BigQuery DTS API for the
+  //   given data source returned by the ListDataSources method.
+  // * data_source_scopes are the scopes returned by the ListDataSources
+  //   method.
+  // * redirect_uri is an optional parameter. If not specified, then the
+  //   authorization code is posted to the opener of the authorization flow
+  //   window. Otherwise it will be sent to the redirect uri. A special value
+  //   of urn:ietf:wg:oauth:2.0:oob means that the authorization code should
+  //   be returned in the title bar of the browser, with the page text
+  //   prompting the user to copy the code and paste it in the application.
+  string authorization_code = 3;
+
+  // Required list of fields to be updated in this request.
+  google.protobuf.FieldMask update_mask = 4;
+}
+
+// A request to get data transfer information.
+message GetTransferConfigRequest {
+  // The field will contain the name of the resource requested, for example:
+  // `projects/{project_id}/transferConfigs/{config_id}`
+  string name = 1;
+}
+
+// A request to delete data transfer information. All associated transfer runs
+// and log messages will be deleted as well.
+message DeleteTransferConfigRequest {
+  // The field will contain the name of the resource requested, for example:
+  // `projects/{project_id}/transferConfigs/{config_id}`
+  string name = 1;
+}
+
+// A request to get data transfer run information.
+message GetTransferRunRequest {
+  // The field will contain the name of the resource requested, for example:
+  // `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`
+  string name = 1;
+}
+
+// A request to delete data transfer run information.
+message DeleteTransferRunRequest {
+  // The field will contain the name of the resource requested, for example:
+  // `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`
+  string name = 1;
+}
+
+// A request to list data transfers configured for a BigQuery project.
+message ListTransferConfigsRequest {
+  // The BigQuery project id for which data sources
+  // should be returned: `projects/{project_id}`.
+  string parent = 1;
+
+  // When specified, only configurations of requested data sources are returned.
+  repeated string data_source_ids = 2;
+
+  // Pagination token, which can be used to request a specific page
+  // of `ListTransfersRequest` list results. For multiple-page
+  // results, `ListTransfersResponse` outputs
+  // a `next_page` token, which can be used as the
+  // `page_token` value to request the next page of list results.
+  string page_token = 3;
+
+  // Page size. The default page size is the maximum value of 1000 results.
+  int32 page_size = 4;
+}
+
+// The returned list of pipelines in the project.
+message ListTransferConfigsResponse {
+  // Output only. The stored pipeline transfer configurations.
+  repeated TransferConfig transfer_configs = 1;
+
+  // Output only. The next-page token. For multiple-page list results,
+  // this token can be used as the
+  // `ListTransferConfigsRequest.page_token`
+  // to request the next page of list results.
+  string next_page_token = 2;
+}
+
+// A request to list data transfer runs. The UI can use this method to
+// show/filter specific data transfer runs. The data source can use this
+// method to request all scheduled transfer runs.
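+// A rough Python sketch of the corresponding list call (assuming the
+// generated v1 GAPIC client and its `enums` module; the config name and
+// state filter below are placeholders):
+//
+//   from google.cloud import bigquery_datatransfer_v1
+//   from google.cloud.bigquery_datatransfer_v1 import enums
+//
+//   client = bigquery_datatransfer_v1.DataTransferServiceClient()
+//   parent = 'projects/my-project/transferConfigs/123'
+//   states = [enums.TransferState.SUCCEEDED]
+//   for run in client.list_transfer_runs(parent, states=states):
+//       print(run.name, run.state)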
+message ListTransferRunsRequest {
+  // Represents which runs should be pulled.
+  enum RunAttempt {
+    // All runs should be returned.
+    RUN_ATTEMPT_UNSPECIFIED = 0;
+
+    // Only the latest run per day should be returned.
+    LATEST = 1;
+  }
+
+  // Name of the transfer configuration for which transfer runs should be
+  // retrieved. Format of the transfer configuration resource name is:
+  // `projects/{project_id}/transferConfigs/{config_id}`.
+  string parent = 1;
+
+  // When specified, only transfer runs with requested states are returned.
+  repeated TransferState states = 2;
+
+  // Pagination token, which can be used to request a specific page
+  // of `ListTransferRunsRequest` list results. For multiple-page
+  // results, `ListTransferRunsResponse` outputs
+  // a `next_page` token, which can be used as the
+  // `page_token` value to request the next page of list results.
+  string page_token = 3;
+
+  // Page size. The default page size is the maximum value of 1000 results.
+  int32 page_size = 4;
+
+  // Indicates how run attempts are to be pulled.
+  RunAttempt run_attempt = 5;
+}
+
+// The returned list of pipelines in the project.
+message ListTransferRunsResponse {
+  // Output only. The stored pipeline transfer runs.
+  repeated TransferRun transfer_runs = 1;
+
+  // Output only. The next-page token. For multiple-page list results,
+  // this token can be used as the
+  // `ListTransferRunsRequest.page_token`
+  // to request the next page of list results.
+  string next_page_token = 2;
+}
+
+// A request to get user facing log messages associated with a data transfer
+// run.
+message ListTransferLogsRequest {
+  // Transfer run name in the form:
+  // `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`.
+  string parent = 1;
+
+  // Pagination token, which can be used to request a specific page
+  // of `ListTransferLogsRequest` list results. For multiple-page
+  // results, `ListTransferLogsResponse` outputs
+  // a `next_page` token, which can be used as the
+  // `page_token` value to request the next page of list results.
+  string page_token = 4;
+
+  // Page size. The default page size is the maximum value of 1000 results.
+  int32 page_size = 5;
+
+  // Message types to return. If not populated, INFO, WARNING and ERROR
+  // messages are returned.
+  repeated TransferMessage.MessageSeverity message_types = 6;
+}
+
+// The returned list of transfer run messages.
+message ListTransferLogsResponse {
+  // Output only. The stored pipeline transfer messages.
+  repeated TransferMessage transfer_messages = 1;
+
+  // Output only. The next-page token. For multiple-page list results,
+  // this token can be used as the
+  // `ListTransferLogsRequest.page_token`
+  // to request the next page of list results.
+  string next_page_token = 2;
+}
+
+// A request to determine whether the user has valid credentials. This method
+// is used to limit the number of OAuth popups in the user interface. The
+// user id is inferred from the API call context.
+// If the data source has the Google+ authorization type, this method
+// returns false, as it cannot be determined whether the credentials are
+// already valid merely based on the user id.
+message CheckValidCredsRequest {
+  // The data source in the form:
+  // `projects/{project_id}/dataSources/{data_source_id}`
+  string name = 1;
+}
+
+// A response indicating whether the credentials exist and are valid.
+message CheckValidCredsResponse {
+  // If set to `true`, the credentials exist and are valid.
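+  //
+  // For illustration (a sketch assuming the generated v1 GAPIC client;
+  // the data source name is a placeholder):
+  //
+  //   from google.cloud import bigquery_datatransfer_v1
+  //
+  //   client = bigquery_datatransfer_v1.DataTransferServiceClient()
+  //   name = 'projects/my-project/dataSources/scheduled_query'
+  //   response = client.check_valid_creds(name)
+  //   if not response.has_valid_creds:
+  //       pass  # run the OAuth flow before creating a transfer config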
+  bool has_valid_creds = 1;
+}
+
+// A request to schedule transfer runs for a time range.
+message ScheduleTransferRunsRequest {
+  // Transfer configuration name in the form:
+  // `projects/{project_id}/transferConfigs/{config_id}`.
+  string parent = 1;
+
+  // Start time of the range of transfer runs. For example,
+  // `"2017-05-25T00:00:00+00:00"`.
+  google.protobuf.Timestamp start_time = 2;
+
+  // End time of the range of transfer runs. For example,
+  // `"2017-05-30T00:00:00+00:00"`.
+  google.protobuf.Timestamp end_time = 3;
+}
+
+// A response to schedule transfer runs for a time range.
+message ScheduleTransferRunsResponse {
+  // The transfer runs that were scheduled.
+  repeated TransferRun runs = 1;
+}
diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer.proto b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer.proto
new file mode 100644
index 000000000000..0cadeed5b9d1
--- /dev/null
+++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer.proto
@@ -0,0 +1,222 @@
+// Copyright 2018 Google Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.cloud.bigquery.datatransfer.v1;
+
+import "google/api/annotations.proto";
+import "google/protobuf/struct.proto";
+import "google/protobuf/timestamp.proto";
+import "google/rpc/status.proto";
+
+option csharp_namespace = "Google.Cloud.BigQuery.DataTransfer.V1";
+option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer";
+option java_multiple_files = true;
+option java_outer_classname = "TransferProto";
+option java_package = "com.google.cloud.bigquery.datatransfer.v1";
+option objc_class_prefix = "GCBDT";
+option php_namespace = "Google\\Cloud\\BigQuery\\DataTransfer\\V1";
+
+
+// Represents a data transfer configuration. A transfer configuration
+// contains all metadata needed to perform a data transfer. For example,
+// `destination_dataset_id` specifies where data should be stored.
+// When a new transfer configuration is created, the specified
+// `destination_dataset_id` is created when needed and shared with the
+// appropriate data source service account.
+message TransferConfig {
+  // The resource name of the transfer config.
+  // Transfer config names have the form
+  // `projects/{project_id}/transferConfigs/{config_id}`,
+  // where `config_id` is usually a uuid, even though it is not
+  // guaranteed or required. The name is ignored when creating a transfer
+  // config.
+  string name = 1;
+
+  // The BigQuery target dataset id.
+  string destination_dataset_id = 2;
+
+  // User specified display name for the data transfer.
+  string display_name = 3;
+
+  // Data source id. Cannot be changed once the data transfer is created.
+  string data_source_id = 5;
+
+  // Data transfer specific parameters.
+  google.protobuf.Struct params = 9;
+
+  // Data transfer schedule.
+  // If the data source does not support a custom schedule, this should be
+  // empty. If it is empty, the default value for the data source will be
+  // used.
+  // The specified times are in UTC.
+  // Examples of valid format:
+  // `1st,3rd monday of month 15:30`,
+  // `every wed,fri of jan,jun 13:15`, and
+  // `first sunday of quarter 00:00`.
+  // See more explanation about the format here:
+  // https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
+  // NOTE: the granularity should be at least 8 hours, i.e. runs should be
+  // scheduled no more frequently than every 8 hours.
+  string schedule = 7;
+
+  // The number of days to look back to automatically refresh the data.
+  // For example, if `data_refresh_window_days = 10`, then every day
+  // BigQuery reingests data for [today-10, today-1], rather than ingesting
+  // data for just [today-1].
+  // Only valid if the data source supports the feature. Set the value to 0
+  // to use the default value.
+  int32 data_refresh_window_days = 12;
+
+  // Whether this config is disabled. When set to true, no runs are scheduled
+  // for a given transfer.
+  bool disabled = 13;
+
+  // Output only. Data transfer modification time. Ignored by server on input.
+  google.protobuf.Timestamp update_time = 4;
+
+  // Output only. Next time when the data transfer will run.
+  google.protobuf.Timestamp next_run_time = 8;
+
+  // Output only. State of the most recently updated transfer run.
+  TransferState state = 10;
+
+  // Output only. Unique ID of the user on whose behalf the transfer is done.
+  // Applicable only to data sources that do not support service accounts.
+  // When set to 0, the data source service account credentials are used.
+  // May be negative. Note that this identifier is not stable.
+  // It may change over time, even for the same user.
+  int64 user_id = 11;
+
+  // Output only. Region in which the BigQuery dataset is located.
+  string dataset_region = 14;
+}
+
+// Represents a data transfer run.
+message TransferRun {
+  // The resource name of the transfer run.
+  // Transfer run names have the form
+  // `projects/{project_id}/locations/{location}/transferConfigs/{config_id}/runs/{run_id}`.
+  // The name is ignored when creating a transfer run.
+  string name = 1;
+
+  // Minimum time after which a transfer run can be started.
+  google.protobuf.Timestamp schedule_time = 3;
+
+  // For batch transfer runs, specifies the date and time that
+  // data should be ingested.
+  google.protobuf.Timestamp run_time = 10;
+
+  // Status of the transfer run.
+  google.rpc.Status error_status = 21;
+
+  // Output only. Time when the transfer run was started.
+  // Parameter ignored by server for input requests.
+  google.protobuf.Timestamp start_time = 4;
+
+  // Output only. Time when the transfer run ended.
+  // Parameter ignored by server for input requests.
+  google.protobuf.Timestamp end_time = 5;
+
+  // Output only. Last time the data transfer run state was updated.
+  google.protobuf.Timestamp update_time = 6;
+
+  // Output only. Data transfer specific parameters.
+  google.protobuf.Struct params = 9;
+
+  // Output only. The BigQuery target dataset id.
+  string destination_dataset_id = 2;
+
+  // Output only. Data source id.
+  string data_source_id = 7;
+
+  // Data transfer run state. Ignored for input requests.
+  TransferState state = 8;
+
+  // Output only. Unique ID of the user on whose behalf the transfer is done.
+  // Applicable only to data sources that do not support service accounts.
+  // When set to 0, the data source service account credentials are used.
+  // May be negative. Note that this identifier is not stable.
+  // It may change over time, even for the same user.
+  int64 user_id = 11;
+
+  // Output only. Describes the schedule of this transfer run if it was
+  // created as part of a regular schedule. For batch transfer runs that are
+  // scheduled manually, this is empty.
+  // NOTE: the system might choose to delay the schedule depending on the
+  // current load, so `schedule_time` doesn't always match this.
+  string schedule = 12;
+}
+
+// Represents a user facing message for a particular data transfer run.
+message TransferMessage {
+  // Represents data transfer user facing message severity.
+  enum MessageSeverity {
+    // No severity specified.
+    MESSAGE_SEVERITY_UNSPECIFIED = 0;
+
+    // Informational message.
+    INFO = 1;
+
+    // Warning message.
+    WARNING = 2;
+
+    // Error message.
+    ERROR = 3;
+  }
+
+  // Time when the message was logged.
+  google.protobuf.Timestamp message_time = 1;
+
+  // Message severity.
+  MessageSeverity severity = 2;
+
+  // Message text.
+  string message_text = 3;
+}
+
+// DEPRECATED. Represents data transfer type.
+enum TransferType {
+  // Invalid or unknown transfer type placeholder.
+  TRANSFER_TYPE_UNSPECIFIED = 0;
+
+  // Batch data transfer.
+  BATCH = 1;
+
+  // Streaming data transfer. Streaming data source currently doesn't
+  // support multiple transfer configs per project.
+  STREAMING = 2;
+}
+
+// Represents data transfer run state.
+enum TransferState {
+  // State placeholder.
+  TRANSFER_STATE_UNSPECIFIED = 0;
+
+  // Data transfer is scheduled and is waiting to be picked up by the
+  // data transfer backend.
+  PENDING = 2;
+
+  // Data transfer is in progress.
+  RUNNING = 3;
+
+  // Data transfer completed successfully.
+  SUCCEEDED = 4;
+
+  // Data transfer failed.
+  FAILED = 5;
+
+  // Data transfer is cancelled.
+  CANCELLED = 6;
+}
diff --git a/packages/google-cloud-bigquery-datatransfer/synth.metadata b/packages/google-cloud-bigquery-datatransfer/synth.metadata
index a3781caccd3a..45829dbc8457 100644
--- a/packages/google-cloud-bigquery-datatransfer/synth.metadata
+++ b/packages/google-cloud-bigquery-datatransfer/synth.metadata
@@ -1,19 +1,19 @@
 {
-  "updateTime": "2019-01-17T13:12:30.477136Z",
+  "updateTime": "2019-01-23T22:00:39.365486Z",
   "sources": [
     {
       "generator": {
         "name": "artman",
-        "version": "0.16.6",
-        "dockerImage": "googleapis/artman@sha256:12722f2ca3fbc3b53cc6aa5f0e569d7d221b46bd876a2136497089dec5e3634e"
+        "version": "0.16.7",
+        "dockerImage": "googleapis/artman@sha256:d6c8ced606eb49973ca95d2af7c55a681acc042db0f87d135968349e7bf6dd80"
       }
     },
     {
       "git": {
         "name": "googleapis",
         "remote": "https://github.com/googleapis/googleapis.git",
-        "sha": "0ac60e21a1aa86c07c1836865b35308ba8178b05",
-        "internalRef": "229626798"
+        "sha": "9aac88a22468b1e291937f55fa1ef237adfdc63e",
+        "internalRef": "230568136"
       }
     },
     {
@@ -28,7 +28,7 @@
     {
       "client": {
         "source": "googleapis",
-        "apiName": "bigquery-datatransfer",
+        "apiName": "bigquery_datatransfer",
         "apiVersion": "v1",
         "language": "python",
         "generator": "gapic",
diff --git a/packages/google-cloud-bigquery-datatransfer/synth.py b/packages/google-cloud-bigquery-datatransfer/synth.py
index 290bdcbfb8e2..b569d54cd4e0 100644
--- a/packages/google-cloud-bigquery-datatransfer/synth.py
+++ b/packages/google-cloud-bigquery-datatransfer/synth.py
@@ -25,11 +25,12 @@
 # Generate bigquery_datatransfer GAPIC layer
 # ----------------------------------------------------------------------------
 library = gapic.py_library(
-    "bigquery-datatransfer",
+    "bigquery_datatransfer",
     version,
     config_path="/google/cloud/bigquery/datatransfer/"
    "artman_bigquerydatatransfer.yaml",
     artman_output_name="bigquerydatatransfer-v1",
+    include_protos=True,
 )

 s.move(