Commit 95f5935: add protos

crwilcox committed Jan 22, 2019
1 parent 3475a84 commit 95f5935
Showing 45 changed files with 11,881 additions and 0 deletions.
653 changes: 653 additions & 0 deletions bigquery_datatransfer/protos/datatransfer.proto

Large diffs are not rendered by default.

222 changes: 222 additions & 0 deletions bigquery_datatransfer/protos/transfer.proto
@@ -0,0 +1,222 @@
// Copyright 2018 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

syntax = "proto3";

package google.cloud.bigquery.datatransfer.v1;

import "google/api/annotations.proto";
import "google/protobuf/struct.proto";
import "google/protobuf/timestamp.proto";
import "google/rpc/status.proto";

option csharp_namespace = "Google.Cloud.BigQuery.DataTransfer.V1";
option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer";
option java_multiple_files = true;
option java_outer_classname = "TransferProto";
option java_package = "com.google.cloud.bigquery.datatransfer.v1";
option objc_class_prefix = "GCBDT";
option php_namespace = "Google\\Cloud\\BigQuery\\DataTransfer\\V1";


// Represents a data transfer configuration. A transfer configuration
// contains all metadata needed to perform a data transfer. For example,
// `destination_dataset_id` specifies where data should be stored.
// When a new transfer configuration is created, the specified
// `destination_dataset_id` is created when needed and shared with the
// appropriate data source service account.
message TransferConfig {
// The resource name of the transfer config.
// Transfer config names have the form
// `projects/{project_id}/transferConfigs/{config_id}`,
// where `config_id` is usually a UUID, though this is neither
// guaranteed nor required. The name is ignored when creating a transfer
// config.
string name = 1;

// The BigQuery target dataset id.
string destination_dataset_id = 2;

// User-specified display name for the data transfer.
string display_name = 3;

// Data source id. Cannot be changed once data transfer is created.
string data_source_id = 5;

// Data transfer specific parameters.
google.protobuf.Struct params = 9;

// Data transfer schedule.
// If the data source does not support a custom schedule, this should be
// empty. If it is empty, the default value for the data source will be
// used.
// The specified times are in UTC.
// Examples of valid format:
// `1st,3rd monday of month 15:30`,
// `every wed,fri of jan,jun 13:15`, and
// `first sunday of quarter 00:00`.
// See more explanation about the format here:
// https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
// NOTE: the minimum interval between runs is 8 hours; less frequent is fine.
string schedule = 7;

// The number of days to look back to automatically refresh the data.
// For example, if `data_refresh_window_days = 10`, then every day
// BigQuery reingests data for [today-10, today-1], rather than ingesting data
// for just [today-1].
// Only valid if the data source supports the feature. Set the value to 0
// to use the default value.
int32 data_refresh_window_days = 12;

// Whether this config is disabled. When set to true, no runs are
// scheduled for this transfer.
bool disabled = 13;

// Output only. Data transfer modification time. Ignored by server on input.
google.protobuf.Timestamp update_time = 4;

// Output only. The next time the data transfer will run.
google.protobuf.Timestamp next_run_time = 8;

// Output only. State of the most recently updated transfer run.
TransferState state = 10;

// Output only. Unique ID of the user on whose behalf the transfer is done.
// Applicable only to data sources that do not support service accounts.
// When set to 0, the data source service account credentials are used.
// May be negative. Note that this identifier is not stable.
// It may change over time even for the same user.
int64 user_id = 11;

// Output only. Region in which BigQuery dataset is located.
string dataset_region = 14;
}

// Represents a data transfer run.
message TransferRun {
// The resource name of the transfer run.
// Transfer run names have the form
// `projects/{project_id}/locations/{location}/transferConfigs/{config_id}/runs/{run_id}`.
// The name is ignored when creating a transfer run.
string name = 1;

// Minimum time after which a transfer run can be started.
google.protobuf.Timestamp schedule_time = 3;

// For batch transfer runs, specifies the date and time that
// data should be ingested.
google.protobuf.Timestamp run_time = 10;

// Status of the transfer run.
google.rpc.Status error_status = 21;

// Output only. Time when transfer run was started.
// Parameter ignored by server for input requests.
google.protobuf.Timestamp start_time = 4;

// Output only. Time when transfer run ended.
// Parameter ignored by server for input requests.
google.protobuf.Timestamp end_time = 5;

// Output only. Last time the data transfer run state was updated.
google.protobuf.Timestamp update_time = 6;

// Output only. Data transfer specific parameters.
google.protobuf.Struct params = 9;

// Output only. The BigQuery target dataset id.
string destination_dataset_id = 2;

// Output only. Data source id.
string data_source_id = 7;

// Data transfer run state. Ignored for input requests.
TransferState state = 8;

// Output only. Unique ID of the user on whose behalf the transfer is done.
// Applicable only to data sources that do not support service accounts.
// When set to 0, the data source service account credentials are used.
// May be negative. Note that this identifier is not stable.
// It may change over time even for the same user.
int64 user_id = 11;

// Output only. Describes the schedule of this transfer run if it was
// created as part of a regular schedule. For batch transfer runs that are
// scheduled manually, this is empty.
// NOTE: the system might choose to delay the schedule depending on the
// current load, so `schedule_time` doesn't always match this.
string schedule = 12;
}

// Represents a user-facing message for a particular data transfer run.
message TransferMessage {
// Represents the severity of a user-facing data transfer message.
enum MessageSeverity {
// No severity specified.
MESSAGE_SEVERITY_UNSPECIFIED = 0;

// Informational message.
INFO = 1;

// Warning message.
WARNING = 2;

// Error message.
ERROR = 3;
}

// Time when message was logged.
google.protobuf.Timestamp message_time = 1;

// Message severity.
MessageSeverity severity = 2;

// Message text.
string message_text = 3;
}

// DEPRECATED. Represents data transfer type.
enum TransferType {
// Invalid or Unknown transfer type placeholder.
TRANSFER_TYPE_UNSPECIFIED = 0;

// Batch data transfer.
BATCH = 1;

// Streaming data transfer. Streaming data source currently doesn't
// support multiple transfer configs per project.
STREAMING = 2;
}

// Represents data transfer run state.
enum TransferState {
// State placeholder.
TRANSFER_STATE_UNSPECIFIED = 0;

// Data transfer is scheduled and is waiting to be picked up by
// data transfer backend.
PENDING = 2;

// Data transfer is in progress.
RUNNING = 3;

// Data transfer completed successfully.
SUCCEEDED = 4;

// Data transfer failed.
FAILED = 5;

// Data transfer is cancelled.
CANCELLED = 6;
}
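
For illustration, a minimal Python sketch of building a TransferConfig through protoc-generated bindings (assuming transfer.proto and its imports have been compiled into a hypothetical transfer_pb2 module; all field values here are made up):

import transfer_pb2  # hypothetical module from: protoc --python_out=. transfer.proto

config = transfer_pb2.TransferConfig(
    destination_dataset_id="my_dataset",   # BigQuery target dataset id
    display_name="nightly load",           # user-specified display name
    data_source_id="scheduled_query",      # cannot change after creation
    schedule="every 24 hours",             # UTC; runs at least 8 hours apart
)
# `params` is a google.protobuf.Struct, so it is populated like a dict.
config.params["query"] = "SELECT 17"

# Output-only fields keep their defaults until the server fills them in;
# `name` is likewise ignored on creation.
print(transfer_pb2.TransferState.Name(config.state))
# -> "TRANSFER_STATE_UNSPECIFIED"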
39 changes: 39 additions & 0 deletions bigquery_storage/protos/avro.proto
@@ -0,0 +1,39 @@
// Copyright 2018 Google LLC.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

syntax = "proto3";

package google.cloud.bigquery.storage.v1beta1;

option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage";
option java_outer_classname = "AvroProto";
option java_package = "com.google.cloud.bigquery.storage.v1beta1";


// Avro schema.
message AvroSchema {
// JSON-serialized schema, as described at
// https://avro.apache.org/docs/1.8.1/spec.html
string schema = 1;
}

// Avro rows.
message AvroRows {
// Binary serialized rows in a block.
bytes serialized_binary_rows = 1;

// The count of rows in the returned block.
int64 row_count = 2;
}
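
A sketch of how a client might decode one such block (assuming the fastavro package; since serialized_binary_rows carries the rows back-to-back without an Avro container header, a schemaless reader driven by row_count recovers them):

import io
import json

import fastavro

def decode_avro_rows(schema_json, serialized_binary_rows, row_count):
    # `schema_json` is AvroSchema.schema, the JSON-serialized Avro schema.
    parsed = fastavro.parse_schema(json.loads(schema_json))
    stream = io.BytesIO(serialized_binary_rows)
    # Read exactly `row_count` records from the block.
    return [fastavro.schemaless_reader(stream, parsed) for _ in range(row_count)]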
39 changes: 39 additions & 0 deletions bigquery_storage/protos/read_options.proto
@@ -0,0 +1,39 @@
// Copyright 2018 Google LLC.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

syntax = "proto3";

package google.cloud.bigquery.storage.v1beta1;

option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage";
option java_package = "com.google.cloud.bigquery.storage.v1beta1";


// Options dictating how we read a table.
message TableReadOptions {
// Optional. Names of the fields in the table that should be read. If empty,
// all fields will be read. If a specified field is nested, all of its
// sub-fields will be selected. The output field order is
// unrelated to the order of fields in selected_fields.
repeated string selected_fields = 1;

// Optional. SQL text filtering statement, similar to a WHERE clause in
// a query. Currently, only combinations of predicates that compare a
// column against a constant value are supported.
// Aggregates are not supported.
//
// Example: "a > DATE '2014-9-27' AND (b > 5 and C LIKE 'date')"
string row_restriction = 2;
}
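
A sketch of constructing these read options from generated bindings (assuming read_options.proto was compiled to a hypothetical read_options_pb2 module; field values are made up):

import read_options_pb2

options = read_options_pb2.TableReadOptions(
    # Empty selected_fields would mean "read every field".
    selected_fields=["name", "state"],
    # Predicates may only compare a column against a constant; no aggregates.
    row_restriction="num_orders > 5 AND state = 'CA'",
)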