docs: Add documentation for enums (#553)
* docs: Add documentation for enums

fix: Add context manager return types

chore: Update gapic-generator-python to v1.8.1
PiperOrigin-RevId: 503210727

Source-Link: googleapis/googleapis@a391fd1

Source-Link: googleapis/googleapis-gen@0080f83
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDA4MGY4MzBkZWMzN2MzMzg0MTU3MDgyYmNlMjc5ZTM3MDc5ZWE1OCJ9

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* workaround docs issue

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
Co-authored-by: Anthonios Partheniou <[email protected]>
3 people authored Jan 20, 2023
1 parent 5635fe5 commit ec04714
Showing 15 changed files with 281 additions and 20 deletions.
Binary file added bigquery-storage-v1-py.tar.gz
@@ -856,7 +856,7 @@ def sample_split_read_stream():
# Done; return the response.
return response

def __enter__(self):
def __enter__(self) -> "BigQueryReadClient":
return self

def __exit__(self, type, value, traceback):
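The annotated return type documents the existing context-manager behavior. A minimal usage sketch (not part of this commit; it only illustrates what the annotation buys a type checker):

    from google.cloud import bigquery_storage_v1

    # The new annotation lets type checkers infer the type of `client` inside
    # the with-block instead of treating it as Any.
    with bigquery_storage_v1.BigQueryReadClient() as client:
        # `client` is statically known to be a BigQueryReadClient here, so
        # calls such as create_read_session() can be completed and checked.
        ...
    # __exit__ closes the underlying transport channel when the block exits.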
@@ -1132,7 +1132,7 @@ def sample_flush_rows():
# Done; return the response.
return response

def __enter__(self):
def __enter__(self) -> "BigQueryWriteClient":
return self

def __exit__(self, type, value, traceback):
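The same annotation is applied to the write client. A sketch of using it as a context manager to finalize and commit a pending stream (resource names below are placeholders, not part of this commit):

    from google.cloud import bigquery_storage_v1
    from google.cloud.bigquery_storage_v1 import types

    parent = "projects/my-project/datasets/my_dataset/tables/my_table"
    stream_name = parent + "/streams/my-stream"

    with bigquery_storage_v1.BigQueryWriteClient() as client:
        # Finalize the pending stream, then commit it so its rows become visible.
        client.finalize_write_stream(name=stream_name)
        client.batch_commit_write_streams(
            request=types.BatchCommitWriteStreamsRequest(
                parent=parent,
                write_streams=[stream_name],
            )
        )
    # The transport channel is closed automatically on exit.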
12 changes: 11 additions & 1 deletion google/cloud/bigquery_storage_v1/types/arrow.py
@@ -78,7 +78,17 @@ class ArrowSerializationOptions(proto.Message):
"""

class CompressionCodec(proto.Enum):
r"""Compression codec's supported by Arrow."""
r"""Compression codec's supported by Arrow.
Values:
COMPRESSION_UNSPECIFIED (0):
If unspecified no compression will be used.
LZ4_FRAME (1):
LZ4 Frame
(https://github.com/lz4/lz4/blob/dev/doc/lz4_Frame_format.md)
ZSTD (2):
Zstandard compression.
"""
COMPRESSION_UNSPECIFIED = 0
LZ4_FRAME = 1
ZSTD = 2
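A sketch of how a codec is selected when building an Arrow read session; resource names are placeholders, and the buffer_compression field is assumed from the v1 surface rather than shown in this diff:

    from google.cloud.bigquery_storage_v1 import types

    read_session = types.ReadSession(
        table="projects/my-project/datasets/my_dataset/tables/my_table",
        data_format=types.DataFormat.ARROW,
        read_options=types.ReadSession.TableReadOptions(
            arrow_serialization_options=types.ArrowSerializationOptions(
                # Ask the server to LZ4-frame-compress Arrow record batch buffers.
                buffer_compression=types.ArrowSerializationOptions.CompressionCodec.LZ4_FRAME,
            ),
        ),
    )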
51 changes: 49 additions & 2 deletions google/cloud/bigquery_storage_v1/types/storage.py
@@ -422,6 +422,17 @@ class MissingValueInterpretation(proto.Enum):
values are fields present in user schema but missing in rows. A
missing value can represent a NULL or a column default value
defined in BigQuery table schema.
Values:
MISSING_VALUE_INTERPRETATION_UNSPECIFIED (0):
Invalid missing value interpretation.
Requests with this value will be rejected.
NULL_VALUE (1):
Missing value is interpreted as NULL.
DEFAULT_VALUE (2):
Missing value is interpreted as column
default value if declared in the table schema,
NULL otherwise.
"""
MISSING_VALUE_INTERPRETATION_UNSPECIFIED = 0
NULL_VALUE = 1
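A sketch of selecting a per-column interpretation on an append request, assuming AppendRowsRequest exposes the missing_value_interpretations map field (column and stream names are placeholders):

    from google.cloud.bigquery_storage_v1 import types

    request = types.AppendRowsRequest(
        write_stream="projects/my-project/datasets/my_dataset/tables/my_table/streams/_default",
        # Columns absent from appended rows are filled with the table's default
        # value, or NULL if no default is declared.
        missing_value_interpretations={
            "created_at": types.AppendRowsRequest.MissingValueInterpretation.DEFAULT_VALUE,
        },
    )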
@@ -748,7 +759,36 @@ class StorageError(proto.Message):
"""

class StorageErrorCode(proto.Enum):
r"""Error code for ``StorageError``."""
r"""Error code for ``StorageError``.
Values:
STORAGE_ERROR_CODE_UNSPECIFIED (0):
Default error.
TABLE_NOT_FOUND (1):
Table is not found in the system.
STREAM_ALREADY_COMMITTED (2):
Stream is already committed.
STREAM_NOT_FOUND (3):
Stream is not found.
INVALID_STREAM_TYPE (4):
Invalid Stream type.
For example, you try to commit a stream that is
not pending.
INVALID_STREAM_STATE (5):
Invalid Stream state.
For example, you try to commit a stream that is
not finalized or is garbaged.
STREAM_FINALIZED (6):
Stream is finalized.
SCHEMA_MISMATCH_EXTRA_FIELDS (7):
There is a schema mismatch and it is caused
by user schema has extra field than bigquery
schema.
OFFSET_ALREADY_EXISTS (8):
Offset already exists.
OFFSET_OUT_OF_RANGE (9):
Offset out of range.
"""
STORAGE_ERROR_CODE_UNSPECIFIED = 0
TABLE_NOT_FOUND = 1
STREAM_ALREADY_COMMITTED = 2
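For illustration only, a StorageError message as it might be inspected after a failed RPC; the field values here are made up:

    from google.cloud.bigquery_storage_v1 import types

    err = types.StorageError(
        code=types.StorageError.StorageErrorCode.STREAM_ALREADY_COMMITTED,
        entity="projects/my-project/datasets/my_dataset/tables/my_table/streams/my-stream",
        error_message="Stream has already been committed.",
    )
    if err.code == types.StorageError.StorageErrorCode.STREAM_ALREADY_COMMITTED:
        # Nothing to retry; a committed stream cannot be committed again.
        pass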
@@ -789,7 +829,14 @@ class RowError(proto.Message):
"""

class RowErrorCode(proto.Enum):
r"""Error code for ``RowError``."""
r"""Error code for ``RowError``.
Values:
ROW_ERROR_CODE_UNSPECIFIED (0):
Default error.
FIELDS_ERROR (1):
One or more fields in the row has errors.
"""
ROW_ERROR_CODE_UNSPECIFIED = 0
FIELDS_ERROR = 1
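A sketch of reading per-row failures off an AppendRowsResponse via its row_errors field; the response below is constructed by hand purely for illustration:

    from google.cloud.bigquery_storage_v1 import types

    response = types.AppendRowsResponse(
        row_errors=[
            types.RowError(
                index=3,
                code=types.RowError.RowErrorCode.FIELDS_ERROR,
                message="Field 'age' could not be converted.",
            )
        ]
    )
    for row_error in response.row_errors:
        if row_error.code == types.RowError.RowErrorCode.FIELDS_ERROR:
            print(f"row {row_error.index} rejected: {row_error.message}")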

56 changes: 53 additions & 3 deletions google/cloud/bigquery_storage_v1/types/stream.py
@@ -36,7 +36,20 @@


class DataFormat(proto.Enum):
r"""Data format for input or output data."""
r"""Data format for input or output data.
Values:
DATA_FORMAT_UNSPECIFIED (0):
Data format is unspecified.
AVRO (1):
Avro is a standard open source row based file
format. See https://avro.apache.org/ for more
details.
ARROW (2):
Arrow is a standard open source column-based
message format. See https://arrow.apache.org/
for more details.
"""
DATA_FORMAT_UNSPECIFIED = 0
AVRO = 1
ARROW = 2
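A sketch of choosing the wire format when creating a read session; project, dataset, and table names are placeholders:

    from google.cloud import bigquery_storage_v1
    from google.cloud.bigquery_storage_v1 import types

    client = bigquery_storage_v1.BigQueryReadClient()
    session = client.create_read_session(
        parent="projects/my-project",
        read_session=types.ReadSession(
            table="projects/my-project/datasets/my_dataset/tables/my_table",
            data_format=types.DataFormat.AVRO,  # or types.DataFormat.ARROW
        ),
        max_stream_count=1,
    )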
@@ -45,6 +58,20 @@ class DataFormat(proto.Enum):
class WriteStreamView(proto.Enum):
r"""WriteStreamView is a view enum that controls what details
about a write stream should be returned.
Values:
WRITE_STREAM_VIEW_UNSPECIFIED (0):
The default / unset value.
BASIC (1):
The BASIC projection returns basic metadata
about a write stream. The basic view does not
include schema information. This is the default
view returned by GetWriteStream.
FULL (2):
The FULL projection returns all available
write stream metadata, including the schema.
CreateWriteStream returns the full projection of
write stream metadata.
"""
WRITE_STREAM_VIEW_UNSPECIFIED = 0
BASIC = 1
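A sketch of requesting the FULL projection, assuming GetWriteStreamRequest carries a view field as this enum describes (the stream name is a placeholder):

    from google.cloud import bigquery_storage_v1
    from google.cloud.bigquery_storage_v1 import types

    client = bigquery_storage_v1.BigQueryWriteClient()
    stream = client.get_write_stream(
        request=types.GetWriteStreamRequest(
            name="projects/my-project/datasets/my_dataset/tables/my_table/streams/my-stream",
            view=types.WriteStreamView.FULL,  # include the table schema in the response
        )
    )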
@@ -334,14 +361,37 @@ class WriteStream(proto.Message):
"""

class Type(proto.Enum):
r"""Type enum of the stream."""
r"""Type enum of the stream.
Values:
TYPE_UNSPECIFIED (0):
Unknown type.
COMMITTED (1):
Data will commit automatically and appear as
soon as the write is acknowledged.
PENDING (2):
Data is invisible until the stream is
committed.
BUFFERED (3):
Data is only visible up to the offset to
which it was flushed.
"""
TYPE_UNSPECIFIED = 0
COMMITTED = 1
PENDING = 2
BUFFERED = 3
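A sketch of creating a PENDING stream, whose rows stay invisible until the stream is finalized and committed; resource names are placeholders, and note that proto-plus exposes the type field as type_ because type shadows a Python builtin:

    from google.cloud import bigquery_storage_v1
    from google.cloud.bigquery_storage_v1 import types

    client = bigquery_storage_v1.BigQueryWriteClient()
    parent = client.table_path("my-project", "my_dataset", "my_table")
    write_stream = client.create_write_stream(
        parent=parent,
        write_stream=types.WriteStream(type_=types.WriteStream.Type.PENDING),
    )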

class WriteMode(proto.Enum):
r"""Mode enum of the stream."""
r"""Mode enum of the stream.
Values:
WRITE_MODE_UNSPECIFIED (0):
Unknown type.
INSERT (1):
Insert new records into the table.
It is the default value if customers do not
specify it.
"""
WRITE_MODE_UNSPECIFIED = 0
INSERT = 1

51 changes: 49 additions & 2 deletions google/cloud/bigquery_storage_v1/types/table.py
@@ -123,7 +123,43 @@ class TableFieldSchema(proto.Message):
"""

class Type(proto.Enum):
r""""""
r"""
Values:
TYPE_UNSPECIFIED (0):
Illegal value
STRING (1):
64K, UTF8
INT64 (2):
64-bit signed
DOUBLE (3):
64-bit IEEE floating point
STRUCT (4):
Aggregate type
BYTES (5):
64K, Binary
BOOL (6):
2-valued
TIMESTAMP (7):
64-bit signed usec since UTC epoch
DATE (8):
Civil date - Year, Month, Day
TIME (9):
Civil time - Hour, Minute, Second,
Microseconds
DATETIME (10):
Combination of civil date and civil time
GEOGRAPHY (11):
Geography object
NUMERIC (12):
Numeric value
BIGNUMERIC (13):
BigNumeric value
INTERVAL (14):
Interval
JSON (15):
JSON, String
"""
TYPE_UNSPECIFIED = 0
STRING = 1
INT64 = 2
@@ -142,7 +178,18 @@ class Type(proto.Enum):
JSON = 15

class Mode(proto.Enum):
r""""""
r"""
Values:
MODE_UNSPECIFIED (0):
NULLABLE (1):
REQUIRED (2):
REPEATED (3):
"""
MODE_UNSPECIFIED = 0
NULLABLE = 1
REQUIRED = 2
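A sketch of how these enums appear on a table schema, using a hand-built TableFieldSchema for illustration; field names and values are made up, and proto-plus exposes the type field as type_:

    from google.cloud.bigquery_storage_v1 import types

    field = types.TableFieldSchema(
        name="user_id",
        type_=types.TableFieldSchema.Type.INT64,
        mode=types.TableFieldSchema.Mode.REQUIRED,
    )
    schema = types.TableSchema(fields=[field])
    for f in schema.fields:
        print(f.name, f.type_.name, f.mode.name)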
@@ -859,7 +859,7 @@ def sample_split_read_stream():
# Done; return the response.
return response

def __enter__(self):
def __enter__(self) -> "BigQueryReadClient":
return self

def __exit__(self, type, value, traceback):
@@ -1105,7 +1105,7 @@ def sample_flush_rows():
# Done; return the response.
return response

def __enter__(self):
def __enter__(self) -> "BigQueryWriteClient":
return self

def __exit__(self, type, value, traceback):
14 changes: 13 additions & 1 deletion google/cloud/bigquery_storage_v1beta2/types/arrow.py
@@ -69,7 +69,19 @@ class ArrowSerializationOptions(proto.Message):
"""

class Format(proto.Enum):
r"""The IPC format to use when serializing Arrow streams."""
r"""The IPC format to use when serializing Arrow streams.
Values:
FORMAT_UNSPECIFIED (0):
If unspecied the IPC format as of 0.15
release will be used.
ARROW_0_14 (1):
Use the legacy IPC message format as of
Apache Arrow Release 0.14.
ARROW_0_15 (2):
Use the message format as of Apache Arrow
Release 0.15.
"""
FORMAT_UNSPECIFIED = 0
ARROW_0_14 = 1
ARROW_0_15 = 2
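A sketch for the v1beta2 surface, assuming the serialization options message exposes this enum through a format_ field (proto-plus appends an underscore when a field name such as format shadows a Python builtin); resource names are placeholders:

    from google.cloud.bigquery_storage_v1beta2 import types

    read_session = types.ReadSession(
        table="projects/my-project/datasets/my_dataset/tables/my_table",
        data_format=types.DataFormat.ARROW,
        read_options=types.ReadSession.TableReadOptions(
            arrow_serialization_options=types.ArrowSerializationOptions(
                format_=types.ArrowSerializationOptions.Format.ARROW_0_15,
            ),
        ),
    )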
23 changes: 22 additions & 1 deletion google/cloud/bigquery_storage_v1beta2/types/storage.py
@@ -643,7 +643,28 @@ class StorageError(proto.Message):
"""

class StorageErrorCode(proto.Enum):
r"""Error code for ``StorageError``."""
r"""Error code for ``StorageError``.
Values:
STORAGE_ERROR_CODE_UNSPECIFIED (0):
Default error.
TABLE_NOT_FOUND (1):
Table is not found in the system.
STREAM_ALREADY_COMMITTED (2):
Stream is already committed.
STREAM_NOT_FOUND (3):
Stream is not found.
INVALID_STREAM_TYPE (4):
Invalid Stream type.
For example, you try to commit a stream that is
not pending.
INVALID_STREAM_STATE (5):
Invalid Stream state.
For example, you try to commit a stream that is
not finalized or is garbaged.
STREAM_FINALIZED (6):
Stream is finalized.
"""
STORAGE_ERROR_CODE_UNSPECIFIED = 0
TABLE_NOT_FOUND = 1
STREAM_ALREADY_COMMITTED = 2
31 changes: 29 additions & 2 deletions google/cloud/bigquery_storage_v1beta2/types/stream.py
@@ -35,7 +35,20 @@


class DataFormat(proto.Enum):
r"""Data format for input or output data."""
r"""Data format for input or output data.
Values:
DATA_FORMAT_UNSPECIFIED (0):
AVRO (1):
Avro is a standard open source row based file
format. See https://avro.apache.org/ for more
details.
ARROW (2):
Arrow is a standard open source column-based
message format. See https://arrow.apache.org/
for more details.
"""
DATA_FORMAT_UNSPECIFIED = 0
AVRO = 1
ARROW = 2
@@ -235,7 +248,21 @@ class WriteStream(proto.Message):
"""

class Type(proto.Enum):
r"""Type enum of the stream."""
r"""Type enum of the stream.
Values:
TYPE_UNSPECIFIED (0):
Unknown type.
COMMITTED (1):
Data will commit automatically and appear as
soon as the write is acknowledged.
PENDING (2):
Data is invisible until the stream is
committed.
BUFFERED (3):
Data is only visible up to the offset to
which it was flushed.
"""
TYPE_UNSPECIFIED = 0
COMMITTED = 1
PENDING = 2
