fix(deps): Require google-api-core >=1.34.0, >=2.11.0 (#539)
* fix(deps): Require google-api-core >=1.34.0, >=2.11.0

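The dual lower bound covers both release lines of google-api-core: installs resolving to 1.x need at least 1.34.0, and installs resolving to 2.x need at least 2.11.0. In a PEP 508 specifier this is conventionally spelled as a single floor plus exclusions for the too-old 2.x releases; a sketch of such a pin follows (the exact line in this repo's setup.py may differ):

# Illustrative setup.py pin, not copied from this diff:
install_requires = [
    "google-api-core[grpc] >= 1.34.0, <3.0.0dev, !=2.0.*, !=2.1.*, !=2.2.*, !=2.3.*, !=2.4.*, !=2.5.*, !=2.6.*, !=2.7.*, !=2.8.*, !=2.9.*, !=2.10.*",
]
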
fix: Drop usage of pkg_resources

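pkg_resources resolved the version from installed distribution metadata at import time, which is slow and raises DistributionNotFound when the generated code runs without being pip-installed; this commit replaces it with a static gapic_version module. A minimal sketch of the before/after pattern, drawn from the diff below:

# Before (removed): runtime metadata lookup, fails if the distribution is absent
import pkg_resources
version = pkg_resources.get_distribution("google-cloud-bigquery-storage").version

# After: a plain attribute on the generated gapic_version module
from google.cloud.bigquery_storage_v1 import gapic_version as package_version
version = package_version.__version__
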
fix: Fix timeout default values

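Each timeout parameter changes from Optional[float] = None to the gapic_v1.method.DEFAULT sentinel, letting the method wrapper distinguish "caller passed nothing" (apply the method's configured default timeout) from an explicit timeout=None (no deadline). A minimal sketch of the sentinel idiom, with _DEFAULT and the 60-second fallback as illustrative stand-ins:

_DEFAULT = object()  # stand-in for gapic_v1.method.DEFAULT

def call_rpc(timeout=_DEFAULT):
    # Only substitute the configured default when the caller omitted the argument.
    if timeout is _DEFAULT:
        timeout = 60.0  # illustrative configured default
    return timeout  # an explicit timeout=None survives as "no deadline"

assert call_rpc() == 60.0
assert call_rpc(timeout=None) is None
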
docs(samples): Snippetgen should call await on the operation coroutine before calling result

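On the async clients, a long-running-operation method returns a coroutine rather than the operation object itself, so generated snippets must await that coroutine before calling result() on what it yields. A minimal sketch of the ordering issue using plain asyncio stand-ins (Operation and start_operation are hypothetical, not part of this library):

import asyncio

class Operation:
    def result(self):
        return "done"

async def start_operation():
    await asyncio.sleep(0)  # stands in for issuing the RPC
    return Operation()

async def main():
    op_coro = start_operation()  # a coroutine object, not yet an Operation
    # op_coro.result() would raise AttributeError: coroutines have no result()
    response = (await op_coro).result()  # await first, then call result()
    print(response)

asyncio.run(main())
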
PiperOrigin-RevId: 493260409

Source-Link: googleapis/googleapis@fea4387

Source-Link: googleapis/googleapis-gen@387b734
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMzg3YjczNDRjNzUyOWVlNDRiZTg0ZTYxM2IxOWE4MjA1MDhjNjEyYiJ9

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* fix(deps): require google-api-core>=1.34.0,>=2.11.0

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
Co-authored-by: Anthonios Partheniou <[email protected]>
3 people authored Dec 7, 2022
1 parent ad62c22 commit 5e2fc1e
Showing 15 changed files with 98 additions and 151 deletions.
5 changes: 0 additions & 5 deletions .coveragerc
@@ -10,8 +10,3 @@ exclude_lines =
     pragma: NO COVER
     # Ignore debug-only repr
     def __repr__
-    # Ignore pkg_resources exceptions.
-    # This is added at the module level as a safeguard for if someone
-    # generates the code and tries to run it without pip installing. This
-    # makes it virtually impossible to test properly.
-    except pkg_resources.DistributionNotFound
google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py
@@ -29,7 +29,8 @@
     Type,
     Union,
 )
-import pkg_resources
+
+from google.cloud.bigquery_storage_v1 import gapic_version as package_version
 
 from google.api_core.client_options import ClientOptions
 from google.api_core import exceptions as core_exceptions
@@ -227,7 +228,7 @@ async def create_read_session(
         read_session: Optional[stream.ReadSession] = None,
         max_stream_count: Optional[int] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> stream.ReadSession:
         r"""Creates a new read session. A read session divides
@@ -389,7 +390,7 @@ def read_rows(
         read_stream: Optional[str] = None,
         offset: Optional[int] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> Awaitable[AsyncIterable[storage.ReadRowsResponse]]:
         r"""Reads rows from the stream in the format prescribed
@@ -518,7 +519,7 @@ async def split_read_stream(
         request: Optional[Union[storage.SplitReadStreamRequest, dict]] = None,
         *,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> storage.SplitReadStreamResponse:
         r"""Splits a given ``ReadStream`` into two ``ReadStream`` objects.
@@ -620,14 +621,9 @@ async def __aexit__(self, exc_type, exc, tb):
         await self.transport.close()
 
 
-try:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
-        gapic_version=pkg_resources.get_distribution(
-            "google-cloud-bigquery-storage",
-        ).version,
-    )
-except pkg_resources.DistributionNotFound:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=package_version.__version__
+)
 
 
 __all__ = ("BigQueryReadAsyncClient",)
20 changes: 8 additions & 12 deletions google/cloud/bigquery_storage_v1/services/big_query_read/client.py
@@ -29,7 +29,8 @@
     Union,
     cast,
 )
-import pkg_resources
+
+from google.cloud.bigquery_storage_v1 import gapic_version as package_version
 
 from google.api_core import client_options as client_options_lib
 from google.api_core import exceptions as core_exceptions
@@ -497,7 +498,7 @@ def create_read_session(
         read_session: Optional[stream.ReadSession] = None,
         max_stream_count: Optional[int] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> stream.ReadSession:
         r"""Creates a new read session. A read session divides
@@ -649,7 +650,7 @@ def read_rows(
         read_stream: Optional[str] = None,
         offset: Optional[int] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> Iterable[storage.ReadRowsResponse]:
         r"""Reads rows from the stream in the format prescribed
@@ -769,7 +770,7 @@ def split_read_stream(
         request: Optional[Union[storage.SplitReadStreamRequest, dict]] = None,
         *,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> storage.SplitReadStreamResponse:
         r"""Splits a given ``ReadStream`` into two ``ReadStream`` objects.
@@ -869,14 +870,9 @@ def __exit__(self, type, value, traceback):
         self.transport.close()
 
 
-try:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
-        gapic_version=pkg_resources.get_distribution(
-            "google-cloud-bigquery-storage",
-        ).version,
-    )
-except pkg_resources.DistributionNotFound:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=package_version.__version__
+)
 
 
 __all__ = ("BigQueryReadClient",)
google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py
@@ -15,7 +15,8 @@
 #
 import abc
 from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
-import pkg_resources
+
+from google.cloud.bigquery_storage_v1 import gapic_version as package_version
 
 import google.auth  # type: ignore
 import google.api_core
@@ -28,14 +29,9 @@
 from google.cloud.bigquery_storage_v1.types import storage
 from google.cloud.bigquery_storage_v1.types import stream
 
-try:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
-        gapic_version=pkg_resources.get_distribution(
-            "google-cloud-bigquery-storage",
-        ).version,
-    )
-except pkg_resources.DistributionNotFound:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=package_version.__version__
+)
 
 
 class BigQueryReadTransport(abc.ABC):
google/cloud/bigquery_storage_v1/services/big_query_write/async_client.py
@@ -30,7 +30,8 @@
     Type,
     Union,
 )
-import pkg_resources
+
+from google.cloud.bigquery_storage_v1 import gapic_version as package_version
 
 from google.api_core.client_options import ClientOptions
 from google.api_core import exceptions as core_exceptions
@@ -231,7 +232,7 @@ async def create_write_stream(
         parent: Optional[str] = None,
         write_stream: Optional[stream.WriteStream] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> stream.WriteStream:
         r"""Creates a write stream to the given table. Additionally, every
@@ -358,7 +359,7 @@ def append_rows(
         requests: Optional[AsyncIterator[storage.AppendRowsRequest]] = None,
         *,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> Awaitable[AsyncIterable[storage.AppendRowsResponse]]:
         r"""Appends data to the given stream.
@@ -492,7 +493,7 @@ async def get_write_stream(
         *,
         name: Optional[str] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> stream.WriteStream:
         r"""Gets information about a write stream.
@@ -605,7 +606,7 @@ async def finalize_write_stream(
         *,
         name: Optional[str] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> storage.FinalizeWriteStreamResponse:
         r"""Finalize a write stream so that no new data can be appended to
@@ -716,7 +717,7 @@ async def batch_commit_write_streams(
         *,
         parent: Optional[str] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> storage.BatchCommitWriteStreamsResponse:
         r"""Atomically commits a group of ``PENDING`` streams that belong to
@@ -833,7 +834,7 @@ async def flush_rows(
         *,
         write_stream: Optional[str] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> storage.FlushRowsResponse:
         r"""Flushes rows to a BUFFERED stream.
@@ -954,14 +955,9 @@ async def __aexit__(self, exc_type, exc, tb):
         await self.transport.close()
 
 
-try:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
-        gapic_version=pkg_resources.get_distribution(
-            "google-cloud-bigquery-storage",
-        ).version,
-    )
-except pkg_resources.DistributionNotFound:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=package_version.__version__
+)
 
 
 __all__ = ("BigQueryWriteAsyncClient",)
26 changes: 11 additions & 15 deletions google/cloud/bigquery_storage_v1/services/big_query_write/client.py
@@ -30,7 +30,8 @@
     Union,
     cast,
 )
-import pkg_resources
+
+from google.cloud.bigquery_storage_v1 import gapic_version as package_version
 
 from google.api_core import client_options as client_options_lib
 from google.api_core import exceptions as core_exceptions
@@ -477,7 +478,7 @@ def create_write_stream(
         parent: Optional[str] = None,
         write_stream: Optional[stream.WriteStream] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> stream.WriteStream:
         r"""Creates a write stream to the given table. Additionally, every
@@ -593,7 +594,7 @@ def append_rows(
         requests: Optional[Iterator[storage.AppendRowsRequest]] = None,
         *,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> Iterable[storage.AppendRowsResponse]:
         r"""Appends data to the given stream.
@@ -714,7 +715,7 @@ def get_write_stream(
         *,
         name: Optional[str] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> stream.WriteStream:
         r"""Gets information about a write stream.
@@ -817,7 +818,7 @@ def finalize_write_stream(
         *,
         name: Optional[str] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> storage.FinalizeWriteStreamResponse:
         r"""Finalize a write stream so that no new data can be appended to
@@ -918,7 +919,7 @@ def batch_commit_write_streams(
         *,
         parent: Optional[str] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> storage.BatchCommitWriteStreamsResponse:
         r"""Atomically commits a group of ``PENDING`` streams that belong to
@@ -1027,7 +1028,7 @@ def flush_rows(
         *,
         write_stream: Optional[str] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> storage.FlushRowsResponse:
         r"""Flushes rows to a BUFFERED stream.
@@ -1145,14 +1146,9 @@ def __exit__(self, type, value, traceback):
         self.transport.close()
 
 
-try:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
-        gapic_version=pkg_resources.get_distribution(
-            "google-cloud-bigquery-storage",
-        ).version,
-    )
-except pkg_resources.DistributionNotFound:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=package_version.__version__
+)
 
 
 __all__ = ("BigQueryWriteClient",)
google/cloud/bigquery_storage_v1/services/big_query_write/transports/base.py
@@ -15,7 +15,8 @@
 #
 import abc
 from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
-import pkg_resources
+
+from google.cloud.bigquery_storage_v1 import gapic_version as package_version
 
 import google.auth  # type: ignore
 import google.api_core
@@ -28,14 +29,9 @@
 from google.cloud.bigquery_storage_v1.types import storage
 from google.cloud.bigquery_storage_v1.types import stream
 
-try:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
-        gapic_version=pkg_resources.get_distribution(
-            "google-cloud-bigquery-storage",
-        ).version,
-    )
-except pkg_resources.DistributionNotFound:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=package_version.__version__
+)
 
 
 class BigQueryWriteTransport(abc.ABC):
google/cloud/bigquery_storage_v1beta2/services/big_query_read/async_client.py
@@ -29,7 +29,8 @@
     Type,
     Union,
 )
-import pkg_resources
+
+from google.cloud.bigquery_storage_v1beta2 import gapic_version as package_version
 
 from google.api_core.client_options import ClientOptions
 from google.api_core import exceptions as core_exceptions
@@ -229,7 +230,7 @@ async def create_read_session(
         read_session: Optional[stream.ReadSession] = None,
         max_stream_count: Optional[int] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> stream.ReadSession:
         r"""Creates a new read session. A read session divides
@@ -392,7 +393,7 @@ def read_rows(
         read_stream: Optional[str] = None,
         offset: Optional[int] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> Awaitable[AsyncIterable[storage.ReadRowsResponse]]:
         r"""Reads rows from the stream in the format prescribed
@@ -521,7 +522,7 @@ async def split_read_stream(
         request: Optional[Union[storage.SplitReadStreamRequest, dict]] = None,
         *,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> storage.SplitReadStreamResponse:
         r"""Splits a given ``ReadStream`` into two ``ReadStream`` objects.
@@ -623,14 +624,9 @@ async def __aexit__(self, exc_type, exc, tb):
         await self.transport.close()
 
 
-try:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
-        gapic_version=pkg_resources.get_distribution(
-            "google-cloud-bigquery-storage",
-        ).version,
-    )
-except pkg_resources.DistributionNotFound:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=package_version.__version__
+)
 
 
 __all__ = ("BigQueryReadAsyncClient",)