diff --git a/Makefile.datastore b/Makefile.datastore deleted file mode 100644 index d5f56cad1702..000000000000 --- a/Makefile.datastore +++ /dev/null @@ -1,53 +0,0 @@ -GRPCIO_VIRTUALENV=$(shell pwd)/grpc_python_venv -GENERATED_DIR=$(shell pwd)/generated_python -DATASTORE_DIR=$(shell pwd)/datastore/google/cloud/datastore/_generated -PROTOC_CMD=$(GRPCIO_VIRTUALENV)/bin/python -m grpc.tools.protoc -GOOGLEAPIS_PROTOS_DIR=$(shell pwd)/googleapis-pb - -help: - @echo 'Makefile for google-cloud-python Datastore protos ' - @echo ' ' - @echo ' make generate Generates the protobuf modules' - @echo ' make clean Clean generated files ' - -generate: - # Ensure we have a virtualenv w/ up-to-date grpcio/grpcio-tools - [ -d $(GRPCIO_VIRTUALENV) ] || python2.7 -m virtualenv $(GRPCIO_VIRTUALENV) - $(GRPCIO_VIRTUALENV)/bin/pip install --upgrade grpcio grpcio-tools - # Retrieve git repos that have our *.proto files. - [ -d googleapis-pb ] || git clone https://github.com/googleapis/googleapis googleapis-pb --depth=1 - cd googleapis-pb && git pull origin master - # Make the directory where our *_pb2.py files will go. - mkdir -p $(GENERATED_DIR) - # Generate all *_pb2.py files that do not require gRPC. - $(PROTOC_CMD) \ - --proto_path=$(GOOGLEAPIS_PROTOS_DIR) \ - --python_out=$(GENERATED_DIR) \ - $(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1/datastore.proto \ - $(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1/entity.proto \ - $(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1/query.proto - # Move the newly generated *_pb2.py files into our library. - cp $(GENERATED_DIR)/google/datastore/v1/* $(DATASTORE_DIR) - # Remove all existing *.proto files before we replace - rm -f $(DATASTORE_DIR)/*.proto - # Copy over the *.proto files into our library. - cp $(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1/*.proto $(DATASTORE_DIR) - # Rename all *.proto files in our library with an - # underscore and remove executable bit. - cd $(DATASTORE_DIR) && \ - for filename in *.proto; do \ - chmod -x $$filename ; \ - mv $$filename _$$filename ; \ - done - # Separate the gRPC parts of the datastore service from the - # non-gRPC parts so that the protos can be used without gRPC. - GRPCIO_VIRTUALENV="$(GRPCIO_VIRTUALENV)" \ - GENERATED_SUBDIR=$(GENERATED_SUBDIR) \ - python scripts/make_datastore_grpc.py - # Rewrite the imports in the generated *_pb2.py files. - python scripts/rewrite_imports.py $(DATASTORE_DIR)/*pb2.py - -clean: - rm -fr $(GENERATED_DIR) - -.PHONY: generate clean diff --git a/README.rst b/README.rst index cd2c2d1fba72..0687cf48c4a7 100644 --- a/README.rst +++ b/README.rst @@ -37,7 +37,8 @@ Cloud Platform services: - `Google Cloud DNS`_ (`DNS README`_) - `Stackdriver Error Reporting`_ (`Error Reporting README`_) - `Google Cloud Natural Language`_ (`Natural Language README`_) -- `Google Translate`_ (`Translate README`_) +- `Google Cloud Translation`_ (`Translation README`_) +- `Google Cloud Speech`_ (`Speech README`_) - `Google Cloud Vision`_ (`Vision README`_) - `Google Cloud Bigtable - HappyBase`_ (`HappyBase README`_) - `Google Cloud Runtime Configuration`_ (`Runtime Config README`_) @@ -68,8 +69,10 @@ updates. See `versioning`_ for more details. .. _Error Reporting README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/error_reporting .. _Google Cloud Natural Language: https://pypi.python.org/pypi/google-cloud-language .. _Natural Language README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/language -.. 
_Google Translate: https://pypi.python.org/pypi/google-cloud-translate -.. _Translate README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/translate +.. _Google Cloud Translation: https://pypi.python.org/pypi/google-cloud-translate +.. _Translation README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/translate +.. _Google Cloud Speech: https://pypi.python.org/pypi/google-cloud-speech +.. _Speech README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/speech .. _Google Cloud Vision: https://pypi.python.org/pypi/google-cloud-vision .. _Vision README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/vision .. _Google Cloud Bigtable - HappyBase: https://pypi.python.org/pypi/google-cloud-happybase/ diff --git a/bigquery/google/cloud/bigquery/_helpers.py b/bigquery/google/cloud/bigquery/_helpers.py index 0db3c9fe9653..4e8004d716db 100644 --- a/bigquery/google/cloud/bigquery/_helpers.py +++ b/bigquery/google/cloud/bigquery/_helpers.py @@ -18,10 +18,10 @@ from collections import OrderedDict import datetime +from google.cloud._helpers import UTC from google.cloud._helpers import _date_from_iso8601_date from google.cloud._helpers import _datetime_from_microseconds from google.cloud._helpers import _datetime_to_rfc3339 -from google.cloud._helpers import _microseconds_from_datetime from google.cloud._helpers import _RFC3339_NO_FRACTION from google.cloud._helpers import _time_from_iso8601_time_naive from google.cloud._helpers import _to_bytes @@ -150,7 +150,11 @@ def _bytes_to_json(value): def _timestamp_to_json(value): """Coerce 'value' to an JSON-compatible representation.""" if isinstance(value, datetime.datetime): - value = _microseconds_from_datetime(value) / 1.0e6 + if value.tzinfo not in (None, UTC): + # Convert to UTC and remove the time zone info. + value = value.replace(tzinfo=None) - value.utcoffset() + value = '%s %s+00:00' % ( + value.date().isoformat(), value.time().isoformat()) return value @@ -553,10 +557,12 @@ def from_api_repr(cls, resource): instance = cls(name) types = instance.struct_types for item in resource['parameterType']['structTypes']: - types[item['name']] = item['type'] + types[item['name']] = item['type']['type'] struct_values = resource['parameterValue']['structValues'] for key, value in struct_values.items(): - converted = _CELLDATA_FROM_JSON[types[key]](value, None) + type_ = types[key] + value = value['value'] + converted = _CELLDATA_FROM_JSON[type_](value, None) instance.struct_values[key] = converted return instance @@ -567,7 +573,7 @@ def to_api_repr(self): :returns: JSON mapping """ types = [ - {'name': key, 'type': value} + {'name': key, 'type': {'type': value}} for key, value in self.struct_types.items() ] values = {} @@ -575,10 +581,11 @@ def to_api_repr(self): converter = _SCALAR_VALUE_TO_JSON.get(self.struct_types[name]) if converter is not None: value = converter(value) - values[name] = value + values[name] = {'value': value} resource = { 'parameterType': { + 'type': 'STRUCT', 'structTypes': types, }, 'parameterValue': { diff --git a/bigquery/google/cloud/bigquery/client.py b/bigquery/google/cloud/bigquery/client.py index 0c01578f5ffe..e98f390ff616 100644 --- a/bigquery/google/cloud/bigquery/client.py +++ b/bigquery/google/cloud/bigquery/client.py @@ -58,20 +58,25 @@ class Client(JSONClient): passed when creating a dataset / job. If not passed, falls back to the default inferred from the environment. 
- :type credentials: :class:`oauth2client.client.OAuth2Credentials` or - :class:`NoneType` - :param credentials: The OAuth2 Credentials to use for the connection - owned by this client. If not passed (and if no ``http`` - object is passed), falls back to the default inferred - from the environment. - - :type http: :class:`httplib2.Http` or class that defines ``request()``. - :param http: An optional HTTP object to make requests. If not passed, an + :type credentials: :class:`~google.auth.credentials.Credentials` + :param credentials: (Optional) The OAuth2 Credentials to use for this + client. If not passed (and if no ``http`` object is + passed), falls back to the default inferred from the + environment. + + :type http: :class:`~httplib2.Http` + :param http: (Optional) HTTP object to make requests. Can be any object + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. If not passed, an ``http`` object is created that is bound to the ``credentials`` for the current object. """ - _connection_class = Connection + def __init__(self, project=None, credentials=None, http=None): + super(Client, self).__init__( + project=project, credentials=credentials, http=http) + self._connection = Connection( + credentials=self._credentials, http=self._http) def list_projects(self, max_results=None, page_token=None): """List projects for the project associated with this client. diff --git a/bigquery/unit_tests/test__helpers.py b/bigquery/unit_tests/test__helpers.py index 0e508aba2da0..cc2df7b19006 100644 --- a/bigquery/unit_tests/test__helpers.py +++ b/bigquery/unit_tests/test__helpers.py @@ -546,13 +546,35 @@ def _call_fut(self, value): def test_w_float(self): self.assertEqual(self._call_fut(1.234567), 1.234567) - def test_w_datetime(self): + def test_w_string(self): + ZULU = '2016-12-20 15:58:27.339328+00:00' + self.assertEqual(self._call_fut(ZULU), ZULU) + + def test_w_datetime_wo_zone(self): + import datetime + ZULU = '2016-12-20 15:58:27.339328+00:00' + when = datetime.datetime(2016, 12, 20, 15, 58, 27, 339328) + self.assertEqual(self._call_fut(when), ZULU) + + def test_w_datetime_w_non_utc_zone(self): + import datetime + + class _Zone(datetime.tzinfo): + + def utcoffset(self, _): + return datetime.timedelta(minutes=-240) + + ZULU = '2016-12-20 19:58:27.339328+00:00' + when = datetime.datetime( + 2016, 12, 20, 15, 58, 27, 339328, tzinfo=_Zone()) + self.assertEqual(self._call_fut(when), ZULU) + + def test_w_datetime_w_utc_zone(self): import datetime from google.cloud._helpers import UTC - from google.cloud._helpers import _microseconds_from_datetime - when = datetime.datetime(2016, 12, 3, 14, 11, 27, tzinfo=UTC) - self.assertEqual(self._call_fut(when), - _microseconds_from_datetime(when) / 1e6) + ZULU = '2016-12-20 15:58:27.339328+00:00' + when = datetime.datetime(2016, 12, 20, 15, 58, 27, 339328, tzinfo=UTC) + self.assertEqual(self._call_fut(when), ZULU) class Test_datetime_to_json(unittest.TestCase): @@ -907,20 +929,20 @@ def test_to_api_repr_w_bool(self): self.assertEqual(param.to_api_repr(), EXPECTED) def test_to_api_repr_w_timestamp_datetime(self): + from google.cloud._helpers import UTC import datetime - from google.cloud._helpers import _microseconds_from_datetime - now = datetime.datetime.utcnow() - seconds = _microseconds_from_datetime(now) / 1.0e6 + STAMP = '2016-12-20 15:58:27.339328+00:00' + when = datetime.datetime(2016, 12, 20, 15, 58, 27, 339328, tzinfo=UTC) EXPECTED = { 'parameterType': { 'type': 'TIMESTAMP', }, 'parameterValue': { - 'value': seconds, + 
'value': STAMP, }, } klass = self._get_target_class() - param = klass.positional(type_='TIMESTAMP', value=now) + param = klass.positional(type_='TIMESTAMP', value=when) self.assertEqual(param.to_api_repr(), EXPECTED) def test_to_api_repr_w_timestamp_micros(self): @@ -1060,6 +1082,7 @@ def test_from_api_repr_w_name(self): def test_from_api_repr_wo_name(self): RESOURCE = { 'parameterType': { + 'type': 'ARRAY', 'arrayType': 'INT64', }, 'parameterValue': { @@ -1076,6 +1099,7 @@ def test_to_api_repr_w_name(self): EXPECTED = { 'name': 'foo', 'parameterType': { + 'type': 'ARRAY', 'arrayType': 'INT64', }, 'parameterValue': { @@ -1088,6 +1112,7 @@ def test_to_api_repr_w_name(self): def test_to_api_repr_wo_name(self): EXPECTED = { 'parameterType': { + 'type': 'ARRAY', 'arrayType': 'INT64', }, 'parameterValue': { @@ -1101,6 +1126,7 @@ def test_to_api_repr_wo_name(self): def test_to_api_repr_w_unknown_type(self): EXPECTED = { 'parameterType': { + 'type': 'ARRAY', 'arrayType': 'UNKNOWN', }, 'parameterValue': { @@ -1148,13 +1174,17 @@ def test_from_api_repr_w_name(self): RESOURCE = { 'name': 'foo', 'parameterType': { + 'type': 'STRUCT', 'structTypes': [ - {'name': 'bar', 'type': 'INT64'}, - {'name': 'baz', 'type': 'STRING'}, + {'name': 'bar', 'type': {'type': 'INT64'}}, + {'name': 'baz', 'type': {'type': 'STRING'}}, ], }, 'parameterValue': { - 'structValues': {'bar': 123, 'baz': 'abc'}, + 'structValues': { + 'bar': {'value': 123}, + 'baz': {'value': 'abc'}, + }, }, } klass = self._get_target_class() @@ -1166,13 +1196,17 @@ def test_from_api_repr_w_name(self): def test_from_api_repr_wo_name(self): RESOURCE = { 'parameterType': { + 'type': 'STRUCT', 'structTypes': [ - {'name': 'bar', 'type': 'INT64'}, - {'name': 'baz', 'type': 'STRING'}, + {'name': 'bar', 'type': {'type': 'INT64'}}, + {'name': 'baz', 'type': {'type': 'STRING'}}, ], }, 'parameterValue': { - 'structValues': {'bar': 123, 'baz': 'abc'}, + 'structValues': { + 'bar': {'value': 123}, + 'baz': {'value': 'abc'}, + }, }, } klass = self._get_target_class() @@ -1185,13 +1219,17 @@ def test_to_api_repr_w_name(self): EXPECTED = { 'name': 'foo', 'parameterType': { + 'type': 'STRUCT', 'structTypes': [ - {'name': 'bar', 'type': 'INT64'}, - {'name': 'baz', 'type': 'STRING'}, + {'name': 'bar', 'type': {'type': 'INT64'}}, + {'name': 'baz', 'type': {'type': 'STRING'}}, ], }, 'parameterValue': { - 'structValues': {'bar': '123', 'baz': 'abc'}, + 'structValues': { + 'bar': {'value': '123'}, + 'baz': {'value': 'abc'}, + }, }, } sub_1 = self._make_subparam('bar', 'INT64', 123) @@ -1202,13 +1240,17 @@ def test_to_api_repr_w_name(self): def test_to_api_repr_wo_name(self): EXPECTED = { 'parameterType': { + 'type': 'STRUCT', 'structTypes': [ - {'name': 'bar', 'type': 'INT64'}, - {'name': 'baz', 'type': 'STRING'}, + {'name': 'bar', 'type': {'type': 'INT64'}}, + {'name': 'baz', 'type': {'type': 'STRING'}}, ], }, 'parameterValue': { - 'structValues': {'bar': '123', 'baz': 'abc'}, + 'structValues': { + 'bar': {'value': '123'}, + 'baz': {'value': 'abc'}, + }, }, } sub_1 = self._make_subparam('bar', 'INT64', 123) diff --git a/circle.yml b/circle.yml index dad5df83ec38..9ff6c2db10d2 100644 --- a/circle.yml +++ b/circle.yml @@ -24,7 +24,8 @@ general: deployment: release: - tag: /(([a-z]+)-)?([0-9]+)\.([0-9]+)\.([0-9]+)/ + # See "scripts/circleci_tagged_pkg.py" for info on REGEX + tag: /(([a-z]+)-)*([0-9]+)\.([0-9]+)\.([0-9]+)/ owner: GoogleCloudPlatform commands: - pip install --upgrade twine diff --git a/core/google/cloud/client.py b/core/google/cloud/client.py index 
ab413f91a652..7a14e03f763a 100644 --- a/core/google/cloud/client.py +++ b/core/google/cloud/client.py @@ -19,7 +19,6 @@ import six from google.cloud._helpers import _determine_default_project -from google.cloud._http import Connection from google.cloud.credentials import get_credentials @@ -72,24 +71,23 @@ def from_service_account_json(cls, json_credentials_path, *args, **kwargs): class Client(_ClientFactoryMixin): """Client to bundle configuration needed for API requests. - Assumes that the associated ``_connection_class`` only accepts - ``http`` and ``credentials`` in its constructor. + Stores ``credentials`` and ``http`` object so that subclasses + can pass them along to a connection class. - :type credentials: :class:`google.auth.credentials.Credentials` or - :class:`NoneType` - :param credentials: The OAuth2 Credentials to use for the connection - owned by this client. If not passed (and if no ``http`` - object is passed), falls back to the default inferred - from the environment. + :type credentials: :class:`~google.auth.credentials.Credentials` + :param credentials: (Optional) The OAuth2 Credentials to use for this + client. If not passed (and if no ``http`` object is + passed), falls back to the default inferred from the + environment. - :type http: :class:`httplib2.Http` or class that defines ``request()``. - :param http: An optional HTTP object to make requests. If not passed, an + :type http: :class:`~httplib2.Http` + :param http: (Optional) HTTP object to make requests. Can be any object + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. If not passed, an ``http`` object is created that is bound to the ``credentials`` for the current object. """ - _connection_class = Connection - def __init__(self, credentials=None, http=None): if (credentials is not None and not isinstance( @@ -97,8 +95,8 @@ def __init__(self, credentials=None, http=None): raise ValueError(_GOOGLE_AUTH_CREDENTIALS_HELP) if credentials is None and http is None: credentials = get_credentials() - self._connection = self._connection_class( - credentials=credentials, http=http) + self._credentials = credentials + self._http = http class _ClientProjectMixin(object): @@ -142,15 +140,16 @@ class JSONClient(Client, _ClientProjectMixin): passed falls back to the default inferred from the environment. - :type credentials: :class:`google.auth.credentials.Credentials` or - :class:`NoneType` - :param credentials: The OAuth2 Credentials to use for the connection - owned by this client. If not passed (and if no ``http`` - object is passed), falls back to the default inferred - from the environment. + :type credentials: :class:`~google.auth.credentials.Credentials` + :param credentials: (Optional) The OAuth2 Credentials to use for this + client. If not passed (and if no ``http`` object is + passed), falls back to the default inferred from the + environment. - :type http: :class:`httplib2.Http` or class that defines ``request()``. - :param http: An optional HTTP object to make requests. If not passed, an + :type http: :class:`~httplib2.Http` + :param http: (Optional) HTTP object to make requests. Can be any object + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. If not passed, an ``http`` object is created that is bound to the ``credentials`` for the current object. 
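The core refactor above drops the `_connection_class` hook: `google.cloud.client.Client` now only stores `_credentials` and `_http`, and each service client builds its own connection, as the BigQuery `Client.__init__` earlier in this patch does. A minimal sketch of that wiring pattern follows; the class names (`DemoConnection`, `DemoServiceClient`) are stand-ins for illustration, not real google-cloud-python APIs.

# Minimal sketch of the new client/connection wiring; DemoConnection and
# DemoServiceClient are hypothetical stand-ins, not google-cloud-python classes.

class DemoConnection(object):
    """Accepts the same constructor arguments a service Connection now receives."""

    def __init__(self, credentials=None, http=None):
        self.credentials = credentials
        self.http = http


class BaseClient(object):
    """Mirrors the refactored ``google.cloud.client.Client``: it stores the
    credentials/http pair instead of owning a ``_connection``."""

    def __init__(self, credentials=None, http=None):
        self._credentials = credentials
        self._http = http


class DemoServiceClient(BaseClient):
    """Mirrors the pattern shown for the BigQuery client: the subclass builds
    its own connection from the values stored by the base class."""

    def __init__(self, credentials=None, http=None):
        super(DemoServiceClient, self).__init__(
            credentials=credentials, http=http)
        self._connection = DemoConnection(
            credentials=self._credentials, http=self._http)


if __name__ == '__main__':
    creds, http = object(), object()
    client = DemoServiceClient(credentials=creds, http=http)
    assert client._connection.credentials is creds
    assert client._connection.http is http

Under this split, unit tests (as updated later in this patch) can assert directly on ``client._credentials`` and ``client._http`` without swapping in a mock ``_connection_class``.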
diff --git a/core/tox.ini b/core/tox.ini index 48e8a517f057..156ffc07e00e 100644 --- a/core/tox.ini +++ b/core/tox.ini @@ -4,7 +4,7 @@ envlist = [testing] deps = - grpcio >= 1.0.2rc0 + grpcio >= 1.0.2 mock pytest covercmd = diff --git a/core/unit_tests/test_client.py b/core/unit_tests/test_client.py index 21d036c06ad0..dd1075aae5f8 100644 --- a/core/unit_tests/test_client.py +++ b/core/unit_tests/test_client.py @@ -36,15 +36,6 @@ def test_virtual(self): class TestClient(unittest.TestCase): - def setUp(self): - KLASS = self._get_target_class() - self.original_cnxn_class = KLASS._connection_class - KLASS._connection_class = _MockConnection - - def tearDown(self): - KLASS = self._get_target_class() - KLASS._connection_class = self.original_cnxn_class - @staticmethod def _get_target_class(): from google.cloud.client import Client @@ -67,8 +58,8 @@ def mock_get_credentials(): with _Monkey(client, get_credentials=mock_get_credentials): client_obj = self._make_one() - self.assertIsInstance(client_obj._connection, _MockConnection) - self.assertIs(client_obj._connection.credentials, CREDENTIALS) + self.assertIs(client_obj._credentials, CREDENTIALS) + self.assertIsNone(client_obj._http) self.assertEqual(FUNC_CALLS, ['get_credentials']) def test_ctor_explicit(self): @@ -76,9 +67,8 @@ def test_ctor_explicit(self): HTTP = object() client_obj = self._make_one(credentials=CREDENTIALS, http=HTTP) - self.assertIsInstance(client_obj._connection, _MockConnection) - self.assertIs(client_obj._connection.credentials, CREDENTIALS) - self.assertIs(client_obj._connection.http, HTTP) + self.assertIs(client_obj._credentials, CREDENTIALS) + self.assertIs(client_obj._http, HTTP) def test_ctor_bad_credentials(self): CREDENTIALS = object() @@ -99,7 +89,8 @@ def test_from_service_account_json(self): mock.sentinel.filename) self.assertIs( - client_obj._connection.credentials, constructor.return_value) + client_obj._credentials, constructor.return_value) + self.assertIsNone(client_obj._http) constructor.assert_called_once_with(mock.sentinel.filename) def test_from_service_account_json_bad_args(self): @@ -112,15 +103,6 @@ def test_from_service_account_json_bad_args(self): class TestJSONClient(unittest.TestCase): - def setUp(self): - KLASS = self._get_target_class() - self.original_cnxn_class = KLASS._connection_class - KLASS._connection_class = _MockConnection - - def tearDown(self): - KLASS = self._get_target_class() - KLASS._connection_class = self.original_cnxn_class - @staticmethod def _get_target_class(): from google.cloud.client import JSONClient @@ -150,8 +132,8 @@ def mock_get_credentials(): client_obj = self._make_one() self.assertEqual(client_obj.project, PROJECT) - self.assertIsInstance(client_obj._connection, _MockConnection) - self.assertIs(client_obj._connection.credentials, CREDENTIALS) + self.assertIs(client_obj._credentials, CREDENTIALS) + self.assertIsNone(client_obj._http) self.assertEqual( FUNC_CALLS, [(None, '_determine_default_project'), 'get_credentials']) @@ -191,9 +173,8 @@ def _explicit_ctor_helper(self, project): self.assertEqual(client_obj.project, project.decode('utf-8')) else: self.assertEqual(client_obj.project, project) - self.assertIsInstance(client_obj._connection, _MockConnection) - self.assertIs(client_obj._connection.credentials, CREDENTIALS) - self.assertIs(client_obj._connection.http, HTTP) + self.assertIs(client_obj._credentials, CREDENTIALS) + self.assertIs(client_obj._http, HTTP) def test_ctor_explicit_bytes(self): PROJECT = b'PROJECT' @@ -202,10 +183,3 @@ def 
test_ctor_explicit_bytes(self): def test_ctor_explicit_unicode(self): PROJECT = u'PROJECT' self._explicit_ctor_helper(PROJECT) - - -class _MockConnection(object): - - def __init__(self, credentials=None, http=None): - self.credentials = credentials - self.http = http diff --git a/datastore/.coveragerc b/datastore/.coveragerc index 08f3fdea2433..a54b99aa14b7 100644 --- a/datastore/.coveragerc +++ b/datastore/.coveragerc @@ -2,8 +2,6 @@ branch = True [report] -omit = - */_generated/*.py fail_under = 100 show_missing = True exclude_lines = diff --git a/datastore/google/cloud/datastore/_generated/__init__.py b/datastore/google/cloud/datastore/_generated/__init__.py deleted file mode 100644 index 5b2724764514..000000000000 --- a/datastore/google/cloud/datastore/_generated/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Generated protobuf modules for Google Cloud Datastore API.""" diff --git a/datastore/google/cloud/datastore/_generated/_datastore.proto b/datastore/google/cloud/datastore/_generated/_datastore.proto deleted file mode 100644 index 5881e9a14714..000000000000 --- a/datastore/google/cloud/datastore/_generated/_datastore.proto +++ /dev/null @@ -1,316 +0,0 @@ -// Copyright 2016 Google Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.datastore.v1; - -import "google/api/annotations.proto"; -import "google/datastore/v1/entity.proto"; -import "google/datastore/v1/query.proto"; - -option java_multiple_files = true; -option java_outer_classname = "DatastoreProto"; -option java_package = "com.google.datastore.v1"; - - -// Each RPC normalizes the partition IDs of the keys in its input entities, -// and always returns entities with keys with normalized partition IDs. -// This applies to all keys and entities, including those in values, except keys -// with both an empty path and an empty or unset partition ID. Normalization of -// input keys sets the project ID (if not already set) to the project ID from -// the request. -// -service Datastore { - // Looks up entities by key. - rpc Lookup(LookupRequest) returns (LookupResponse) { - option (google.api.http) = { post: "/v1/projects/{project_id}:lookup" body: "*" }; - } - - // Queries for entities. - rpc RunQuery(RunQueryRequest) returns (RunQueryResponse) { - option (google.api.http) = { post: "/v1/projects/{project_id}:runQuery" body: "*" }; - } - - // Begins a new transaction. 
- rpc BeginTransaction(BeginTransactionRequest) returns (BeginTransactionResponse) { - option (google.api.http) = { post: "/v1/projects/{project_id}:beginTransaction" body: "*" }; - } - - // Commits a transaction, optionally creating, deleting or modifying some - // entities. - rpc Commit(CommitRequest) returns (CommitResponse) { - option (google.api.http) = { post: "/v1/projects/{project_id}:commit" body: "*" }; - } - - // Rolls back a transaction. - rpc Rollback(RollbackRequest) returns (RollbackResponse) { - option (google.api.http) = { post: "/v1/projects/{project_id}:rollback" body: "*" }; - } - - // Allocates IDs for the given keys, which is useful for referencing an entity - // before it is inserted. - rpc AllocateIds(AllocateIdsRequest) returns (AllocateIdsResponse) { - option (google.api.http) = { post: "/v1/projects/{project_id}:allocateIds" body: "*" }; - } -} - -// The request for [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. -message LookupRequest { - // The ID of the project against which to make the request. - string project_id = 8; - - // The options for this lookup request. - ReadOptions read_options = 1; - - // Keys of entities to look up. - repeated Key keys = 3; -} - -// The response for [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. -message LookupResponse { - // Entities found as `ResultType.FULL` entities. The order of results in this - // field is undefined and has no relation to the order of the keys in the - // input. - repeated EntityResult found = 1; - - // Entities not found as `ResultType.KEY_ONLY` entities. The order of results - // in this field is undefined and has no relation to the order of the keys - // in the input. - repeated EntityResult missing = 2; - - // A list of keys that were not looked up due to resource constraints. The - // order of results in this field is undefined and has no relation to the - // order of the keys in the input. - repeated Key deferred = 3; -} - -// The request for [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. -message RunQueryRequest { - // The ID of the project against which to make the request. - string project_id = 8; - - // Entities are partitioned into subsets, identified by a partition ID. - // Queries are scoped to a single partition. - // This partition ID is normalized with the standard default context - // partition ID. - PartitionId partition_id = 2; - - // The options for this query. - ReadOptions read_options = 1; - - // The type of query. - oneof query_type { - // The query to run. - Query query = 3; - - // The GQL query to run. - GqlQuery gql_query = 7; - } -} - -// The response for [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. -message RunQueryResponse { - // A batch of query results (always present). - QueryResultBatch batch = 1; - - // The parsed form of the `GqlQuery` from the request, if it was set. - Query query = 2; -} - -// The request for [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. -message BeginTransactionRequest { - // The ID of the project against which to make the request. - string project_id = 8; -} - -// The response for [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. -message BeginTransactionResponse { - // The transaction identifier (always present). - bytes transaction = 1; -} - -// The request for [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. -message RollbackRequest { - // The ID of the project against which to make the request. 
- string project_id = 8; - - // The transaction identifier, returned by a call to - // [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - bytes transaction = 1; -} - -// The response for [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. -// (an empty message). -message RollbackResponse { - -} - -// The request for [Datastore.Commit][google.datastore.v1.Datastore.Commit]. -message CommitRequest { - // The modes available for commits. - enum Mode { - // Unspecified. This value must not be used. - MODE_UNSPECIFIED = 0; - - // Transactional: The mutations are either all applied, or none are applied. - // Learn about transactions [here](https://cloud.google.com/datastore/docs/concepts/transactions). - TRANSACTIONAL = 1; - - // Non-transactional: The mutations may not apply as all or none. - NON_TRANSACTIONAL = 2; - } - - // The ID of the project against which to make the request. - string project_id = 8; - - // The type of commit to perform. Defaults to `TRANSACTIONAL`. - Mode mode = 5; - - // Must be set when mode is `TRANSACTIONAL`. - oneof transaction_selector { - // The identifier of the transaction associated with the commit. A - // transaction identifier is returned by a call to - // [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - bytes transaction = 1; - } - - // The mutations to perform. - // - // When mode is `TRANSACTIONAL`, mutations affecting a single entity are - // applied in order. The following sequences of mutations affecting a single - // entity are not permitted in a single `Commit` request: - // - // - `insert` followed by `insert` - // - `update` followed by `insert` - // - `upsert` followed by `insert` - // - `delete` followed by `update` - // - // When mode is `NON_TRANSACTIONAL`, no two mutations may affect a single - // entity. - repeated Mutation mutations = 6; -} - -// The response for [Datastore.Commit][google.datastore.v1.Datastore.Commit]. -message CommitResponse { - // The result of performing the mutations. - // The i-th mutation result corresponds to the i-th mutation in the request. - repeated MutationResult mutation_results = 3; - - // The number of index entries updated during the commit, or zero if none were - // updated. - int32 index_updates = 4; -} - -// The request for [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. -message AllocateIdsRequest { - // The ID of the project against which to make the request. - string project_id = 8; - - // A list of keys with incomplete key paths for which to allocate IDs. - // No key may be reserved/read-only. - repeated Key keys = 1; -} - -// The response for [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. -message AllocateIdsResponse { - // The keys specified in the request (in the same order), each with - // its key path completed with a newly allocated ID. - repeated Key keys = 1; -} - -// A mutation to apply to an entity. -message Mutation { - // The mutation operation. - // - // For `insert`, `update`, and `upsert`: - // - The entity's key must not be reserved/read-only. - // - No property in the entity may have a reserved name, - // not even a property in an entity in a value. - // - No value in the entity may have meaning 18, - // not even a value in an entity in another value. - oneof operation { - // The entity to insert. The entity must not already exist. - // The entity key's final path element may be incomplete. - Entity insert = 4; - - // The entity to update. The entity must already exist. 
- // Must have a complete key path. - Entity update = 5; - - // The entity to upsert. The entity may or may not already exist. - // The entity key's final path element may be incomplete. - Entity upsert = 6; - - // The key of the entity to delete. The entity may or may not already exist. - // Must have a complete key path and must not be reserved/read-only. - Key delete = 7; - } - - // When set, the server will detect whether or not this mutation conflicts - // with the current version of the entity on the server. Conflicting mutations - // are not applied, and are marked as such in MutationResult. - oneof conflict_detection_strategy { - // The version of the entity that this mutation is being applied to. If this - // does not match the current version on the server, the mutation conflicts. - int64 base_version = 8; - } -} - -// The result of applying a mutation. -message MutationResult { - // The automatically allocated key. - // Set only when the mutation allocated a key. - Key key = 3; - - // The version of the entity on the server after processing the mutation. If - // the mutation doesn't change anything on the server, then the version will - // be the version of the current entity or, if no entity is present, a version - // that is strictly greater than the version of any previous entity and less - // than the version of any possible future entity. - int64 version = 4; - - // Whether a conflict was detected for this mutation. Always false when a - // conflict detection strategy field is not set in the mutation. - bool conflict_detected = 5; -} - -// The options shared by read requests. -message ReadOptions { - // The possible values for read consistencies. - enum ReadConsistency { - // Unspecified. This value must not be used. - READ_CONSISTENCY_UNSPECIFIED = 0; - - // Strong consistency. - STRONG = 1; - - // Eventual consistency. - EVENTUAL = 2; - } - - // If not specified, lookups and ancestor queries default to - // `read_consistency`=`STRONG`, global queries default to - // `read_consistency`=`EVENTUAL`. - oneof consistency_type { - // The non-transactional read consistency to use. - // Cannot be set to `STRONG` for global queries. - ReadConsistency read_consistency = 1; - - // The identifier of the transaction in which to read. A - // transaction identifier is returned by a call to - // [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - bytes transaction = 2; - } -} diff --git a/datastore/google/cloud/datastore/_generated/_entity.proto b/datastore/google/cloud/datastore/_generated/_entity.proto deleted file mode 100644 index a0e7d39138f2..000000000000 --- a/datastore/google/cloud/datastore/_generated/_entity.proto +++ /dev/null @@ -1,201 +0,0 @@ -// Copyright 2016 Google Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.datastore.v1; - -import "google/api/annotations.proto"; -import "google/protobuf/struct.proto"; -import "google/protobuf/timestamp.proto"; -import "google/type/latlng.proto"; - -option java_multiple_files = true; -option java_outer_classname = "EntityProto"; -option java_package = "com.google.datastore.v1"; - - -// A partition ID identifies a grouping of entities. The grouping is always -// by project and namespace, however the namespace ID may be empty. -// -// A partition ID contains several dimensions: -// project ID and namespace ID. -// -// Partition dimensions: -// -// - May be `""`. -// - Must be valid UTF-8 bytes. -// - Must have values that match regex `[A-Za-z\d\.\-_]{1,100}` -// If the value of any dimension matches regex `__.*__`, the partition is -// reserved/read-only. -// A reserved/read-only partition ID is forbidden in certain documented -// contexts. -// -// Foreign partition IDs (in which the project ID does -// not match the context project ID ) are discouraged. -// Reads and writes of foreign partition IDs may fail if the project is not in an active state. -message PartitionId { - // The ID of the project to which the entities belong. - string project_id = 2; - - // If not empty, the ID of the namespace to which the entities belong. - string namespace_id = 4; -} - -// A unique identifier for an entity. -// If a key's partition ID or any of its path kinds or names are -// reserved/read-only, the key is reserved/read-only. -// A reserved/read-only key is forbidden in certain documented contexts. -message Key { - // A (kind, ID/name) pair used to construct a key path. - // - // If either name or ID is set, the element is complete. - // If neither is set, the element is incomplete. - message PathElement { - // The kind of the entity. - // A kind matching regex `__.*__` is reserved/read-only. - // A kind must not contain more than 1500 bytes when UTF-8 encoded. - // Cannot be `""`. - string kind = 1; - - // The type of ID. - oneof id_type { - // The auto-allocated ID of the entity. - // Never equal to zero. Values less than zero are discouraged and may not - // be supported in the future. - int64 id = 2; - - // The name of the entity. - // A name matching regex `__.*__` is reserved/read-only. - // A name must not be more than 1500 bytes when UTF-8 encoded. - // Cannot be `""`. - string name = 3; - } - } - - // Entities are partitioned into subsets, currently identified by a project - // ID and namespace ID. - // Queries are scoped to a single partition. - PartitionId partition_id = 1; - - // The entity path. - // An entity path consists of one or more elements composed of a kind and a - // string or numerical identifier, which identify entities. The first - // element identifies a _root entity_, the second element identifies - // a _child_ of the root entity, the third element identifies a child of the - // second entity, and so forth. The entities identified by all prefixes of - // the path are called the element's _ancestors_. - // - // An entity path is always fully complete: *all* of the entity's ancestors - // are required to be in the path along with the entity identifier itself. - // The only exception is that in some documented cases, the identifier in the - // last path element (for the entity) itself may be omitted. For example, - // the last path element of the key of `Mutation.insert` may have no - // identifier. - // - // A path can never be empty, and a path can have at most 100 elements. 
- repeated PathElement path = 2; -} - -// An array value. -message ArrayValue { - // Values in the array. - // The order of this array may not be preserved if it contains a mix of - // indexed and unindexed values. - repeated Value values = 1; -} - -// A message that can hold any of the supported value types and associated -// metadata. -message Value { - // Must have a value set. - oneof value_type { - // A null value. - google.protobuf.NullValue null_value = 11; - - // A boolean value. - bool boolean_value = 1; - - // An integer value. - int64 integer_value = 2; - - // A double value. - double double_value = 3; - - // A timestamp value. - // When stored in the Datastore, precise only to microseconds; - // any additional precision is rounded down. - google.protobuf.Timestamp timestamp_value = 10; - - // A key value. - Key key_value = 5; - - // A UTF-8 encoded string value. - // When `exclude_from_indexes` is false (it is indexed) , may have at most 1500 bytes. - // Otherwise, may be set to at least 1,000,000 bytes. - string string_value = 17; - - // A blob value. - // May have at most 1,000,000 bytes. - // When `exclude_from_indexes` is false, may have at most 1500 bytes. - // In JSON requests, must be base64-encoded. - bytes blob_value = 18; - - // A geo point value representing a point on the surface of Earth. - google.type.LatLng geo_point_value = 8; - - // An entity value. - // - // - May have no key. - // - May have a key with an incomplete key path. - // - May have a reserved/read-only key. - Entity entity_value = 6; - - // An array value. - // Cannot contain another array value. - // A `Value` instance that sets field `array_value` must not set fields - // `meaning` or `exclude_from_indexes`. - ArrayValue array_value = 9; - } - - // The `meaning` field should only be populated for backwards compatibility. - int32 meaning = 14; - - // If the value should be excluded from all indexes including those defined - // explicitly. - bool exclude_from_indexes = 19; -} - -// A Datastore data object. -// -// An entity is limited to 1 megabyte when stored. That _roughly_ -// corresponds to a limit of 1 megabyte for the serialized form of this -// message. -message Entity { - // The entity's key. - // - // An entity must have a key, unless otherwise documented (for example, - // an entity in `Value.entity_value` may have no key). - // An entity's kind is its key path's last element's kind, - // or null if it has no key. - Key key = 1; - - // The entity's properties. - // The map's keys are property names. - // A property name matching regex `__.*__` is reserved. - // A reserved property name is forbidden in certain documented contexts. - // The name must not contain more than 500 characters. - // The name cannot be `""`. - map<string, Value> properties = 3; -} diff --git a/datastore/google/cloud/datastore/_generated/_query.proto b/datastore/google/cloud/datastore/_generated/_query.proto deleted file mode 100644 index e6dba2b226f9..000000000000 --- a/datastore/google/cloud/datastore/_generated/_query.proto +++ /dev/null @@ -1,306 +0,0 @@ -// Copyright 2016 Google Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.datastore.v1; - -import "google/api/annotations.proto"; -import "google/datastore/v1/entity.proto"; -import "google/protobuf/wrappers.proto"; -import "google/type/latlng.proto"; - -option java_multiple_files = true; -option java_outer_classname = "QueryProto"; -option java_package = "com.google.datastore.v1"; - - -// The result of fetching an entity from Datastore. -message EntityResult { - // Specifies what data the 'entity' field contains. - // A `ResultType` is either implied (for example, in `LookupResponse.missing` - // from `datastore.proto`, it is always `KEY_ONLY`) or specified by context - // (for example, in message `QueryResultBatch`, field `entity_result_type` - // specifies a `ResultType` for all the values in field `entity_results`). - enum ResultType { - // Unspecified. This value is never used. - RESULT_TYPE_UNSPECIFIED = 0; - - // The key and properties. - FULL = 1; - - // A projected subset of properties. The entity may have no key. - PROJECTION = 2; - - // Only the key. - KEY_ONLY = 3; - } - - // The resulting entity. - Entity entity = 1; - - // The version of the entity, a strictly positive number that monotonically - // increases with changes to the entity. - // - // This field is set for [`FULL`][google.datastore.v1.EntityResult.ResultType.FULL] entity - // results. - // - // For [missing][google.datastore.v1.LookupResponse.missing] entities in `LookupResponse`, this - // is the version of the snapshot that was used to look up the entity, and it - // is always set except for eventually consistent reads. - int64 version = 4; - - // A cursor that points to the position after the result entity. - // Set only when the `EntityResult` is part of a `QueryResultBatch` message. - bytes cursor = 3; -} - -// A query for entities. -message Query { - // The projection to return. Defaults to returning all properties. - repeated Projection projection = 2; - - // The kinds to query (if empty, returns entities of all kinds). - // Currently at most 1 kind may be specified. - repeated KindExpression kind = 3; - - // The filter to apply. - Filter filter = 4; - - // The order to apply to the query results (if empty, order is unspecified). - repeated PropertyOrder order = 5; - - // The properties to make distinct. The query results will contain the first - // result for each distinct combination of values for the given properties - // (if empty, all results are returned). - repeated PropertyReference distinct_on = 6; - - // A starting point for the query results. Query cursors are - // returned in query result batches and - // [can only be used to continue the same query](https://cloud.google.com/datastore/docs/concepts/queries#cursors_limits_and_offsets). - bytes start_cursor = 7; - - // An ending point for the query results. Query cursors are - // returned in query result batches and - // [can only be used to limit the same query](https://cloud.google.com/datastore/docs/concepts/queries#cursors_limits_and_offsets). - bytes end_cursor = 8; - - // The number of results to skip. 
Applies before limit, but after all other - // constraints. Optional. Must be >= 0 if specified. - int32 offset = 10; - - // The maximum number of results to return. Applies after all other - // constraints. Optional. - // Unspecified is interpreted as no limit. - // Must be >= 0 if specified. - google.protobuf.Int32Value limit = 12; -} - -// A representation of a kind. -message KindExpression { - // The name of the kind. - string name = 1; -} - -// A reference to a property relative to the kind expressions. -message PropertyReference { - // The name of the property. - // If name includes "."s, it may be interpreted as a property name path. - string name = 2; -} - -// A representation of a property in a projection. -message Projection { - // The property to project. - PropertyReference property = 1; -} - -// The desired order for a specific property. -message PropertyOrder { - // The sort direction. - enum Direction { - // Unspecified. This value must not be used. - DIRECTION_UNSPECIFIED = 0; - - // Ascending. - ASCENDING = 1; - - // Descending. - DESCENDING = 2; - } - - // The property to order by. - PropertyReference property = 1; - - // The direction to order by. Defaults to `ASCENDING`. - Direction direction = 2; -} - -// A holder for any type of filter. -message Filter { - // The type of filter. - oneof filter_type { - // A composite filter. - CompositeFilter composite_filter = 1; - - // A filter on a property. - PropertyFilter property_filter = 2; - } -} - -// A filter that merges multiple other filters using the given operator. -message CompositeFilter { - // A composite filter operator. - enum Operator { - // Unspecified. This value must not be used. - OPERATOR_UNSPECIFIED = 0; - - // The results are required to satisfy each of the combined filters. - AND = 1; - } - - // The operator for combining multiple filters. - Operator op = 1; - - // The list of filters to combine. - // Must contain at least one filter. - repeated Filter filters = 2; -} - -// A filter on a specific property. -message PropertyFilter { - // A property filter operator. - enum Operator { - // Unspecified. This value must not be used. - OPERATOR_UNSPECIFIED = 0; - - // Less than. - LESS_THAN = 1; - - // Less than or equal. - LESS_THAN_OR_EQUAL = 2; - - // Greater than. - GREATER_THAN = 3; - - // Greater than or equal. - GREATER_THAN_OR_EQUAL = 4; - - // Equal. - EQUAL = 5; - - // Has ancestor. - HAS_ANCESTOR = 11; - } - - // The property to filter by. - PropertyReference property = 1; - - // The operator to filter by. - Operator op = 2; - - // The value to compare the property to. - Value value = 3; -} - -// A [GQL query](https://cloud.google.com/datastore/docs/apis/gql/gql_reference). -message GqlQuery { - // A string of the format described - // [here](https://cloud.google.com/datastore/docs/apis/gql/gql_reference). - string query_string = 1; - - // When false, the query string must not contain any literals and instead must - // bind all values. For example, - // `SELECT * FROM Kind WHERE a = 'string literal'` is not allowed, while - // `SELECT * FROM Kind WHERE a = @value` is. - bool allow_literals = 2; - - // For each non-reserved named binding site in the query string, there must be - // a named parameter with that name, but not necessarily the inverse. - // - // Key must match regex `[A-Za-z_$][A-Za-z_$0-9]*`, must not match regex - // `__.*__`, and must not be `""`. 
- map<string, GqlQueryParameter> named_bindings = 5; - - // Numbered binding site @1 references the first numbered parameter, - // effectively using 1-based indexing, rather than the usual 0. - // - // For each binding site numbered i in `query_string`, there must be an i-th - // numbered parameter. The inverse must also be true. - repeated GqlQueryParameter positional_bindings = 4; -} - -// A binding parameter for a GQL query. -message GqlQueryParameter { - // The type of parameter. - oneof parameter_type { - // A value parameter. - Value value = 2; - - // A query cursor. Query cursors are returned in query - // result batches. - bytes cursor = 3; - } -} - -// A batch of results produced by a query. -message QueryResultBatch { - // The possible values for the `more_results` field. - enum MoreResultsType { - // Unspecified. This value is never used. - MORE_RESULTS_TYPE_UNSPECIFIED = 0; - - // There may be additional batches to fetch from this query. - NOT_FINISHED = 1; - - // The query is finished, but there may be more results after the limit. - MORE_RESULTS_AFTER_LIMIT = 2; - - // The query is finished, but there may be more results after the end - // cursor. - MORE_RESULTS_AFTER_CURSOR = 4; - - // The query has been exhausted. - NO_MORE_RESULTS = 3; - } - - // The number of results skipped, typically because of an offset. - int32 skipped_results = 6; - - // A cursor that points to the position after the last skipped result. - // Will be set when `skipped_results` != 0. - bytes skipped_cursor = 3; - - // The result type for every entity in `entity_results`. - EntityResult.ResultType entity_result_type = 1; - - // The results for this batch. - repeated EntityResult entity_results = 2; - - // A cursor that points to the position after the last result in the batch. - bytes end_cursor = 4; - - // The state of the query after the current batch. - MoreResultsType more_results = 5; - - // The version number of the snapshot this batch was returned from. - // This applies to the range of results from the query's `start_cursor` (or - // the beginning of the query if no cursor was given) to this batch's - // `end_cursor` (not the query's `end_cursor`). - // - // In a single transaction, subsequent query result batches for the same query - // can have a greater snapshot version number. Each batch's snapshot version - // is valid for all preceding batches. 
- int64 snapshot_version = 7; -} diff --git a/datastore/google/cloud/datastore/_generated/datastore_grpc_pb2.py b/datastore/google/cloud/datastore/_generated/datastore_grpc_pb2.py deleted file mode 100644 index beea35710c6c..000000000000 --- a/datastore/google/cloud/datastore/_generated/datastore_grpc_pb2.py +++ /dev/null @@ -1,301 +0,0 @@ -# BEGIN: Imports from datastore_pb2 -from google.cloud.datastore._generated.datastore_pb2 import AllocateIdsRequest -from google.cloud.datastore._generated.datastore_pb2 import AllocateIdsResponse -from google.cloud.datastore._generated.datastore_pb2 import BeginTransactionRequest -from google.cloud.datastore._generated.datastore_pb2 import BeginTransactionResponse -from google.cloud.datastore._generated.datastore_pb2 import CommitRequest -from google.cloud.datastore._generated.datastore_pb2 import CommitResponse -from google.cloud.datastore._generated.datastore_pb2 import LookupRequest -from google.cloud.datastore._generated.datastore_pb2 import LookupResponse -from google.cloud.datastore._generated.datastore_pb2 import Mutation -from google.cloud.datastore._generated.datastore_pb2 import MutationResult -from google.cloud.datastore._generated.datastore_pb2 import ReadOptions -from google.cloud.datastore._generated.datastore_pb2 import RollbackRequest -from google.cloud.datastore._generated.datastore_pb2 import RollbackResponse -from google.cloud.datastore._generated.datastore_pb2 import RunQueryRequest -from google.cloud.datastore._generated.datastore_pb2 import RunQueryResponse -# END: Imports from datastore_pb2 -import grpc -from grpc.beta import implementations as beta_implementations -from grpc.beta import interfaces as beta_interfaces -from grpc.framework.common import cardinality -from grpc.framework.interfaces.face import utilities as face_utilities - - -class DatastoreStub(object): - """Each RPC normalizes the partition IDs of the keys in its input entities, - and always returns entities with keys with normalized partition IDs. - This applies to all keys and entities, including those in values, except keys - with both an empty path and an empty or unset partition ID. Normalization of - input keys sets the project ID (if not already set) to the project ID from - the request. - - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.Lookup = channel.unary_unary( - '/google.datastore.v1.Datastore/Lookup', - request_serializer=LookupRequest.SerializeToString, - response_deserializer=LookupResponse.FromString, - ) - self.RunQuery = channel.unary_unary( - '/google.datastore.v1.Datastore/RunQuery', - request_serializer=RunQueryRequest.SerializeToString, - response_deserializer=RunQueryResponse.FromString, - ) - self.BeginTransaction = channel.unary_unary( - '/google.datastore.v1.Datastore/BeginTransaction', - request_serializer=BeginTransactionRequest.SerializeToString, - response_deserializer=BeginTransactionResponse.FromString, - ) - self.Commit = channel.unary_unary( - '/google.datastore.v1.Datastore/Commit', - request_serializer=CommitRequest.SerializeToString, - response_deserializer=CommitResponse.FromString, - ) - self.Rollback = channel.unary_unary( - '/google.datastore.v1.Datastore/Rollback', - request_serializer=RollbackRequest.SerializeToString, - response_deserializer=RollbackResponse.FromString, - ) - self.AllocateIds = channel.unary_unary( - '/google.datastore.v1.Datastore/AllocateIds', - request_serializer=AllocateIdsRequest.SerializeToString, - response_deserializer=AllocateIdsResponse.FromString, - ) - - -class DatastoreServicer(object): - """Each RPC normalizes the partition IDs of the keys in its input entities, - and always returns entities with keys with normalized partition IDs. - This applies to all keys and entities, including those in values, except keys - with both an empty path and an empty or unset partition ID. Normalization of - input keys sets the project ID (if not already set) to the project ID from - the request. - - """ - - def Lookup(self, request, context): - """Looks up entities by key. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def RunQuery(self, request, context): - """Queries for entities. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def BeginTransaction(self, request, context): - """Begins a new transaction. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def Commit(self, request, context): - """Commits a transaction, optionally creating, deleting or modifying some - entities. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def Rollback(self, request, context): - """Rolls back a transaction. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def AllocateIds(self, request, context): - """Allocates IDs for the given keys, which is useful for referencing an entity - before it is inserted. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - -def add_DatastoreServicer_to_server(servicer, server): - rpc_method_handlers = { - 'Lookup': grpc.unary_unary_rpc_method_handler( - servicer.Lookup, - request_deserializer=LookupRequest.FromString, - response_serializer=LookupResponse.SerializeToString, - ), - 'RunQuery': grpc.unary_unary_rpc_method_handler( - servicer.RunQuery, - request_deserializer=RunQueryRequest.FromString, - response_serializer=RunQueryResponse.SerializeToString, - ), - 'BeginTransaction': grpc.unary_unary_rpc_method_handler( - servicer.BeginTransaction, - request_deserializer=BeginTransactionRequest.FromString, - response_serializer=BeginTransactionResponse.SerializeToString, - ), - 'Commit': grpc.unary_unary_rpc_method_handler( - servicer.Commit, - request_deserializer=CommitRequest.FromString, - response_serializer=CommitResponse.SerializeToString, - ), - 'Rollback': grpc.unary_unary_rpc_method_handler( - servicer.Rollback, - request_deserializer=RollbackRequest.FromString, - response_serializer=RollbackResponse.SerializeToString, - ), - 'AllocateIds': grpc.unary_unary_rpc_method_handler( - servicer.AllocateIds, - request_deserializer=AllocateIdsRequest.FromString, - response_serializer=AllocateIdsResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'google.datastore.v1.Datastore', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) - - -class BetaDatastoreServicer(object): - """Each RPC normalizes the partition IDs of the keys in its input entities, - and always returns entities with keys with normalized partition IDs. - This applies to all keys and entities, including those in values, except keys - with both an empty path and an empty or unset partition ID. Normalization of - input keys sets the project ID (if not already set) to the project ID from - the request. - - """ - def Lookup(self, request, context): - """Looks up entities by key. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def RunQuery(self, request, context): - """Queries for entities. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def BeginTransaction(self, request, context): - """Begins a new transaction. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def Commit(self, request, context): - """Commits a transaction, optionally creating, deleting or modifying some - entities. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def Rollback(self, request, context): - """Rolls back a transaction. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def AllocateIds(self, request, context): - """Allocates IDs for the given keys, which is useful for referencing an entity - before it is inserted. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - - -class BetaDatastoreStub(object): - """Each RPC normalizes the partition IDs of the keys in its input entities, - and always returns entities with keys with normalized partition IDs. - This applies to all keys and entities, including those in values, except keys - with both an empty path and an empty or unset partition ID. Normalization of - input keys sets the project ID (if not already set) to the project ID from - the request. - - """ - def Lookup(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Looks up entities by key. 
- """ - raise NotImplementedError() - Lookup.future = None - def RunQuery(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Queries for entities. - """ - raise NotImplementedError() - RunQuery.future = None - def BeginTransaction(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Begins a new transaction. - """ - raise NotImplementedError() - BeginTransaction.future = None - def Commit(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Commits a transaction, optionally creating, deleting or modifying some - entities. - """ - raise NotImplementedError() - Commit.future = None - def Rollback(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Rolls back a transaction. - """ - raise NotImplementedError() - Rollback.future = None - def AllocateIds(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Allocates IDs for the given keys, which is useful for referencing an entity - before it is inserted. - """ - raise NotImplementedError() - AllocateIds.future = None - - -def beta_create_Datastore_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): - request_deserializers = { - ('google.datastore.v1.Datastore', 'AllocateIds'): AllocateIdsRequest.FromString, - ('google.datastore.v1.Datastore', 'BeginTransaction'): BeginTransactionRequest.FromString, - ('google.datastore.v1.Datastore', 'Commit'): CommitRequest.FromString, - ('google.datastore.v1.Datastore', 'Lookup'): LookupRequest.FromString, - ('google.datastore.v1.Datastore', 'Rollback'): RollbackRequest.FromString, - ('google.datastore.v1.Datastore', 'RunQuery'): RunQueryRequest.FromString, - } - response_serializers = { - ('google.datastore.v1.Datastore', 'AllocateIds'): AllocateIdsResponse.SerializeToString, - ('google.datastore.v1.Datastore', 'BeginTransaction'): BeginTransactionResponse.SerializeToString, - ('google.datastore.v1.Datastore', 'Commit'): CommitResponse.SerializeToString, - ('google.datastore.v1.Datastore', 'Lookup'): LookupResponse.SerializeToString, - ('google.datastore.v1.Datastore', 'Rollback'): RollbackResponse.SerializeToString, - ('google.datastore.v1.Datastore', 'RunQuery'): RunQueryResponse.SerializeToString, - } - method_implementations = { - ('google.datastore.v1.Datastore', 'AllocateIds'): face_utilities.unary_unary_inline(servicer.AllocateIds), - ('google.datastore.v1.Datastore', 'BeginTransaction'): face_utilities.unary_unary_inline(servicer.BeginTransaction), - ('google.datastore.v1.Datastore', 'Commit'): face_utilities.unary_unary_inline(servicer.Commit), - ('google.datastore.v1.Datastore', 'Lookup'): face_utilities.unary_unary_inline(servicer.Lookup), - ('google.datastore.v1.Datastore', 'Rollback'): face_utilities.unary_unary_inline(servicer.Rollback), - ('google.datastore.v1.Datastore', 'RunQuery'): face_utilities.unary_unary_inline(servicer.RunQuery), - } - server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) - return beta_implementations.server(method_implementations, options=server_options) - - -def beta_create_Datastore_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): - request_serializers = { - ('google.datastore.v1.Datastore', 'AllocateIds'): AllocateIdsRequest.SerializeToString, 
- ('google.datastore.v1.Datastore', 'BeginTransaction'): BeginTransactionRequest.SerializeToString, - ('google.datastore.v1.Datastore', 'Commit'): CommitRequest.SerializeToString, - ('google.datastore.v1.Datastore', 'Lookup'): LookupRequest.SerializeToString, - ('google.datastore.v1.Datastore', 'Rollback'): RollbackRequest.SerializeToString, - ('google.datastore.v1.Datastore', 'RunQuery'): RunQueryRequest.SerializeToString, - } - response_deserializers = { - ('google.datastore.v1.Datastore', 'AllocateIds'): AllocateIdsResponse.FromString, - ('google.datastore.v1.Datastore', 'BeginTransaction'): BeginTransactionResponse.FromString, - ('google.datastore.v1.Datastore', 'Commit'): CommitResponse.FromString, - ('google.datastore.v1.Datastore', 'Lookup'): LookupResponse.FromString, - ('google.datastore.v1.Datastore', 'Rollback'): RollbackResponse.FromString, - ('google.datastore.v1.Datastore', 'RunQuery'): RunQueryResponse.FromString, - } - cardinalities = { - 'AllocateIds': cardinality.Cardinality.UNARY_UNARY, - 'BeginTransaction': cardinality.Cardinality.UNARY_UNARY, - 'Commit': cardinality.Cardinality.UNARY_UNARY, - 'Lookup': cardinality.Cardinality.UNARY_UNARY, - 'Rollback': cardinality.Cardinality.UNARY_UNARY, - 'RunQuery': cardinality.Cardinality.UNARY_UNARY, - } - stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) - return beta_implementations.dynamic_stub(channel, 'google.datastore.v1.Datastore', cardinalities, options=stub_options) diff --git a/datastore/google/cloud/datastore/_generated/datastore_pb2.py b/datastore/google/cloud/datastore/_generated/datastore_pb2.py deleted file mode 100644 index f7a321a5c6b8..000000000000 --- a/datastore/google/cloud/datastore/_generated/datastore_pb2.py +++ /dev/null @@ -1,891 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/datastore/v1/datastore.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.cloud.datastore._generated import entity_pb2 as google_dot_datastore_dot_v1_dot_entity__pb2 -from google.cloud.datastore._generated import query_pb2 as google_dot_datastore_dot_v1_dot_query__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='google/datastore/v1/datastore.proto', - package='google.datastore.v1', - syntax='proto3', - serialized_pb=_b('\n#google/datastore/v1/datastore.proto\x12\x13google.datastore.v1\x1a\x1cgoogle/api/annotations.proto\x1a google/datastore/v1/entity.proto\x1a\x1fgoogle/datastore/v1/query.proto\"\x83\x01\n\rLookupRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x36\n\x0cread_options\x18\x01 \x01(\x0b\x32 .google.datastore.v1.ReadOptions\x12&\n\x04keys\x18\x03 \x03(\x0b\x32\x18.google.datastore.v1.Key\"\xa2\x01\n\x0eLookupResponse\x12\x30\n\x05\x66ound\x18\x01 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12\x32\n\x07missing\x18\x02 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12*\n\x08\x64\x65\x66\x65rred\x18\x03 \x03(\x0b\x32\x18.google.datastore.v1.Key\"\x84\x02\n\x0fRunQueryRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x36\n\x0cpartition_id\x18\x02 \x01(\x0b\x32 .google.datastore.v1.PartitionId\x12\x36\n\x0cread_options\x18\x01 \x01(\x0b\x32 .google.datastore.v1.ReadOptions\x12+\n\x05query\x18\x03 \x01(\x0b\x32\x1a.google.datastore.v1.QueryH\x00\x12\x32\n\tgql_query\x18\x07 \x01(\x0b\x32\x1d.google.datastore.v1.GqlQueryH\x00\x42\x0c\n\nquery_type\"s\n\x10RunQueryResponse\x12\x34\n\x05\x62\x61tch\x18\x01 \x01(\x0b\x32%.google.datastore.v1.QueryResultBatch\x12)\n\x05query\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.Query\"-\n\x17\x42\x65ginTransactionRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\"/\n\x18\x42\x65ginTransactionResponse\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\":\n\x0fRollbackRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\"\x12\n\x10RollbackResponse\"\x83\x02\n\rCommitRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x35\n\x04mode\x18\x05 \x01(\x0e\x32\'.google.datastore.v1.CommitRequest.Mode\x12\x15\n\x0btransaction\x18\x01 \x01(\x0cH\x00\x12\x30\n\tmutations\x18\x06 \x03(\x0b\x32\x1d.google.datastore.v1.Mutation\"F\n\x04Mode\x12\x14\n\x10MODE_UNSPECIFIED\x10\x00\x12\x11\n\rTRANSACTIONAL\x10\x01\x12\x15\n\x11NON_TRANSACTIONAL\x10\x02\x42\x16\n\x14transaction_selector\"f\n\x0e\x43ommitResponse\x12=\n\x10mutation_results\x18\x03 \x03(\x0b\x32#.google.datastore.v1.MutationResult\x12\x15\n\rindex_updates\x18\x04 \x01(\x05\"P\n\x12\x41llocateIdsRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12&\n\x04keys\x18\x01 \x03(\x0b\x32\x18.google.datastore.v1.Key\"=\n\x13\x41llocateIdsResponse\x12&\n\x04keys\x18\x01 \x03(\x0b\x32\x18.google.datastore.v1.Key\"\x87\x02\n\x08Mutation\x12-\n\x06insert\x18\x04 \x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12-\n\x06update\x18\x05 \x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12-\n\x06upsert\x18\x06 
\x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12*\n\x06\x64\x65lete\x18\x07 \x01(\x0b\x32\x18.google.datastore.v1.KeyH\x00\x12\x16\n\x0c\x62\x61se_version\x18\x08 \x01(\x03H\x01\x42\x0b\n\toperationB\x1d\n\x1b\x63onflict_detection_strategy\"c\n\x0eMutationResult\x12%\n\x03key\x18\x03 \x01(\x0b\x32\x18.google.datastore.v1.Key\x12\x0f\n\x07version\x18\x04 \x01(\x03\x12\x19\n\x11\x63onflict_detected\x18\x05 \x01(\x08\"\xd5\x01\n\x0bReadOptions\x12L\n\x10read_consistency\x18\x01 \x01(\x0e\x32\x30.google.datastore.v1.ReadOptions.ReadConsistencyH\x00\x12\x15\n\x0btransaction\x18\x02 \x01(\x0cH\x00\"M\n\x0fReadConsistency\x12 \n\x1cREAD_CONSISTENCY_UNSPECIFIED\x10\x00\x12\n\n\x06STRONG\x10\x01\x12\x0c\n\x08\x45VENTUAL\x10\x02\x42\x12\n\x10\x63onsistency_type2\xdb\x06\n\tDatastore\x12~\n\x06Lookup\x12\".google.datastore.v1.LookupRequest\x1a#.google.datastore.v1.LookupResponse\"+\x82\xd3\xe4\x93\x02%\" /v1/projects/{project_id}:lookup:\x01*\x12\x86\x01\n\x08RunQuery\x12$.google.datastore.v1.RunQueryRequest\x1a%.google.datastore.v1.RunQueryResponse\"-\x82\xd3\xe4\x93\x02\'\"\"/v1/projects/{project_id}:runQuery:\x01*\x12\xa6\x01\n\x10\x42\x65ginTransaction\x12,.google.datastore.v1.BeginTransactionRequest\x1a-.google.datastore.v1.BeginTransactionResponse\"5\x82\xd3\xe4\x93\x02/\"*/v1/projects/{project_id}:beginTransaction:\x01*\x12~\n\x06\x43ommit\x12\".google.datastore.v1.CommitRequest\x1a#.google.datastore.v1.CommitResponse\"+\x82\xd3\xe4\x93\x02%\" /v1/projects/{project_id}:commit:\x01*\x12\x86\x01\n\x08Rollback\x12$.google.datastore.v1.RollbackRequest\x1a%.google.datastore.v1.RollbackResponse\"-\x82\xd3\xe4\x93\x02\'\"\"/v1/projects/{project_id}:rollback:\x01*\x12\x92\x01\n\x0b\x41llocateIds\x12\'.google.datastore.v1.AllocateIdsRequest\x1a(.google.datastore.v1.AllocateIdsResponse\"0\x82\xd3\xe4\x93\x02*\"%/v1/projects/{project_id}:allocateIds:\x01*B+\n\x17\x63om.google.datastore.v1B\x0e\x44\x61tastoreProtoP\x01\x62\x06proto3') - , - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_datastore_dot_v1_dot_entity__pb2.DESCRIPTOR,google_dot_datastore_dot_v1_dot_query__pb2.DESCRIPTOR,]) -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - - - -_COMMITREQUEST_MODE = _descriptor.EnumDescriptor( - name='Mode', - full_name='google.datastore.v1.CommitRequest.Mode', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='MODE_UNSPECIFIED', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='TRANSACTIONAL', index=1, number=1, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='NON_TRANSACTIONAL', index=2, number=2, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=1178, - serialized_end=1248, -) -_sym_db.RegisterEnumDescriptor(_COMMITREQUEST_MODE) - -_READOPTIONS_READCONSISTENCY = _descriptor.EnumDescriptor( - name='ReadConsistency', - full_name='google.datastore.v1.ReadOptions.ReadConsistency', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='READ_CONSISTENCY_UNSPECIFIED', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='STRONG', index=1, number=1, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='EVENTUAL', index=2, number=2, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=2007, - serialized_end=2084, -) -_sym_db.RegisterEnumDescriptor(_READOPTIONS_READCONSISTENCY) - - -_LOOKUPREQUEST = 
_descriptor.Descriptor( - name='LookupRequest', - full_name='google.datastore.v1.LookupRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='project_id', full_name='google.datastore.v1.LookupRequest.project_id', index=0, - number=8, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='read_options', full_name='google.datastore.v1.LookupRequest.read_options', index=1, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='keys', full_name='google.datastore.v1.LookupRequest.keys', index=2, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=158, - serialized_end=289, -) - - -_LOOKUPRESPONSE = _descriptor.Descriptor( - name='LookupResponse', - full_name='google.datastore.v1.LookupResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='found', full_name='google.datastore.v1.LookupResponse.found', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='missing', full_name='google.datastore.v1.LookupResponse.missing', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='deferred', full_name='google.datastore.v1.LookupResponse.deferred', index=2, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=292, - serialized_end=454, -) - - -_RUNQUERYREQUEST = _descriptor.Descriptor( - name='RunQueryRequest', - full_name='google.datastore.v1.RunQueryRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='project_id', full_name='google.datastore.v1.RunQueryRequest.project_id', index=0, - number=8, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='partition_id', full_name='google.datastore.v1.RunQueryRequest.partition_id', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, 
- is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='read_options', full_name='google.datastore.v1.RunQueryRequest.read_options', index=2, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='query', full_name='google.datastore.v1.RunQueryRequest.query', index=3, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='gql_query', full_name='google.datastore.v1.RunQueryRequest.gql_query', index=4, - number=7, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='query_type', full_name='google.datastore.v1.RunQueryRequest.query_type', - index=0, containing_type=None, fields=[]), - ], - serialized_start=457, - serialized_end=717, -) - - -_RUNQUERYRESPONSE = _descriptor.Descriptor( - name='RunQueryResponse', - full_name='google.datastore.v1.RunQueryResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='batch', full_name='google.datastore.v1.RunQueryResponse.batch', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='query', full_name='google.datastore.v1.RunQueryResponse.query', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=719, - serialized_end=834, -) - - -_BEGINTRANSACTIONREQUEST = _descriptor.Descriptor( - name='BeginTransactionRequest', - full_name='google.datastore.v1.BeginTransactionRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='project_id', full_name='google.datastore.v1.BeginTransactionRequest.project_id', index=0, - number=8, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=836, - serialized_end=881, -) - - -_BEGINTRANSACTIONRESPONSE = _descriptor.Descriptor( - name='BeginTransactionResponse', - full_name='google.datastore.v1.BeginTransactionResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='transaction', 
full_name='google.datastore.v1.BeginTransactionResponse.transaction', index=0, - number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=883, - serialized_end=930, -) - - -_ROLLBACKREQUEST = _descriptor.Descriptor( - name='RollbackRequest', - full_name='google.datastore.v1.RollbackRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='project_id', full_name='google.datastore.v1.RollbackRequest.project_id', index=0, - number=8, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='transaction', full_name='google.datastore.v1.RollbackRequest.transaction', index=1, - number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=932, - serialized_end=990, -) - - -_ROLLBACKRESPONSE = _descriptor.Descriptor( - name='RollbackResponse', - full_name='google.datastore.v1.RollbackResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=992, - serialized_end=1010, -) - - -_COMMITREQUEST = _descriptor.Descriptor( - name='CommitRequest', - full_name='google.datastore.v1.CommitRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='project_id', full_name='google.datastore.v1.CommitRequest.project_id', index=0, - number=8, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='mode', full_name='google.datastore.v1.CommitRequest.mode', index=1, - number=5, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='transaction', full_name='google.datastore.v1.CommitRequest.transaction', index=2, - number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='mutations', full_name='google.datastore.v1.CommitRequest.mutations', index=3, - number=6, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - 
enum_types=[ - _COMMITREQUEST_MODE, - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='transaction_selector', full_name='google.datastore.v1.CommitRequest.transaction_selector', - index=0, containing_type=None, fields=[]), - ], - serialized_start=1013, - serialized_end=1272, -) - - -_COMMITRESPONSE = _descriptor.Descriptor( - name='CommitResponse', - full_name='google.datastore.v1.CommitResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='mutation_results', full_name='google.datastore.v1.CommitResponse.mutation_results', index=0, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='index_updates', full_name='google.datastore.v1.CommitResponse.index_updates', index=1, - number=4, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1274, - serialized_end=1376, -) - - -_ALLOCATEIDSREQUEST = _descriptor.Descriptor( - name='AllocateIdsRequest', - full_name='google.datastore.v1.AllocateIdsRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='project_id', full_name='google.datastore.v1.AllocateIdsRequest.project_id', index=0, - number=8, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='keys', full_name='google.datastore.v1.AllocateIdsRequest.keys', index=1, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1378, - serialized_end=1458, -) - - -_ALLOCATEIDSRESPONSE = _descriptor.Descriptor( - name='AllocateIdsResponse', - full_name='google.datastore.v1.AllocateIdsResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='keys', full_name='google.datastore.v1.AllocateIdsResponse.keys', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1460, - serialized_end=1521, -) - - -_MUTATION = _descriptor.Descriptor( - name='Mutation', - full_name='google.datastore.v1.Mutation', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='insert', full_name='google.datastore.v1.Mutation.insert', index=0, - 
number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='update', full_name='google.datastore.v1.Mutation.update', index=1, - number=5, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='upsert', full_name='google.datastore.v1.Mutation.upsert', index=2, - number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='delete', full_name='google.datastore.v1.Mutation.delete', index=3, - number=7, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='base_version', full_name='google.datastore.v1.Mutation.base_version', index=4, - number=8, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='operation', full_name='google.datastore.v1.Mutation.operation', - index=0, containing_type=None, fields=[]), - _descriptor.OneofDescriptor( - name='conflict_detection_strategy', full_name='google.datastore.v1.Mutation.conflict_detection_strategy', - index=1, containing_type=None, fields=[]), - ], - serialized_start=1524, - serialized_end=1787, -) - - -_MUTATIONRESULT = _descriptor.Descriptor( - name='MutationResult', - full_name='google.datastore.v1.MutationResult', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='google.datastore.v1.MutationResult.key', index=0, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='version', full_name='google.datastore.v1.MutationResult.version', index=1, - number=4, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='conflict_detected', full_name='google.datastore.v1.MutationResult.conflict_detected', index=2, - number=5, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1789, - serialized_end=1888, -) - - -_READOPTIONS = _descriptor.Descriptor( - name='ReadOptions', - full_name='google.datastore.v1.ReadOptions', - filename=None, - file=DESCRIPTOR, 
- containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='read_consistency', full_name='google.datastore.v1.ReadOptions.read_consistency', index=0, - number=1, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='transaction', full_name='google.datastore.v1.ReadOptions.transaction', index=1, - number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _READOPTIONS_READCONSISTENCY, - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='consistency_type', full_name='google.datastore.v1.ReadOptions.consistency_type', - index=0, containing_type=None, fields=[]), - ], - serialized_start=1891, - serialized_end=2104, -) - -_LOOKUPREQUEST.fields_by_name['read_options'].message_type = _READOPTIONS -_LOOKUPREQUEST.fields_by_name['keys'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._KEY -_LOOKUPRESPONSE.fields_by_name['found'].message_type = google_dot_datastore_dot_v1_dot_query__pb2._ENTITYRESULT -_LOOKUPRESPONSE.fields_by_name['missing'].message_type = google_dot_datastore_dot_v1_dot_query__pb2._ENTITYRESULT -_LOOKUPRESPONSE.fields_by_name['deferred'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._KEY -_RUNQUERYREQUEST.fields_by_name['partition_id'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._PARTITIONID -_RUNQUERYREQUEST.fields_by_name['read_options'].message_type = _READOPTIONS -_RUNQUERYREQUEST.fields_by_name['query'].message_type = google_dot_datastore_dot_v1_dot_query__pb2._QUERY -_RUNQUERYREQUEST.fields_by_name['gql_query'].message_type = google_dot_datastore_dot_v1_dot_query__pb2._GQLQUERY -_RUNQUERYREQUEST.oneofs_by_name['query_type'].fields.append( - _RUNQUERYREQUEST.fields_by_name['query']) -_RUNQUERYREQUEST.fields_by_name['query'].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name['query_type'] -_RUNQUERYREQUEST.oneofs_by_name['query_type'].fields.append( - _RUNQUERYREQUEST.fields_by_name['gql_query']) -_RUNQUERYREQUEST.fields_by_name['gql_query'].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name['query_type'] -_RUNQUERYRESPONSE.fields_by_name['batch'].message_type = google_dot_datastore_dot_v1_dot_query__pb2._QUERYRESULTBATCH -_RUNQUERYRESPONSE.fields_by_name['query'].message_type = google_dot_datastore_dot_v1_dot_query__pb2._QUERY -_COMMITREQUEST.fields_by_name['mode'].enum_type = _COMMITREQUEST_MODE -_COMMITREQUEST.fields_by_name['mutations'].message_type = _MUTATION -_COMMITREQUEST_MODE.containing_type = _COMMITREQUEST -_COMMITREQUEST.oneofs_by_name['transaction_selector'].fields.append( - _COMMITREQUEST.fields_by_name['transaction']) -_COMMITREQUEST.fields_by_name['transaction'].containing_oneof = _COMMITREQUEST.oneofs_by_name['transaction_selector'] -_COMMITRESPONSE.fields_by_name['mutation_results'].message_type = _MUTATIONRESULT -_ALLOCATEIDSREQUEST.fields_by_name['keys'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._KEY -_ALLOCATEIDSRESPONSE.fields_by_name['keys'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._KEY -_MUTATION.fields_by_name['insert'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._ENTITY 
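
[Editor's aside, not part of the patch: the descriptor wiring immediately above and below defines Mutation's two oneofs ('operation' over insert/update/upsert/delete and 'conflict_detection_strategy' over base_version) as well as CommitRequest's 'transaction_selector'. The following is a minimal illustrative sketch of how those oneofs behave on the generated classes; it assumes the removed modules are still importable under their old paths (google.cloud.datastore._generated.*), and the project ID, kind, and name values are placeholders.]

    from google.cloud.datastore._generated import datastore_pb2
    from google.cloud.datastore._generated import entity_pb2

    # Build a minimal entity with a key (placeholder identifiers).
    entity = entity_pb2.Entity()
    entity.key.partition_id.project_id = 'my-project'
    element = entity.key.path.add()
    element.kind = 'Task'
    element.name = 'sample-task'

    # Writing to one member of the 'operation' oneof selects it ...
    mutation = datastore_pb2.Mutation()
    mutation.upsert.CopyFrom(entity)
    assert mutation.WhichOneof('operation') == 'upsert'

    # ... and writing to another member clears the previous one.
    mutation.delete.CopyFrom(entity.key)
    assert mutation.WhichOneof('operation') == 'delete'

    # A non-transactional commit carrying the mutation.
    request = datastore_pb2.CommitRequest(
        project_id='my-project',
        mode=datastore_pb2.CommitRequest.NON_TRANSACTIONAL,
        mutations=[mutation],
    )
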
-_MUTATION.fields_by_name['update'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._ENTITY -_MUTATION.fields_by_name['upsert'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._ENTITY -_MUTATION.fields_by_name['delete'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._KEY -_MUTATION.oneofs_by_name['operation'].fields.append( - _MUTATION.fields_by_name['insert']) -_MUTATION.fields_by_name['insert'].containing_oneof = _MUTATION.oneofs_by_name['operation'] -_MUTATION.oneofs_by_name['operation'].fields.append( - _MUTATION.fields_by_name['update']) -_MUTATION.fields_by_name['update'].containing_oneof = _MUTATION.oneofs_by_name['operation'] -_MUTATION.oneofs_by_name['operation'].fields.append( - _MUTATION.fields_by_name['upsert']) -_MUTATION.fields_by_name['upsert'].containing_oneof = _MUTATION.oneofs_by_name['operation'] -_MUTATION.oneofs_by_name['operation'].fields.append( - _MUTATION.fields_by_name['delete']) -_MUTATION.fields_by_name['delete'].containing_oneof = _MUTATION.oneofs_by_name['operation'] -_MUTATION.oneofs_by_name['conflict_detection_strategy'].fields.append( - _MUTATION.fields_by_name['base_version']) -_MUTATION.fields_by_name['base_version'].containing_oneof = _MUTATION.oneofs_by_name['conflict_detection_strategy'] -_MUTATIONRESULT.fields_by_name['key'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._KEY -_READOPTIONS.fields_by_name['read_consistency'].enum_type = _READOPTIONS_READCONSISTENCY -_READOPTIONS_READCONSISTENCY.containing_type = _READOPTIONS -_READOPTIONS.oneofs_by_name['consistency_type'].fields.append( - _READOPTIONS.fields_by_name['read_consistency']) -_READOPTIONS.fields_by_name['read_consistency'].containing_oneof = _READOPTIONS.oneofs_by_name['consistency_type'] -_READOPTIONS.oneofs_by_name['consistency_type'].fields.append( - _READOPTIONS.fields_by_name['transaction']) -_READOPTIONS.fields_by_name['transaction'].containing_oneof = _READOPTIONS.oneofs_by_name['consistency_type'] -DESCRIPTOR.message_types_by_name['LookupRequest'] = _LOOKUPREQUEST -DESCRIPTOR.message_types_by_name['LookupResponse'] = _LOOKUPRESPONSE -DESCRIPTOR.message_types_by_name['RunQueryRequest'] = _RUNQUERYREQUEST -DESCRIPTOR.message_types_by_name['RunQueryResponse'] = _RUNQUERYRESPONSE -DESCRIPTOR.message_types_by_name['BeginTransactionRequest'] = _BEGINTRANSACTIONREQUEST -DESCRIPTOR.message_types_by_name['BeginTransactionResponse'] = _BEGINTRANSACTIONRESPONSE -DESCRIPTOR.message_types_by_name['RollbackRequest'] = _ROLLBACKREQUEST -DESCRIPTOR.message_types_by_name['RollbackResponse'] = _ROLLBACKRESPONSE -DESCRIPTOR.message_types_by_name['CommitRequest'] = _COMMITREQUEST -DESCRIPTOR.message_types_by_name['CommitResponse'] = _COMMITRESPONSE -DESCRIPTOR.message_types_by_name['AllocateIdsRequest'] = _ALLOCATEIDSREQUEST -DESCRIPTOR.message_types_by_name['AllocateIdsResponse'] = _ALLOCATEIDSRESPONSE -DESCRIPTOR.message_types_by_name['Mutation'] = _MUTATION -DESCRIPTOR.message_types_by_name['MutationResult'] = _MUTATIONRESULT -DESCRIPTOR.message_types_by_name['ReadOptions'] = _READOPTIONS - -LookupRequest = _reflection.GeneratedProtocolMessageType('LookupRequest', (_message.Message,), dict( - DESCRIPTOR = _LOOKUPREQUEST, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.LookupRequest) - )) -_sym_db.RegisterMessage(LookupRequest) - -LookupResponse = _reflection.GeneratedProtocolMessageType('LookupResponse', (_message.Message,), dict( - DESCRIPTOR = _LOOKUPRESPONSE, - __module__ = 
'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.LookupResponse) - )) -_sym_db.RegisterMessage(LookupResponse) - -RunQueryRequest = _reflection.GeneratedProtocolMessageType('RunQueryRequest', (_message.Message,), dict( - DESCRIPTOR = _RUNQUERYREQUEST, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.RunQueryRequest) - )) -_sym_db.RegisterMessage(RunQueryRequest) - -RunQueryResponse = _reflection.GeneratedProtocolMessageType('RunQueryResponse', (_message.Message,), dict( - DESCRIPTOR = _RUNQUERYRESPONSE, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.RunQueryResponse) - )) -_sym_db.RegisterMessage(RunQueryResponse) - -BeginTransactionRequest = _reflection.GeneratedProtocolMessageType('BeginTransactionRequest', (_message.Message,), dict( - DESCRIPTOR = _BEGINTRANSACTIONREQUEST, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.BeginTransactionRequest) - )) -_sym_db.RegisterMessage(BeginTransactionRequest) - -BeginTransactionResponse = _reflection.GeneratedProtocolMessageType('BeginTransactionResponse', (_message.Message,), dict( - DESCRIPTOR = _BEGINTRANSACTIONRESPONSE, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.BeginTransactionResponse) - )) -_sym_db.RegisterMessage(BeginTransactionResponse) - -RollbackRequest = _reflection.GeneratedProtocolMessageType('RollbackRequest', (_message.Message,), dict( - DESCRIPTOR = _ROLLBACKREQUEST, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.RollbackRequest) - )) -_sym_db.RegisterMessage(RollbackRequest) - -RollbackResponse = _reflection.GeneratedProtocolMessageType('RollbackResponse', (_message.Message,), dict( - DESCRIPTOR = _ROLLBACKRESPONSE, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.RollbackResponse) - )) -_sym_db.RegisterMessage(RollbackResponse) - -CommitRequest = _reflection.GeneratedProtocolMessageType('CommitRequest', (_message.Message,), dict( - DESCRIPTOR = _COMMITREQUEST, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.CommitRequest) - )) -_sym_db.RegisterMessage(CommitRequest) - -CommitResponse = _reflection.GeneratedProtocolMessageType('CommitResponse', (_message.Message,), dict( - DESCRIPTOR = _COMMITRESPONSE, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.CommitResponse) - )) -_sym_db.RegisterMessage(CommitResponse) - -AllocateIdsRequest = _reflection.GeneratedProtocolMessageType('AllocateIdsRequest', (_message.Message,), dict( - DESCRIPTOR = _ALLOCATEIDSREQUEST, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.AllocateIdsRequest) - )) -_sym_db.RegisterMessage(AllocateIdsRequest) - -AllocateIdsResponse = _reflection.GeneratedProtocolMessageType('AllocateIdsResponse', (_message.Message,), dict( - DESCRIPTOR = _ALLOCATEIDSRESPONSE, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.AllocateIdsResponse) - )) -_sym_db.RegisterMessage(AllocateIdsResponse) - -Mutation = _reflection.GeneratedProtocolMessageType('Mutation', (_message.Message,), dict( - DESCRIPTOR = 
_MUTATION, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.Mutation) - )) -_sym_db.RegisterMessage(Mutation) - -MutationResult = _reflection.GeneratedProtocolMessageType('MutationResult', (_message.Message,), dict( - DESCRIPTOR = _MUTATIONRESULT, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.MutationResult) - )) -_sym_db.RegisterMessage(MutationResult) - -ReadOptions = _reflection.GeneratedProtocolMessageType('ReadOptions', (_message.Message,), dict( - DESCRIPTOR = _READOPTIONS, - __module__ = 'google.datastore.v1.datastore_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.ReadOptions) - )) -_sym_db.RegisterMessage(ReadOptions) - - -DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.google.datastore.v1B\016DatastoreProtoP\001')) -# @@protoc_insertion_point(module_scope) diff --git a/datastore/google/cloud/datastore/_generated/entity_pb2.py b/datastore/google/cloud/datastore/_generated/entity_pb2.py deleted file mode 100644 index 44d530bdb74a..000000000000 --- a/datastore/google/cloud/datastore/_generated/entity_pb2.py +++ /dev/null @@ -1,495 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/datastore/v1/entity.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.type import latlng_pb2 as google_dot_type_dot_latlng__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='google/datastore/v1/entity.proto', - package='google.datastore.v1', - syntax='proto3', - serialized_pb=_b('\n google/datastore/v1/entity.proto\x12\x13google.datastore.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x18google/type/latlng.proto\"7\n\x0bPartitionId\x12\x12\n\nproject_id\x18\x02 \x01(\t\x12\x14\n\x0cnamespace_id\x18\x04 \x01(\t\"\xb7\x01\n\x03Key\x12\x36\n\x0cpartition_id\x18\x01 \x01(\x0b\x32 .google.datastore.v1.PartitionId\x12\x32\n\x04path\x18\x02 \x03(\x0b\x32$.google.datastore.v1.Key.PathElement\x1a\x44\n\x0bPathElement\x12\x0c\n\x04kind\x18\x01 \x01(\t\x12\x0c\n\x02id\x18\x02 \x01(\x03H\x00\x12\x0e\n\x04name\x18\x03 \x01(\tH\x00\x42\t\n\x07id_type\"8\n\nArrayValue\x12*\n\x06values\x18\x01 \x03(\x0b\x32\x1a.google.datastore.v1.Value\"\xf1\x03\n\x05Value\x12\x30\n\nnull_value\x18\x0b \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x17\n\rboolean_value\x18\x01 \x01(\x08H\x00\x12\x17\n\rinteger_value\x18\x02 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x03 \x01(\x01H\x00\x12\x35\n\x0ftimestamp_value\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12-\n\tkey_value\x18\x05 \x01(\x0b\x32\x18.google.datastore.v1.KeyH\x00\x12\x16\n\x0cstring_value\x18\x11 \x01(\tH\x00\x12\x14\n\nblob_value\x18\x12 
\x01(\x0cH\x00\x12.\n\x0fgeo_point_value\x18\x08 \x01(\x0b\x32\x13.google.type.LatLngH\x00\x12\x33\n\x0c\x65ntity_value\x18\x06 \x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12\x36\n\x0b\x61rray_value\x18\t \x01(\x0b\x32\x1f.google.datastore.v1.ArrayValueH\x00\x12\x0f\n\x07meaning\x18\x0e \x01(\x05\x12\x1c\n\x14\x65xclude_from_indexes\x18\x13 \x01(\x08\x42\x0c\n\nvalue_type\"\xbf\x01\n\x06\x45ntity\x12%\n\x03key\x18\x01 \x01(\x0b\x32\x18.google.datastore.v1.Key\x12?\n\nproperties\x18\x03 \x03(\x0b\x32+.google.datastore.v1.Entity.PropertiesEntry\x1aM\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12)\n\x05value\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.Value:\x02\x38\x01\x42(\n\x17\x63om.google.datastore.v1B\x0b\x45ntityProtoP\x01\x62\x06proto3') - , - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_type_dot_latlng__pb2.DESCRIPTOR,]) -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - - - - -_PARTITIONID = _descriptor.Descriptor( - name='PartitionId', - full_name='google.datastore.v1.PartitionId', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='project_id', full_name='google.datastore.v1.PartitionId.project_id', index=0, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='namespace_id', full_name='google.datastore.v1.PartitionId.namespace_id', index=1, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=176, - serialized_end=231, -) - - -_KEY_PATHELEMENT = _descriptor.Descriptor( - name='PathElement', - full_name='google.datastore.v1.Key.PathElement', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='kind', full_name='google.datastore.v1.Key.PathElement.kind', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='id', full_name='google.datastore.v1.Key.PathElement.id', index=1, - number=2, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='name', full_name='google.datastore.v1.Key.PathElement.name', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='id_type', full_name='google.datastore.v1.Key.PathElement.id_type', - 
index=0, containing_type=None, fields=[]), - ], - serialized_start=349, - serialized_end=417, -) - -_KEY = _descriptor.Descriptor( - name='Key', - full_name='google.datastore.v1.Key', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='partition_id', full_name='google.datastore.v1.Key.partition_id', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='path', full_name='google.datastore.v1.Key.path', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[_KEY_PATHELEMENT, ], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=234, - serialized_end=417, -) - - -_ARRAYVALUE = _descriptor.Descriptor( - name='ArrayValue', - full_name='google.datastore.v1.ArrayValue', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='values', full_name='google.datastore.v1.ArrayValue.values', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=419, - serialized_end=475, -) - - -_VALUE = _descriptor.Descriptor( - name='Value', - full_name='google.datastore.v1.Value', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='null_value', full_name='google.datastore.v1.Value.null_value', index=0, - number=11, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='boolean_value', full_name='google.datastore.v1.Value.boolean_value', index=1, - number=1, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='integer_value', full_name='google.datastore.v1.Value.integer_value', index=2, - number=2, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='double_value', full_name='google.datastore.v1.Value.double_value', index=3, - number=3, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='timestamp_value', full_name='google.datastore.v1.Value.timestamp_value', index=4, - number=10, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - 
is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='key_value', full_name='google.datastore.v1.Value.key_value', index=5, - number=5, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='string_value', full_name='google.datastore.v1.Value.string_value', index=6, - number=17, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='blob_value', full_name='google.datastore.v1.Value.blob_value', index=7, - number=18, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='geo_point_value', full_name='google.datastore.v1.Value.geo_point_value', index=8, - number=8, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='entity_value', full_name='google.datastore.v1.Value.entity_value', index=9, - number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='array_value', full_name='google.datastore.v1.Value.array_value', index=10, - number=9, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='meaning', full_name='google.datastore.v1.Value.meaning', index=11, - number=14, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='exclude_from_indexes', full_name='google.datastore.v1.Value.exclude_from_indexes', index=12, - number=19, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='value_type', full_name='google.datastore.v1.Value.value_type', - index=0, containing_type=None, fields=[]), - ], - serialized_start=478, - serialized_end=975, -) - - -_ENTITY_PROPERTIESENTRY = _descriptor.Descriptor( - name='PropertiesEntry', - full_name='google.datastore.v1.Entity.PropertiesEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='google.datastore.v1.Entity.PropertiesEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, 
extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='value', full_name='google.datastore.v1.Entity.PropertiesEntry.value', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1092, - serialized_end=1169, -) - -_ENTITY = _descriptor.Descriptor( - name='Entity', - full_name='google.datastore.v1.Entity', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='google.datastore.v1.Entity.key', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='properties', full_name='google.datastore.v1.Entity.properties', index=1, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[_ENTITY_PROPERTIESENTRY, ], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=978, - serialized_end=1169, -) - -_KEY_PATHELEMENT.containing_type = _KEY -_KEY_PATHELEMENT.oneofs_by_name['id_type'].fields.append( - _KEY_PATHELEMENT.fields_by_name['id']) -_KEY_PATHELEMENT.fields_by_name['id'].containing_oneof = _KEY_PATHELEMENT.oneofs_by_name['id_type'] -_KEY_PATHELEMENT.oneofs_by_name['id_type'].fields.append( - _KEY_PATHELEMENT.fields_by_name['name']) -_KEY_PATHELEMENT.fields_by_name['name'].containing_oneof = _KEY_PATHELEMENT.oneofs_by_name['id_type'] -_KEY.fields_by_name['partition_id'].message_type = _PARTITIONID -_KEY.fields_by_name['path'].message_type = _KEY_PATHELEMENT -_ARRAYVALUE.fields_by_name['values'].message_type = _VALUE -_VALUE.fields_by_name['null_value'].enum_type = google_dot_protobuf_dot_struct__pb2._NULLVALUE -_VALUE.fields_by_name['timestamp_value'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_VALUE.fields_by_name['key_value'].message_type = _KEY -_VALUE.fields_by_name['geo_point_value'].message_type = google_dot_type_dot_latlng__pb2._LATLNG -_VALUE.fields_by_name['entity_value'].message_type = _ENTITY -_VALUE.fields_by_name['array_value'].message_type = _ARRAYVALUE -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['null_value']) -_VALUE.fields_by_name['null_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['boolean_value']) -_VALUE.fields_by_name['boolean_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['integer_value']) -_VALUE.fields_by_name['integer_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['double_value']) -_VALUE.fields_by_name['double_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] 
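
[Editor's aside, not part of the patch: the wiring around this point makes Value a tagged union -- every typed field (null, boolean, integer, double, timestamp, key, string, blob, geo_point, entity, array) belongs to the 'value_type' oneof, while Entity.properties is a map from property name to Value. A brief illustrative sketch of that behavior follows; it assumes the removed module is still importable as google.cloud.datastore._generated.entity_pb2, and the property names used are made up.]

    from google.cloud.datastore._generated import entity_pb2

    value = entity_pb2.Value()
    value.string_value = 'hello'
    assert value.WhichOneof('value_type') == 'string_value'

    # Assigning a different member of the oneof clears the previous one.
    value.integer_value = 42
    assert value.WhichOneof('value_type') == 'integer_value'

    # Entity.properties is a map<string, Value>; message-valued map entries
    # are created on first access.
    entity = entity_pb2.Entity()
    entity.properties['answer'].CopyFrom(value)
    entity.properties['flag'].boolean_value = True
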
-_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['timestamp_value']) -_VALUE.fields_by_name['timestamp_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['key_value']) -_VALUE.fields_by_name['key_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['string_value']) -_VALUE.fields_by_name['string_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['blob_value']) -_VALUE.fields_by_name['blob_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['geo_point_value']) -_VALUE.fields_by_name['geo_point_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['entity_value']) -_VALUE.fields_by_name['entity_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['array_value']) -_VALUE.fields_by_name['array_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_ENTITY_PROPERTIESENTRY.fields_by_name['value'].message_type = _VALUE -_ENTITY_PROPERTIESENTRY.containing_type = _ENTITY -_ENTITY.fields_by_name['key'].message_type = _KEY -_ENTITY.fields_by_name['properties'].message_type = _ENTITY_PROPERTIESENTRY -DESCRIPTOR.message_types_by_name['PartitionId'] = _PARTITIONID -DESCRIPTOR.message_types_by_name['Key'] = _KEY -DESCRIPTOR.message_types_by_name['ArrayValue'] = _ARRAYVALUE -DESCRIPTOR.message_types_by_name['Value'] = _VALUE -DESCRIPTOR.message_types_by_name['Entity'] = _ENTITY - -PartitionId = _reflection.GeneratedProtocolMessageType('PartitionId', (_message.Message,), dict( - DESCRIPTOR = _PARTITIONID, - __module__ = 'google.datastore.v1.entity_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.PartitionId) - )) -_sym_db.RegisterMessage(PartitionId) - -Key = _reflection.GeneratedProtocolMessageType('Key', (_message.Message,), dict( - - PathElement = _reflection.GeneratedProtocolMessageType('PathElement', (_message.Message,), dict( - DESCRIPTOR = _KEY_PATHELEMENT, - __module__ = 'google.datastore.v1.entity_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.Key.PathElement) - )) - , - DESCRIPTOR = _KEY, - __module__ = 'google.datastore.v1.entity_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.Key) - )) -_sym_db.RegisterMessage(Key) -_sym_db.RegisterMessage(Key.PathElement) - -ArrayValue = _reflection.GeneratedProtocolMessageType('ArrayValue', (_message.Message,), dict( - DESCRIPTOR = _ARRAYVALUE, - __module__ = 'google.datastore.v1.entity_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.ArrayValue) - )) -_sym_db.RegisterMessage(ArrayValue) - -Value = _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), dict( - DESCRIPTOR = _VALUE, - __module__ = 'google.datastore.v1.entity_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.Value) - )) -_sym_db.RegisterMessage(Value) - -Entity = _reflection.GeneratedProtocolMessageType('Entity', (_message.Message,), dict( - - PropertiesEntry = _reflection.GeneratedProtocolMessageType('PropertiesEntry', (_message.Message,), dict( - DESCRIPTOR = _ENTITY_PROPERTIESENTRY, - __module__ = 'google.datastore.v1.entity_pb2' - # 
@@protoc_insertion_point(class_scope:google.datastore.v1.Entity.PropertiesEntry) - )) - , - DESCRIPTOR = _ENTITY, - __module__ = 'google.datastore.v1.entity_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.Entity) - )) -_sym_db.RegisterMessage(Entity) -_sym_db.RegisterMessage(Entity.PropertiesEntry) - - -DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.google.datastore.v1B\013EntityProtoP\001')) -_ENTITY_PROPERTIESENTRY.has_options = True -_ENTITY_PROPERTIESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) -# @@protoc_insertion_point(module_scope) diff --git a/datastore/google/cloud/datastore/_generated/query_pb2.py b/datastore/google/cloud/datastore/_generated/query_pb2.py deleted file mode 100644 index 7569f225d53a..000000000000 --- a/datastore/google/cloud/datastore/_generated/query_pb2.py +++ /dev/null @@ -1,934 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/datastore/v1/query.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.cloud.datastore._generated import entity_pb2 as google_dot_datastore_dot_v1_dot_entity__pb2 -from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -from google.type import latlng_pb2 as google_dot_type_dot_latlng__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='google/datastore/v1/query.proto', - package='google.datastore.v1', - syntax='proto3', - serialized_pb=_b('\n\x1fgoogle/datastore/v1/query.proto\x12\x13google.datastore.v1\x1a\x1cgoogle/api/annotations.proto\x1a google/datastore/v1/entity.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x18google/type/latlng.proto\"\xaf\x01\n\x0c\x45ntityResult\x12+\n\x06\x65ntity\x18\x01 \x01(\x0b\x32\x1b.google.datastore.v1.Entity\x12\x0f\n\x07version\x18\x04 \x01(\x03\x12\x0e\n\x06\x63ursor\x18\x03 \x01(\x0c\"Q\n\nResultType\x12\x1b\n\x17RESULT_TYPE_UNSPECIFIED\x10\x00\x12\x08\n\x04\x46ULL\x10\x01\x12\x0e\n\nPROJECTION\x10\x02\x12\x0c\n\x08KEY_ONLY\x10\x03\"\xf2\x02\n\x05Query\x12\x33\n\nprojection\x18\x02 \x03(\x0b\x32\x1f.google.datastore.v1.Projection\x12\x31\n\x04kind\x18\x03 \x03(\x0b\x32#.google.datastore.v1.KindExpression\x12+\n\x06\x66ilter\x18\x04 \x01(\x0b\x32\x1b.google.datastore.v1.Filter\x12\x31\n\x05order\x18\x05 \x03(\x0b\x32\".google.datastore.v1.PropertyOrder\x12;\n\x0b\x64istinct_on\x18\x06 \x03(\x0b\x32&.google.datastore.v1.PropertyReference\x12\x14\n\x0cstart_cursor\x18\x07 \x01(\x0c\x12\x12\n\nend_cursor\x18\x08 \x01(\x0c\x12\x0e\n\x06offset\x18\n \x01(\x05\x12*\n\x05limit\x18\x0c \x01(\x0b\x32\x1b.google.protobuf.Int32Value\"\x1e\n\x0eKindExpression\x12\x0c\n\x04name\x18\x01 \x01(\t\"!\n\x11PropertyReference\x12\x0c\n\x04name\x18\x02 \x01(\t\"F\n\nProjection\x12\x38\n\x08property\x18\x01 \x01(\x0b\x32&.google.datastore.v1.PropertyReference\"\xd1\x01\n\rPropertyOrder\x12\x38\n\x08property\x18\x01 \x01(\x0b\x32&.google.datastore.v1.PropertyReference\x12?\n\tdirection\x18\x02 
\x01(\x0e\x32,.google.datastore.v1.PropertyOrder.Direction\"E\n\tDirection\x12\x19\n\x15\x44IRECTION_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02\"\x99\x01\n\x06\x46ilter\x12@\n\x10\x63omposite_filter\x18\x01 \x01(\x0b\x32$.google.datastore.v1.CompositeFilterH\x00\x12>\n\x0fproperty_filter\x18\x02 \x01(\x0b\x32#.google.datastore.v1.PropertyFilterH\x00\x42\r\n\x0b\x66ilter_type\"\xa9\x01\n\x0f\x43ompositeFilter\x12\x39\n\x02op\x18\x01 \x01(\x0e\x32-.google.datastore.v1.CompositeFilter.Operator\x12,\n\x07\x66ilters\x18\x02 \x03(\x0b\x32\x1b.google.datastore.v1.Filter\"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01\"\xc7\x02\n\x0ePropertyFilter\x12\x38\n\x08property\x18\x01 \x01(\x0b\x32&.google.datastore.v1.PropertyReference\x12\x38\n\x02op\x18\x02 \x01(\x0e\x32,.google.datastore.v1.PropertyFilter.Operator\x12)\n\x05value\x18\x03 \x01(\x0b\x32\x1a.google.datastore.v1.Value\"\x95\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x10\n\x0cHAS_ANCESTOR\x10\x0b\"\xa5\x02\n\x08GqlQuery\x12\x14\n\x0cquery_string\x18\x01 \x01(\t\x12\x16\n\x0e\x61llow_literals\x18\x02 \x01(\x08\x12H\n\x0enamed_bindings\x18\x05 \x03(\x0b\x32\x30.google.datastore.v1.GqlQuery.NamedBindingsEntry\x12\x43\n\x13positional_bindings\x18\x04 \x03(\x0b\x32&.google.datastore.v1.GqlQueryParameter\x1a\\\n\x12NamedBindingsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.google.datastore.v1.GqlQueryParameter:\x02\x38\x01\"d\n\x11GqlQueryParameter\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.ValueH\x00\x12\x10\n\x06\x63ursor\x18\x03 \x01(\x0cH\x00\x42\x10\n\x0eparameter_type\"\xde\x03\n\x10QueryResultBatch\x12\x17\n\x0fskipped_results\x18\x06 \x01(\x05\x12\x16\n\x0eskipped_cursor\x18\x03 \x01(\x0c\x12H\n\x12\x65ntity_result_type\x18\x01 \x01(\x0e\x32,.google.datastore.v1.EntityResult.ResultType\x12\x39\n\x0e\x65ntity_results\x18\x02 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12\x12\n\nend_cursor\x18\x04 \x01(\x0c\x12K\n\x0cmore_results\x18\x05 \x01(\x0e\x32\x35.google.datastore.v1.QueryResultBatch.MoreResultsType\x12\x18\n\x10snapshot_version\x18\x07 \x01(\x03\"\x98\x01\n\x0fMoreResultsType\x12!\n\x1dMORE_RESULTS_TYPE_UNSPECIFIED\x10\x00\x12\x10\n\x0cNOT_FINISHED\x10\x01\x12\x1c\n\x18MORE_RESULTS_AFTER_LIMIT\x10\x02\x12\x1d\n\x19MORE_RESULTS_AFTER_CURSOR\x10\x04\x12\x13\n\x0fNO_MORE_RESULTS\x10\x03\x42\'\n\x17\x63om.google.datastore.v1B\nQueryProtoP\x01\x62\x06proto3') - , - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_datastore_dot_v1_dot_entity__pb2.DESCRIPTOR,google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,google_dot_type_dot_latlng__pb2.DESCRIPTOR,]) -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - - - -_ENTITYRESULT_RESULTTYPE = _descriptor.EnumDescriptor( - name='ResultType', - full_name='google.datastore.v1.EntityResult.ResultType', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='RESULT_TYPE_UNSPECIFIED', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='FULL', index=1, number=1, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='PROJECTION', index=2, number=2, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='KEY_ONLY', index=3, number=3, - options=None, - type=None), 
- ], - containing_type=None, - options=None, - serialized_start=273, - serialized_end=354, -) -_sym_db.RegisterEnumDescriptor(_ENTITYRESULT_RESULTTYPE) - -_PROPERTYORDER_DIRECTION = _descriptor.EnumDescriptor( - name='Direction', - full_name='google.datastore.v1.PropertyOrder.Direction', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='DIRECTION_UNSPECIFIED', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='ASCENDING', index=1, number=1, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='DESCENDING', index=2, number=2, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=1009, - serialized_end=1078, -) -_sym_db.RegisterEnumDescriptor(_PROPERTYORDER_DIRECTION) - -_COMPOSITEFILTER_OPERATOR = _descriptor.EnumDescriptor( - name='Operator', - full_name='google.datastore.v1.CompositeFilter.Operator', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='OPERATOR_UNSPECIFIED', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='AND', index=1, number=1, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=1361, - serialized_end=1406, -) -_sym_db.RegisterEnumDescriptor(_COMPOSITEFILTER_OPERATOR) - -_PROPERTYFILTER_OPERATOR = _descriptor.EnumDescriptor( - name='Operator', - full_name='google.datastore.v1.PropertyFilter.Operator', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='OPERATOR_UNSPECIFIED', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='LESS_THAN', index=1, number=1, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='LESS_THAN_OR_EQUAL', index=2, number=2, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='GREATER_THAN', index=3, number=3, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='GREATER_THAN_OR_EQUAL', index=4, number=4, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='EQUAL', index=5, number=5, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='HAS_ANCESTOR', index=6, number=11, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=1587, - serialized_end=1736, -) -_sym_db.RegisterEnumDescriptor(_PROPERTYFILTER_OPERATOR) - -_QUERYRESULTBATCH_MORERESULTSTYPE = _descriptor.EnumDescriptor( - name='MoreResultsType', - full_name='google.datastore.v1.QueryResultBatch.MoreResultsType', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='MORE_RESULTS_TYPE_UNSPECIFIED', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='NOT_FINISHED', index=1, number=1, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='MORE_RESULTS_AFTER_LIMIT', index=2, number=2, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='MORE_RESULTS_AFTER_CURSOR', index=3, number=4, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='NO_MORE_RESULTS', index=4, number=3, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=2463, - serialized_end=2615, -) -_sym_db.RegisterEnumDescriptor(_QUERYRESULTBATCH_MORERESULTSTYPE) - - -_ENTITYRESULT = _descriptor.Descriptor( - name='EntityResult', - full_name='google.datastore.v1.EntityResult', - filename=None, - 
file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='entity', full_name='google.datastore.v1.EntityResult.entity', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='version', full_name='google.datastore.v1.EntityResult.version', index=1, - number=4, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='cursor', full_name='google.datastore.v1.EntityResult.cursor', index=2, - number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _ENTITYRESULT_RESULTTYPE, - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=179, - serialized_end=354, -) - - -_QUERY = _descriptor.Descriptor( - name='Query', - full_name='google.datastore.v1.Query', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='projection', full_name='google.datastore.v1.Query.projection', index=0, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='kind', full_name='google.datastore.v1.Query.kind', index=1, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='filter', full_name='google.datastore.v1.Query.filter', index=2, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='order', full_name='google.datastore.v1.Query.order', index=3, - number=5, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='distinct_on', full_name='google.datastore.v1.Query.distinct_on', index=4, - number=6, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='start_cursor', full_name='google.datastore.v1.Query.start_cursor', index=5, - number=7, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='end_cursor', full_name='google.datastore.v1.Query.end_cursor', index=6, - number=8, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, 
containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='offset', full_name='google.datastore.v1.Query.offset', index=7, - number=10, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='limit', full_name='google.datastore.v1.Query.limit', index=8, - number=12, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=357, - serialized_end=727, -) - - -_KINDEXPRESSION = _descriptor.Descriptor( - name='KindExpression', - full_name='google.datastore.v1.KindExpression', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.datastore.v1.KindExpression.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=729, - serialized_end=759, -) - - -_PROPERTYREFERENCE = _descriptor.Descriptor( - name='PropertyReference', - full_name='google.datastore.v1.PropertyReference', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.datastore.v1.PropertyReference.name', index=0, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=761, - serialized_end=794, -) - - -_PROJECTION = _descriptor.Descriptor( - name='Projection', - full_name='google.datastore.v1.Projection', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='property', full_name='google.datastore.v1.Projection.property', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=796, - serialized_end=866, -) - - -_PROPERTYORDER = _descriptor.Descriptor( - name='PropertyOrder', - full_name='google.datastore.v1.PropertyOrder', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='property', full_name='google.datastore.v1.PropertyOrder.property', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, 
containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='direction', full_name='google.datastore.v1.PropertyOrder.direction', index=1, - number=2, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _PROPERTYORDER_DIRECTION, - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=869, - serialized_end=1078, -) - - -_FILTER = _descriptor.Descriptor( - name='Filter', - full_name='google.datastore.v1.Filter', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='composite_filter', full_name='google.datastore.v1.Filter.composite_filter', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='property_filter', full_name='google.datastore.v1.Filter.property_filter', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='filter_type', full_name='google.datastore.v1.Filter.filter_type', - index=0, containing_type=None, fields=[]), - ], - serialized_start=1081, - serialized_end=1234, -) - - -_COMPOSITEFILTER = _descriptor.Descriptor( - name='CompositeFilter', - full_name='google.datastore.v1.CompositeFilter', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='op', full_name='google.datastore.v1.CompositeFilter.op', index=0, - number=1, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='filters', full_name='google.datastore.v1.CompositeFilter.filters', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _COMPOSITEFILTER_OPERATOR, - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1237, - serialized_end=1406, -) - - -_PROPERTYFILTER = _descriptor.Descriptor( - name='PropertyFilter', - full_name='google.datastore.v1.PropertyFilter', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='property', full_name='google.datastore.v1.PropertyFilter.property', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='op', full_name='google.datastore.v1.PropertyFilter.op', index=1, - number=2, 
type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='value', full_name='google.datastore.v1.PropertyFilter.value', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _PROPERTYFILTER_OPERATOR, - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1409, - serialized_end=1736, -) - - -_GQLQUERY_NAMEDBINDINGSENTRY = _descriptor.Descriptor( - name='NamedBindingsEntry', - full_name='google.datastore.v1.GqlQuery.NamedBindingsEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='google.datastore.v1.GqlQuery.NamedBindingsEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='value', full_name='google.datastore.v1.GqlQuery.NamedBindingsEntry.value', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1940, - serialized_end=2032, -) - -_GQLQUERY = _descriptor.Descriptor( - name='GqlQuery', - full_name='google.datastore.v1.GqlQuery', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='query_string', full_name='google.datastore.v1.GqlQuery.query_string', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='allow_literals', full_name='google.datastore.v1.GqlQuery.allow_literals', index=1, - number=2, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='named_bindings', full_name='google.datastore.v1.GqlQuery.named_bindings', index=2, - number=5, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='positional_bindings', full_name='google.datastore.v1.GqlQuery.positional_bindings', index=3, - number=4, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[_GQLQUERY_NAMEDBINDINGSENTRY, ], - enum_types=[ - ], - 
options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1739, - serialized_end=2032, -) - - -_GQLQUERYPARAMETER = _descriptor.Descriptor( - name='GqlQueryParameter', - full_name='google.datastore.v1.GqlQueryParameter', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='value', full_name='google.datastore.v1.GqlQueryParameter.value', index=0, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='cursor', full_name='google.datastore.v1.GqlQueryParameter.cursor', index=1, - number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='parameter_type', full_name='google.datastore.v1.GqlQueryParameter.parameter_type', - index=0, containing_type=None, fields=[]), - ], - serialized_start=2034, - serialized_end=2134, -) - - -_QUERYRESULTBATCH = _descriptor.Descriptor( - name='QueryResultBatch', - full_name='google.datastore.v1.QueryResultBatch', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='skipped_results', full_name='google.datastore.v1.QueryResultBatch.skipped_results', index=0, - number=6, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='skipped_cursor', full_name='google.datastore.v1.QueryResultBatch.skipped_cursor', index=1, - number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='entity_result_type', full_name='google.datastore.v1.QueryResultBatch.entity_result_type', index=2, - number=1, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='entity_results', full_name='google.datastore.v1.QueryResultBatch.entity_results', index=3, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='end_cursor', full_name='google.datastore.v1.QueryResultBatch.end_cursor', index=4, - number=4, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='more_results', full_name='google.datastore.v1.QueryResultBatch.more_results', index=5, - number=5, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - 
is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='snapshot_version', full_name='google.datastore.v1.QueryResultBatch.snapshot_version', index=6, - number=7, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _QUERYRESULTBATCH_MORERESULTSTYPE, - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2137, - serialized_end=2615, -) - -_ENTITYRESULT.fields_by_name['entity'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._ENTITY -_ENTITYRESULT_RESULTTYPE.containing_type = _ENTITYRESULT -_QUERY.fields_by_name['projection'].message_type = _PROJECTION -_QUERY.fields_by_name['kind'].message_type = _KINDEXPRESSION -_QUERY.fields_by_name['filter'].message_type = _FILTER -_QUERY.fields_by_name['order'].message_type = _PROPERTYORDER -_QUERY.fields_by_name['distinct_on'].message_type = _PROPERTYREFERENCE -_QUERY.fields_by_name['limit'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT32VALUE -_PROJECTION.fields_by_name['property'].message_type = _PROPERTYREFERENCE -_PROPERTYORDER.fields_by_name['property'].message_type = _PROPERTYREFERENCE -_PROPERTYORDER.fields_by_name['direction'].enum_type = _PROPERTYORDER_DIRECTION -_PROPERTYORDER_DIRECTION.containing_type = _PROPERTYORDER -_FILTER.fields_by_name['composite_filter'].message_type = _COMPOSITEFILTER -_FILTER.fields_by_name['property_filter'].message_type = _PROPERTYFILTER -_FILTER.oneofs_by_name['filter_type'].fields.append( - _FILTER.fields_by_name['composite_filter']) -_FILTER.fields_by_name['composite_filter'].containing_oneof = _FILTER.oneofs_by_name['filter_type'] -_FILTER.oneofs_by_name['filter_type'].fields.append( - _FILTER.fields_by_name['property_filter']) -_FILTER.fields_by_name['property_filter'].containing_oneof = _FILTER.oneofs_by_name['filter_type'] -_COMPOSITEFILTER.fields_by_name['op'].enum_type = _COMPOSITEFILTER_OPERATOR -_COMPOSITEFILTER.fields_by_name['filters'].message_type = _FILTER -_COMPOSITEFILTER_OPERATOR.containing_type = _COMPOSITEFILTER -_PROPERTYFILTER.fields_by_name['property'].message_type = _PROPERTYREFERENCE -_PROPERTYFILTER.fields_by_name['op'].enum_type = _PROPERTYFILTER_OPERATOR -_PROPERTYFILTER.fields_by_name['value'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._VALUE -_PROPERTYFILTER_OPERATOR.containing_type = _PROPERTYFILTER -_GQLQUERY_NAMEDBINDINGSENTRY.fields_by_name['value'].message_type = _GQLQUERYPARAMETER -_GQLQUERY_NAMEDBINDINGSENTRY.containing_type = _GQLQUERY -_GQLQUERY.fields_by_name['named_bindings'].message_type = _GQLQUERY_NAMEDBINDINGSENTRY -_GQLQUERY.fields_by_name['positional_bindings'].message_type = _GQLQUERYPARAMETER -_GQLQUERYPARAMETER.fields_by_name['value'].message_type = google_dot_datastore_dot_v1_dot_entity__pb2._VALUE -_GQLQUERYPARAMETER.oneofs_by_name['parameter_type'].fields.append( - _GQLQUERYPARAMETER.fields_by_name['value']) -_GQLQUERYPARAMETER.fields_by_name['value'].containing_oneof = _GQLQUERYPARAMETER.oneofs_by_name['parameter_type'] -_GQLQUERYPARAMETER.oneofs_by_name['parameter_type'].fields.append( - _GQLQUERYPARAMETER.fields_by_name['cursor']) -_GQLQUERYPARAMETER.fields_by_name['cursor'].containing_oneof = _GQLQUERYPARAMETER.oneofs_by_name['parameter_type'] -_QUERYRESULTBATCH.fields_by_name['entity_result_type'].enum_type = 
_ENTITYRESULT_RESULTTYPE -_QUERYRESULTBATCH.fields_by_name['entity_results'].message_type = _ENTITYRESULT -_QUERYRESULTBATCH.fields_by_name['more_results'].enum_type = _QUERYRESULTBATCH_MORERESULTSTYPE -_QUERYRESULTBATCH_MORERESULTSTYPE.containing_type = _QUERYRESULTBATCH -DESCRIPTOR.message_types_by_name['EntityResult'] = _ENTITYRESULT -DESCRIPTOR.message_types_by_name['Query'] = _QUERY -DESCRIPTOR.message_types_by_name['KindExpression'] = _KINDEXPRESSION -DESCRIPTOR.message_types_by_name['PropertyReference'] = _PROPERTYREFERENCE -DESCRIPTOR.message_types_by_name['Projection'] = _PROJECTION -DESCRIPTOR.message_types_by_name['PropertyOrder'] = _PROPERTYORDER -DESCRIPTOR.message_types_by_name['Filter'] = _FILTER -DESCRIPTOR.message_types_by_name['CompositeFilter'] = _COMPOSITEFILTER -DESCRIPTOR.message_types_by_name['PropertyFilter'] = _PROPERTYFILTER -DESCRIPTOR.message_types_by_name['GqlQuery'] = _GQLQUERY -DESCRIPTOR.message_types_by_name['GqlQueryParameter'] = _GQLQUERYPARAMETER -DESCRIPTOR.message_types_by_name['QueryResultBatch'] = _QUERYRESULTBATCH - -EntityResult = _reflection.GeneratedProtocolMessageType('EntityResult', (_message.Message,), dict( - DESCRIPTOR = _ENTITYRESULT, - __module__ = 'google.datastore.v1.query_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.EntityResult) - )) -_sym_db.RegisterMessage(EntityResult) - -Query = _reflection.GeneratedProtocolMessageType('Query', (_message.Message,), dict( - DESCRIPTOR = _QUERY, - __module__ = 'google.datastore.v1.query_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.Query) - )) -_sym_db.RegisterMessage(Query) - -KindExpression = _reflection.GeneratedProtocolMessageType('KindExpression', (_message.Message,), dict( - DESCRIPTOR = _KINDEXPRESSION, - __module__ = 'google.datastore.v1.query_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.KindExpression) - )) -_sym_db.RegisterMessage(KindExpression) - -PropertyReference = _reflection.GeneratedProtocolMessageType('PropertyReference', (_message.Message,), dict( - DESCRIPTOR = _PROPERTYREFERENCE, - __module__ = 'google.datastore.v1.query_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyReference) - )) -_sym_db.RegisterMessage(PropertyReference) - -Projection = _reflection.GeneratedProtocolMessageType('Projection', (_message.Message,), dict( - DESCRIPTOR = _PROJECTION, - __module__ = 'google.datastore.v1.query_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.Projection) - )) -_sym_db.RegisterMessage(Projection) - -PropertyOrder = _reflection.GeneratedProtocolMessageType('PropertyOrder', (_message.Message,), dict( - DESCRIPTOR = _PROPERTYORDER, - __module__ = 'google.datastore.v1.query_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyOrder) - )) -_sym_db.RegisterMessage(PropertyOrder) - -Filter = _reflection.GeneratedProtocolMessageType('Filter', (_message.Message,), dict( - DESCRIPTOR = _FILTER, - __module__ = 'google.datastore.v1.query_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.Filter) - )) -_sym_db.RegisterMessage(Filter) - -CompositeFilter = _reflection.GeneratedProtocolMessageType('CompositeFilter', (_message.Message,), dict( - DESCRIPTOR = _COMPOSITEFILTER, - __module__ = 'google.datastore.v1.query_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.CompositeFilter) - )) -_sym_db.RegisterMessage(CompositeFilter) - -PropertyFilter = _reflection.GeneratedProtocolMessageType('PropertyFilter', (_message.Message,), dict( - DESCRIPTOR 
= _PROPERTYFILTER, - __module__ = 'google.datastore.v1.query_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyFilter) - )) -_sym_db.RegisterMessage(PropertyFilter) - -GqlQuery = _reflection.GeneratedProtocolMessageType('GqlQuery', (_message.Message,), dict( - - NamedBindingsEntry = _reflection.GeneratedProtocolMessageType('NamedBindingsEntry', (_message.Message,), dict( - DESCRIPTOR = _GQLQUERY_NAMEDBINDINGSENTRY, - __module__ = 'google.datastore.v1.query_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQuery.NamedBindingsEntry) - )) - , - DESCRIPTOR = _GQLQUERY, - __module__ = 'google.datastore.v1.query_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQuery) - )) -_sym_db.RegisterMessage(GqlQuery) -_sym_db.RegisterMessage(GqlQuery.NamedBindingsEntry) - -GqlQueryParameter = _reflection.GeneratedProtocolMessageType('GqlQueryParameter', (_message.Message,), dict( - DESCRIPTOR = _GQLQUERYPARAMETER, - __module__ = 'google.datastore.v1.query_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQueryParameter) - )) -_sym_db.RegisterMessage(GqlQueryParameter) - -QueryResultBatch = _reflection.GeneratedProtocolMessageType('QueryResultBatch', (_message.Message,), dict( - DESCRIPTOR = _QUERYRESULTBATCH, - __module__ = 'google.datastore.v1.query_pb2' - # @@protoc_insertion_point(class_scope:google.datastore.v1.QueryResultBatch) - )) -_sym_db.RegisterMessage(QueryResultBatch) - - -DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.google.datastore.v1B\nQueryProtoP\001')) -_GQLQUERY_NAMEDBINDINGSENTRY.has_options = True -_GQLQUERY_NAMEDBINDINGSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) -# @@protoc_insertion_point(module_scope) diff --git a/datastore/google/cloud/datastore/_http.py b/datastore/google/cloud/datastore/_http.py index b66626fabccb..a6bae476dff8 100644 --- a/datastore/google/cloud/datastore/_http.py +++ b/datastore/google/cloud/datastore/_http.py @@ -25,14 +25,14 @@ from google.cloud.environment_vars import DISABLE_GRPC from google.cloud.environment_vars import GCD_HOST from google.cloud import exceptions -from google.cloud.datastore._generated import datastore_pb2 as _datastore_pb2 +from google.cloud.grpc.datastore.v1 import datastore_pb2 as _datastore_pb2 try: from grpc import StatusCode - from google.cloud.datastore._generated import datastore_grpc_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2_grpc except ImportError: # pragma: NO COVER _GRPC_ERROR_MAPPING = {} _HAVE_GRPC = False - datastore_grpc_pb2 = None + datastore_pb2_grpc = None StatusCode = None else: # NOTE: We don't include OK -> 200 or CANCELLED -> 499 @@ -147,10 +147,10 @@ def lookup(self, project, request_pb): :param project: The project to connect to. This is usually your project name in the cloud console. - :type request_pb: :class:`._generated.datastore_pb2.LookupRequest` + :type request_pb: :class:`.datastore_pb2.LookupRequest` :param request_pb: The request protobuf object. - :rtype: :class:`._generated.datastore_pb2.LookupResponse` + :rtype: :class:`.datastore_pb2.LookupResponse` :returns: The returned protobuf response object. """ return self._rpc(project, 'lookup', request_pb, @@ -163,10 +163,10 @@ def run_query(self, project, request_pb): :param project: The project to connect to. This is usually your project name in the cloud console. 
- :type request_pb: :class:`._generated.datastore_pb2.RunQueryRequest` + :type request_pb: :class:`.datastore_pb2.RunQueryRequest` :param request_pb: The request protobuf object. - :rtype: :class:`._generated.datastore_pb2.RunQueryResponse` + :rtype: :class:`.datastore_pb2.RunQueryResponse` :returns: The returned protobuf response object. """ return self._rpc(project, 'runQuery', request_pb, @@ -180,10 +180,10 @@ def begin_transaction(self, project, request_pb): usually your project name in the cloud console. :type request_pb: - :class:`._generated.datastore_pb2.BeginTransactionRequest` + :class:`.datastore_pb2.BeginTransactionRequest` :param request_pb: The request protobuf object. - :rtype: :class:`._generated.datastore_pb2.BeginTransactionResponse` + :rtype: :class:`.datastore_pb2.BeginTransactionResponse` :returns: The returned protobuf response object. """ return self._rpc(project, 'beginTransaction', request_pb, @@ -196,10 +196,10 @@ def commit(self, project, request_pb): :param project: The project to connect to. This is usually your project name in the cloud console. - :type request_pb: :class:`._generated.datastore_pb2.CommitRequest` + :type request_pb: :class:`.datastore_pb2.CommitRequest` :param request_pb: The request protobuf object. - :rtype: :class:`._generated.datastore_pb2.CommitResponse` + :rtype: :class:`.datastore_pb2.CommitResponse` :returns: The returned protobuf response object. """ return self._rpc(project, 'commit', request_pb, @@ -212,10 +212,10 @@ def rollback(self, project, request_pb): :param project: The project to connect to. This is usually your project name in the cloud console. - :type request_pb: :class:`._generated.datastore_pb2.RollbackRequest` + :type request_pb: :class:`.datastore_pb2.RollbackRequest` :param request_pb: The request protobuf object. - :rtype: :class:`._generated.datastore_pb2.RollbackResponse` + :rtype: :class:`.datastore_pb2.RollbackResponse` :returns: The returned protobuf response object. """ return self._rpc(project, 'rollback', request_pb, @@ -228,10 +228,10 @@ def allocate_ids(self, project, request_pb): :param project: The project to connect to. This is usually your project name in the cloud console. - :type request_pb: :class:`._generated.datastore_pb2.AllocateIdsRequest` + :type request_pb: :class:`.datastore_pb2.AllocateIdsRequest` :param request_pb: The request protobuf object. - :rtype: :class:`._generated.datastore_pb2.AllocateIdsResponse` + :rtype: :class:`.datastore_pb2.AllocateIdsResponse` :returns: The returned protobuf response object. """ return self._rpc(project, 'allocateIds', request_pb, @@ -280,10 +280,10 @@ def __init__(self, connection, secure): if secure: self._stub = make_secure_stub(connection.credentials, connection.USER_AGENT, - datastore_grpc_pb2.DatastoreStub, + datastore_pb2_grpc.DatastoreStub, connection.host) else: - self._stub = make_insecure_stub(datastore_grpc_pb2.DatastoreStub, + self._stub = make_insecure_stub(datastore_pb2_grpc.DatastoreStub, connection.host) def lookup(self, project, request_pb): @@ -293,10 +293,10 @@ def lookup(self, project, request_pb): :param project: The project to connect to. This is usually your project name in the cloud console. - :type request_pb: :class:`._generated.datastore_pb2.LookupRequest` + :type request_pb: :class:`.datastore_pb2.LookupRequest` :param request_pb: The request protobuf object. - :rtype: :class:`._generated.datastore_pb2.LookupResponse` + :rtype: :class:`.datastore_pb2.LookupResponse` :returns: The returned protobuf response object. 
""" request_pb.project_id = project @@ -310,10 +310,10 @@ def run_query(self, project, request_pb): :param project: The project to connect to. This is usually your project name in the cloud console. - :type request_pb: :class:`._generated.datastore_pb2.RunQueryRequest` + :type request_pb: :class:`.datastore_pb2.RunQueryRequest` :param request_pb: The request protobuf object. - :rtype: :class:`._generated.datastore_pb2.RunQueryResponse` + :rtype: :class:`.datastore_pb2.RunQueryResponse` :returns: The returned protobuf response object. """ request_pb.project_id = project @@ -328,10 +328,10 @@ def begin_transaction(self, project, request_pb): usually your project name in the cloud console. :type request_pb: - :class:`._generated.datastore_pb2.BeginTransactionRequest` + :class:`.datastore_pb2.BeginTransactionRequest` :param request_pb: The request protobuf object. - :rtype: :class:`._generated.datastore_pb2.BeginTransactionResponse` + :rtype: :class:`.datastore_pb2.BeginTransactionResponse` :returns: The returned protobuf response object. """ request_pb.project_id = project @@ -345,10 +345,10 @@ def commit(self, project, request_pb): :param project: The project to connect to. This is usually your project name in the cloud console. - :type request_pb: :class:`._generated.datastore_pb2.CommitRequest` + :type request_pb: :class:`.datastore_pb2.CommitRequest` :param request_pb: The request protobuf object. - :rtype: :class:`._generated.datastore_pb2.CommitResponse` + :rtype: :class:`.datastore_pb2.CommitResponse` :returns: The returned protobuf response object. """ request_pb.project_id = project @@ -362,10 +362,10 @@ def rollback(self, project, request_pb): :param project: The project to connect to. This is usually your project name in the cloud console. - :type request_pb: :class:`._generated.datastore_pb2.RollbackRequest` + :type request_pb: :class:`.datastore_pb2.RollbackRequest` :param request_pb: The request protobuf object. - :rtype: :class:`._generated.datastore_pb2.RollbackResponse` + :rtype: :class:`.datastore_pb2.RollbackResponse` :returns: The returned protobuf response object. """ request_pb.project_id = project @@ -379,10 +379,10 @@ def allocate_ids(self, project, request_pb): :param project: The project to connect to. This is usually your project name in the cloud console. - :type request_pb: :class:`._generated.datastore_pb2.AllocateIdsRequest` + :type request_pb: :class:`.datastore_pb2.AllocateIdsRequest` :param request_pb: The request protobuf object. - :rtype: :class:`._generated.datastore_pb2.AllocateIdsResponse` + :rtype: :class:`.datastore_pb2.AllocateIdsResponse` :returns: The returned protobuf response object. """ request_pb.project_id = project @@ -469,8 +469,7 @@ def lookup(self, project, key_pbs, Maps the ``DatastoreService.Lookup`` protobuf RPC. This uses mostly protobufs - (:class:`google.cloud.datastore._generated.entity_pb2.Key` as input - and :class:`google.cloud.datastore._generated.entity_pb2.Entity` + (:class:`.entity_pb2.Key` as input and :class:`.entity_pb2.Entity` as output). It is used under the hood in :meth:`Client.get() <.datastore.client.Client.get>`: @@ -493,7 +492,7 @@ def lookup(self, project, key_pbs, :param project: The project to look up the keys in. :type key_pbs: list of - :class:`google.cloud.datastore._generated.entity_pb2.Key` + :class:`.entity_pb2.Key` :param key_pbs: The keys to retrieve from the datastore. 
:type eventual: bool @@ -509,9 +508,9 @@ def lookup(self, project, key_pbs, :rtype: tuple :returns: A triple of (``results``, ``missing``, ``deferred``) where both ``results`` and ``missing`` are lists of - :class:`google.cloud.datastore._generated.entity_pb2.Entity` + :class:`.entity_pb2.Entity` and ``deferred`` is a list of - :class:`google.cloud.datastore._generated.entity_pb2.Key`. + :class:`.entity_pb2.Key`. """ lookup_request = _datastore_pb2.LookupRequest() _set_read_options(lookup_request, eventual, transaction_id) @@ -543,7 +542,7 @@ def run_query(self, project, query_pb, namespace=None, :type project: str :param project: The project over which to run the query. - :type query_pb: :class:`.datastore._generated.query_pb2.Query` + :type query_pb: :class:`.query_pb2.Query` :param query_pb: The Protobuf representing the query to run. :type namespace: str @@ -602,7 +601,7 @@ def commit(self, project, request, transaction_id): :type project: str :param project: The project to which the transaction applies. - :type request: :class:`._generated.datastore_pb2.CommitRequest` + :type request: :class:`.datastore_pb2.CommitRequest` :param request: The protobuf with the mutations being committed. :type transaction_id: str @@ -616,7 +615,7 @@ def commit(self, project, request, transaction_id): :rtype: tuple :returns: The pair of the number of index updates and a list of - :class:`._generated.entity_pb2.Key` for each incomplete key + :class:`.entity_pb2.Key` for each incomplete key that was completed in the commit. """ if transaction_id: @@ -654,10 +653,10 @@ def allocate_ids(self, project, key_pbs): :param project: The project to which the transaction belongs. :type key_pbs: list of - :class:`google.cloud.datastore._generated.entity_pb2.Key` + :class:`.entity_pb2.Key` :param key_pbs: The keys for which the backend should allocate IDs. - :rtype: list of :class:`.datastore._generated.entity_pb2.Key` + :rtype: list of :class:`.entity_pb2.Key` :returns: An equal number of keys, with IDs filled in by the backend. """ request = _datastore_pb2.AllocateIdsRequest() @@ -691,7 +690,7 @@ def _add_keys_to_request(request_field_pb, key_pbs): :type request_field_pb: `RepeatedCompositeFieldContainer` :param request_field_pb: A repeated proto field that contains keys. - :type key_pbs: list of :class:`.datastore._generated.entity_pb2.Key` + :type key_pbs: list of :class:`.entity_pb2.Key` :param key_pbs: The keys to add to a request. """ for key_pb in key_pbs: @@ -701,12 +700,12 @@ def _add_keys_to_request(request_field_pb, key_pbs): def _parse_commit_response(commit_response_pb): """Extract response data from a commit response. - :type commit_response_pb: :class:`._generated.datastore_pb2.CommitResponse` + :type commit_response_pb: :class:`.datastore_pb2.CommitResponse` :param commit_response_pb: The protobuf response from a commit request. :rtype: tuple :returns: The pair of the number of index updates and a list of - :class:`._generated.entity_pb2.Key` for each incomplete key + :class:`.entity_pb2.Key` for each incomplete key that was completed in the commit. 
""" mut_results = commit_response_pb.mutation_results diff --git a/datastore/google/cloud/datastore/batch.py b/datastore/google/cloud/datastore/batch.py index 2c09f357ee2e..00854d2007b6 100644 --- a/datastore/google/cloud/datastore/batch.py +++ b/datastore/google/cloud/datastore/batch.py @@ -22,7 +22,7 @@ """ from google.cloud.datastore import helpers -from google.cloud.datastore._generated import datastore_pb2 as _datastore_pb2 +from google.cloud.grpc.datastore.v1 import datastore_pb2 as _datastore_pb2 class Batch(object): @@ -106,7 +106,7 @@ def namespace(self): def _add_partial_key_entity_pb(self): """Adds a new mutation for an entity with a partial key. - :rtype: :class:`google.cloud.datastore._generated.entity_pb2.Entity` + :rtype: :class:`.entity_pb2.Entity` :returns: The newly created entity protobuf that will be updated and sent with a commit. """ @@ -116,7 +116,7 @@ def _add_partial_key_entity_pb(self): def _add_complete_key_entity_pb(self): """Adds a new mutation for an entity with a completed key. - :rtype: :class:`google.cloud.datastore._generated.entity_pb2.Entity` + :rtype: :class:`.entity_pb2.Entity` :returns: The newly created entity protobuf that will be updated and sent with a commit. """ @@ -129,7 +129,7 @@ def _add_complete_key_entity_pb(self): def _add_delete_key_pb(self): """Adds a new mutation for a key to be deleted. - :rtype: :class:`google.cloud.datastore._generated.entity_pb2.Key` + :rtype: :class:`.entity_pb2.Key` :returns: The newly created key protobuf that will be deleted when sent with a commit. """ @@ -147,7 +147,7 @@ def mutations(self): built-up so far. :rtype: iterable - :returns: The list of :class:`._generated.datastore_pb2.Mutation` + :returns: The list of :class:`.datastore_pb2.Mutation` protobufs to be sent in the commit request. """ return self._commit_request.mutations @@ -302,7 +302,7 @@ def _assign_entity_to_pb(entity_pb, entity): Helper method for ``Batch.put``. - :type entity_pb: :class:`._generated.entity_pb2.Entity` + :type entity_pb: :class:`.entity_pb2.Entity` :param entity_pb: The entity owned by a mutation. :type entity: :class:`google.cloud.datastore.entity.Entity` diff --git a/datastore/google/cloud/datastore/client.py b/datastore/google/cloud/datastore/client.py index 809c39a902ed..42b0c6497f88 100644 --- a/datastore/google/cloud/datastore/client.py +++ b/datastore/google/cloud/datastore/client.py @@ -78,7 +78,7 @@ def _extended_lookup(connection, project, key_pbs, :type project: str :param project: The project to make the request for. - :type key_pbs: list of :class:`._generated.entity_pb2.Key` + :type key_pbs: list of :class:`.entity_pb2.Key` :param key_pbs: The keys to retrieve from the datastore. :type missing: list @@ -100,7 +100,7 @@ def _extended_lookup(connection, project, key_pbs, the given transaction. Incompatible with ``eventual==True``. - :rtype: list of :class:`._generated.entity_pb2.Entity` + :rtype: list of :class:`.entity_pb2.Entity` :returns: The requested entities. :raises: :class:`ValueError` if missing / deferred are not null or empty list. @@ -157,26 +157,29 @@ class Client(_BaseClient, _ClientProjectMixin): :type namespace: str :param namespace: (optional) namespace to pass to proxied API methods. - :type credentials: :class:`oauth2client.client.OAuth2Credentials` or - :class:`NoneType` - :param credentials: The OAuth2 Credentials to use for the connection - owned by this client. If not passed (and if no ``http`` - object is passed), falls back to the default inferred - from the environment. 
+ :type credentials: :class:`~google.auth.credentials.Credentials` + :param credentials: (Optional) The OAuth2 Credentials to use for this + client. If not passed (and if no ``http`` object is + passed), falls back to the default inferred from the + environment. - :type http: :class:`httplib2.Http` or class that defines ``request()``. - :param http: An optional HTTP object to make requests. If not passed, an + :type http: :class:`~httplib2.Http` + :param http: (Optional) HTTP object to make requests. Can be any object + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. If not passed, an ``http`` object is created that is bound to the ``credentials`` for the current object. """ - _connection_class = Connection def __init__(self, project=None, namespace=None, credentials=None, http=None): _ClientProjectMixin.__init__(self, project=project) + _BaseClient.__init__(self, credentials=credentials, http=http) + self._connection = Connection( + credentials=self._credentials, http=self._http) + self.namespace = namespace self._batch_stack = _LocalStack() - super(Client, self).__init__(credentials, http) @staticmethod def _determine_default(project): diff --git a/datastore/google/cloud/datastore/helpers.py b/datastore/google/cloud/datastore/helpers.py index 13723bdb0fa5..ced1b83f20e7 100644 --- a/datastore/google/cloud/datastore/helpers.py +++ b/datastore/google/cloud/datastore/helpers.py @@ -26,7 +26,7 @@ from google.cloud._helpers import _datetime_to_pb_timestamp from google.cloud._helpers import _pb_timestamp_to_datetime -from google.cloud.datastore._generated import entity_pb2 as _entity_pb2 +from google.cloud.grpc.datastore.v1 import entity_pb2 as _entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import Key @@ -36,7 +36,7 @@ def _get_meaning(value_pb, is_list=False): """Get the meaning from a protobuf value. - :type value_pb: :class:`google.cloud.datastore._generated.entity_pb2.Value` + :type value_pb: :class:`.entity_pb2.Value` :param value_pb: The protobuf value to be checked for an associated meaning. @@ -77,13 +77,13 @@ def _get_meaning(value_pb, is_list=False): def _new_value_pb(entity_pb, name): """Add (by name) a new ``Value`` protobuf to an entity protobuf. - :type entity_pb: :class:`.datastore._generated.entity_pb2.Entity` + :type entity_pb: :class:`.entity_pb2.Entity` :param entity_pb: An entity protobuf to add a new property to. :type name: str :param name: The name of the new property. - :rtype: :class:`google.cloud.datastore._generated.entity_pb2.Value` + :rtype: :class:`.entity_pb2.Value` :returns: The new ``Value`` protobuf that was added to the entity. """ return entity_pb.properties.get_or_create(name) @@ -92,7 +92,7 @@ def _new_value_pb(entity_pb, name): def _property_tuples(entity_pb): """Iterator of name, ``Value`` tuples from entity properties. - :type entity_pb: :class:`.datastore._generated.entity_pb2.Entity` + :type entity_pb: :class:`.entity_pb2.Entity` :param entity_pb: An entity protobuf to add a new property to. :rtype: :class:`generator` @@ -108,7 +108,7 @@ def entity_from_protobuf(pb): The protobuf should be one returned from the Cloud Datastore Protobuf API. - :type pb: :class:`google.cloud.datastore._generated.entity_pb2.Entity` + :type pb: :class:`.entity_pb2.Entity` :param pb: The Protobuf representing the entity. 
:rtype: :class:`google.cloud.datastore.entity.Entity` @@ -168,7 +168,7 @@ def _set_pb_meaning_from_entity(entity, name, value, value_pb, :type value: object :param value: The current value stored as property ``name``. - :type value_pb: :class:`google.cloud.datastore._generated.entity_pb2.Value` + :type value_pb: :class:`.entity_pb2.Value` :param value_pb: The protobuf value to add meaning / meanings to. :type is_list: bool @@ -203,7 +203,7 @@ def entity_to_protobuf(entity): :type entity: :class:`google.cloud.datastore.entity.Entity` :param entity: The entity to be turned into a protobuf. - :rtype: :class:`google.cloud.datastore._generated.entity_pb2.Entity` + :rtype: :class:`.entity_pb2.Entity` :returns: The protobuf representing the entity. """ entity_pb = _entity_pb2.Entity() @@ -241,7 +241,7 @@ def key_from_protobuf(pb): The protobuf should be one returned from the Cloud Datastore Protobuf API. - :type pb: :class:`google.cloud.datastore._generated.entity_pb2.Key` + :type pb: :class:`.entity_pb2.Key` :param pb: The Protobuf representing the key. :rtype: :class:`google.cloud.datastore.key.Key` @@ -339,7 +339,7 @@ def _get_value_from_value_pb(value_pb): Some work is done to coerce the return value into a more useful type (particularly in the case of a timestamp value, or a key value). - :type value_pb: :class:`google.cloud.datastore._generated.entity_pb2.Value` + :type value_pb: :class:`.entity_pb2.Value` :param value_pb: The Value Protobuf. :rtype: object @@ -399,7 +399,7 @@ def _set_protobuf_value(value_pb, val): Some value types (entities, keys, lists) cannot be directly assigned; this function handles them correctly. - :type value_pb: :class:`google.cloud.datastore._generated.entity_pb2.Value` + :type value_pb: :class:`.entity_pb2.Value` :param value_pb: The value protobuf to which the value is being assigned. :type val: :class:`datetime.datetime`, boolean, float, integer, string, diff --git a/datastore/google/cloud/datastore/key.py b/datastore/google/cloud/datastore/key.py index c33e590a2581..0af884c67301 100644 --- a/datastore/google/cloud/datastore/key.py +++ b/datastore/google/cloud/datastore/key.py @@ -17,7 +17,7 @@ import copy import six -from google.cloud.datastore._generated import entity_pb2 as _entity_pb2 +from google.cloud.grpc.datastore.v1 import entity_pb2 as _entity_pb2 class Key(object): @@ -261,7 +261,7 @@ def completed_key(self, id_or_name): def to_protobuf(self): """Return a protobuf corresponding to the key. - :rtype: :class:`google.cloud.datastore._generated.entity_pb2.Key` + :rtype: :class:`.entity_pb2.Key` :returns: The protobuf representing the key. """ key = _entity_pb2.Key() diff --git a/datastore/google/cloud/datastore/query.py b/datastore/google/cloud/datastore/query.py index a6f6c845e17f..e8989a41a9dd 100644 --- a/datastore/google/cloud/datastore/query.py +++ b/datastore/google/cloud/datastore/query.py @@ -20,7 +20,7 @@ from google.cloud.iterator import Iterator as BaseIterator from google.cloud.iterator import Page -from google.cloud.datastore._generated import query_pb2 as _query_pb2 +from google.cloud.grpc.datastore.v1 import query_pb2 as _query_pb2 from google.cloud.datastore import helpers from google.cloud.datastore.key import Key @@ -417,7 +417,7 @@ def _build_protobuf(self): Relies on the current state of the iterator. :rtype: - :class:`google.cloud.datastore._generated.query_pb2.Query` + :class:`.query_pb2.Query` :returns: The query protobuf object for the current state of the iterator. 
""" @@ -452,7 +452,7 @@ def _process_query_results(self, entity_pbs, cursor_as_bytes, :param cursor_as_bytes: The end cursor of the query. :type more_results_enum: - :class:`._generated.query_pb2.QueryResultBatch.MoreResultsType` + :class:`.query_pb2.QueryResultBatch.MoreResultsType` :param more_results_enum: Enum indicating if there are more results. :type skipped_results: int @@ -508,7 +508,7 @@ def _pb_from_query(query): :type query: :class:`Query` :param query: The source query. - :rtype: :class:`google.cloud.datastore._generated.query_pb2.Query` + :rtype: :class:`.query_pb2.Query` :returns: A protobuf that can be sent to the protobuf API. N.b. that it does not contain "in-flight" fields for ongoing query executions (cursors, offset, limit). @@ -575,7 +575,7 @@ def _item_to_entity(iterator, entity_pb): :param iterator: The iterator that is currently in use. :type entity_pb: - :class:`google.cloud.datastore._generated.entity_pb2.Entity` + :class:`.entity_pb2.Entity` :param entity_pb: An entity protobuf to convert to a native entity. :rtype: :class:`~google.cloud.datastore.entity.Entity` diff --git a/datastore/setup.py b/datastore/setup.py index 3de3a0b91a84..8dee2f7bfc62 100644 --- a/datastore/setup.py +++ b/datastore/setup.py @@ -52,6 +52,7 @@ REQUIREMENTS = [ 'google-cloud-core >= 0.22.1, < 0.23dev', 'grpcio >= 1.0.2, < 2.0dev', + 'gapic-google-cloud-datastore-v1 >= 0.14.0, < 0.15dev', ] setup( diff --git a/datastore/unit_tests/test__http.py b/datastore/unit_tests/test__http.py index 35781cdf3a40..6515767bd0ca 100644 --- a/datastore/unit_tests/test__http.py +++ b/datastore/unit_tests/test__http.py @@ -223,7 +223,7 @@ def test_constructor(self): self.assertEqual(mock_args, [( conn.credentials, conn.USER_AGENT, - MUT.datastore_grpc_pb2.DatastoreStub, + MUT.datastore_pb2_grpc.DatastoreStub, conn.host, )]) @@ -242,7 +242,7 @@ def test_constructor_insecure(self): self.assertIs(datastore_api._stub, stub) self.assertEqual(mock_args, [( - MUT.datastore_grpc_pb2.DatastoreStub, + MUT.datastore_pb2_grpc.DatastoreStub, conn.host, )]) @@ -370,7 +370,7 @@ def _make_key_pb(self, project, id_=1234): return Key(*path_args, project=project).to_protobuf() def _make_query_pb(self, kind): - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 pb = query_pb2.Query() pb.kind.add().name = kind return pb @@ -508,7 +508,7 @@ def test_build_api_url_w_explicit_base_version(self): URI) def test_lookup_single_key_empty_response(self): - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 PROJECT = 'PROJECT' key_pb = self._make_key_pb(PROJECT) @@ -535,7 +535,7 @@ def test_lookup_single_key_empty_response(self): self.assertEqual(key_pb, keys[0]) def test_lookup_single_key_empty_response_w_eventual(self): - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 PROJECT = 'PROJECT' key_pb = self._make_key_pb(PROJECT) @@ -574,7 +574,7 @@ def test_lookup_single_key_empty_response_w_eventual_and_transaction(self): eventual=True, transaction_id=TRANSACTION) def test_lookup_single_key_empty_response_w_transaction(self): - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 PROJECT = 'PROJECT' TRANSACTION = b'TRANSACTION' @@ -604,8 +604,8 @@ def test_lookup_single_key_empty_response_w_transaction(self): self.assertEqual(request.read_options.transaction, TRANSACTION) def 
test_lookup_single_key_nonempty_response(self): - from google.cloud.datastore._generated import datastore_pb2 - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 PROJECT = 'PROJECT' key_pb = self._make_key_pb(PROJECT) @@ -636,7 +636,7 @@ def test_lookup_single_key_nonempty_response(self): self.assertEqual(key_pb, keys[0]) def test_lookup_multiple_keys_empty_response(self): - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 PROJECT = 'PROJECT' key_pb1 = self._make_key_pb(PROJECT) @@ -665,7 +665,7 @@ def test_lookup_multiple_keys_empty_response(self): self.assertEqual(key_pb2, keys[1]) def test_lookup_multiple_keys_w_missing(self): - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 PROJECT = 'PROJECT' key_pb1 = self._make_key_pb(PROJECT) @@ -699,7 +699,7 @@ def test_lookup_multiple_keys_w_missing(self): self.assertEqual(key_pb2, keys[1]) def test_lookup_multiple_keys_w_deferred(self): - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 PROJECT = 'PROJECT' key_pb1 = self._make_key_pb(PROJECT) @@ -735,8 +735,8 @@ def test_lookup_multiple_keys_w_deferred(self): self.assertEqual(key_pb2, keys[1]) def test_run_query_w_eventual_no_transaction(self): - from google.cloud.datastore._generated import datastore_pb2 - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 PROJECT = 'PROJECT' KIND = 'Nonesuch' @@ -773,8 +773,8 @@ def test_run_query_w_eventual_no_transaction(self): self.assertEqual(request.read_options.transaction, b'') def test_run_query_wo_eventual_w_transaction(self): - from google.cloud.datastore._generated import datastore_pb2 - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 PROJECT = 'PROJECT' KIND = 'Nonesuch' @@ -813,8 +813,8 @@ def test_run_query_wo_eventual_w_transaction(self): self.assertEqual(request.read_options.transaction, TRANSACTION) def test_run_query_w_eventual_and_transaction(self): - from google.cloud.datastore._generated import datastore_pb2 - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 PROJECT = 'PROJECT' KIND = 'Nonesuch' @@ -831,8 +831,8 @@ def test_run_query_w_eventual_and_transaction(self): eventual=True, transaction_id=TRANSACTION) def test_run_query_wo_namespace_empty_result(self): - from google.cloud.datastore._generated import datastore_pb2 - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 PROJECT = 'PROJECT' KIND = 'Nonesuch' @@ -865,8 +865,8 @@ def test_run_query_wo_namespace_empty_result(self): self.assertEqual(request.query, q_pb) def test_run_query_w_namespace_nonempty_result(self): - from google.cloud.datastore._generated import datastore_pb2 - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 PROJECT = 'PROJECT' KIND = 'Kind' @@ 
-895,7 +895,7 @@ def test_run_query_w_namespace_nonempty_result(self): self.assertEqual(request.query, q_pb) def test_begin_transaction(self): - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 PROJECT = 'PROJECT' TRANSACTION = b'TRANSACTION' @@ -918,7 +918,7 @@ def test_begin_transaction(self): def test_commit_wo_transaction(self): import mock - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 from google.cloud.datastore.helpers import _new_value_pb PROJECT = 'PROJECT' @@ -966,7 +966,7 @@ def mock_parse(response): def test_commit_w_transaction(self): import mock - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 from google.cloud.datastore.helpers import _new_value_pb PROJECT = 'PROJECT' @@ -1013,7 +1013,7 @@ def mock_parse(response): self.assertEqual(_parsed, [rsp_pb]) def test_rollback_ok(self): - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 PROJECT = 'PROJECT' TRANSACTION = b'xact' @@ -1035,7 +1035,7 @@ def test_rollback_ok(self): self.assertEqual(request.transaction, TRANSACTION) def test_allocate_ids_empty(self): - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 PROJECT = 'PROJECT' rsp_pb = datastore_pb2.AllocateIdsResponse() @@ -1056,7 +1056,7 @@ def test_allocate_ids_empty(self): self.assertEqual(list(request.keys), []) def test_allocate_ids_non_empty(self): - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 PROJECT = 'PROJECT' before_key_pbs = [ @@ -1097,8 +1097,8 @@ def _call_fut(self, commit_response_pb): return _parse_commit_response(commit_response_pb) def test_it(self): - from google.cloud.datastore._generated import datastore_pb2 - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 index_updates = 1337 keys = [ diff --git a/datastore/unit_tests/test_batch.py b/datastore/unit_tests/test_batch.py index 0bdc8762e64c..7681a8fd9201 100644 --- a/datastore/unit_tests/test_batch.py +++ b/datastore/unit_tests/test_batch.py @@ -27,7 +27,7 @@ def _make_one(self, client): return self._get_target_class()(client) def test_ctor(self): - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 _PROJECT = 'PROJECT' _NAMESPACE = 'NAMESPACE' connection = _Connection() @@ -416,7 +416,7 @@ def is_partial(self): return self._id is None def to_protobuf(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 key = self._key = entity_pb2.Key() # Don't assign it, because it will just get ripped out # key.partition_id.project_id = self.project diff --git a/datastore/unit_tests/test_client.py b/datastore/unit_tests/test_client.py index f6e016d03712..67a0229870c0 100644 --- a/datastore/unit_tests/test_client.py +++ b/datastore/unit_tests/test_client.py @@ -23,7 +23,7 @@ def _make_credentials(): def _make_entity_pb(project, kind, integer_id, name=None, str_val=None): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb 
entity_pb = entity_pb2.Entity() @@ -118,13 +118,15 @@ class TestClient(unittest.TestCase): PROJECT = 'PROJECT' def setUp(self): - KLASS = self._get_target_class() - self.original_cnxn_class = KLASS._connection_class - KLASS._connection_class = _MockConnection + from google.cloud.datastore import client as MUT + + self.original_cnxn_class = MUT.Connection + MUT.Connection = _MockConnection def tearDown(self): - KLASS = self._get_target_class() - KLASS._connection_class = self.original_cnxn_class + from google.cloud.datastore import client as MUT + + MUT.Connection = self.original_cnxn_class @staticmethod def _get_target_class(): @@ -273,7 +275,7 @@ def test_get_multi_miss(self): self.assertEqual(results, []) def test_get_multi_miss_w_missing(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.key import Key KIND = 'Kind' @@ -337,7 +339,7 @@ def test_get_multi_miss_w_deferred(self): [key.to_protobuf()]) def test_get_multi_w_deferred_from_backend_but_not_passed(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import Key @@ -1002,7 +1004,7 @@ def is_partial(self): return self._id is None def to_protobuf(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 key = self._key = entity_pb2.Key() # Don't assign it, because it will just get ripped out # key.partition_id.project_id = self.project diff --git a/datastore/unit_tests/test_helpers.py b/datastore/unit_tests/test_helpers.py index f3fa3391bbb7..f3d144e6a591 100644 --- a/datastore/unit_tests/test_helpers.py +++ b/datastore/unit_tests/test_helpers.py @@ -22,7 +22,7 @@ def _call_fut(self, entity_pb, name): return _new_value_pb(entity_pb, name) def test_it(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 entity_pb = entity_pb2.Entity() name = 'foo' @@ -41,7 +41,7 @@ def _call_fut(self, entity_pb): def test_it(self): import types - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb entity_pb = entity_pb2.Entity() @@ -63,7 +63,7 @@ def _call_fut(self, val): return entity_from_protobuf(val) def test_it(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb _PROJECT = 'PROJECT' @@ -109,7 +109,7 @@ def test_it(self): self.assertEqual(key.id, _ID) def test_mismatched_value_indexed(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb _PROJECT = 'PROJECT' @@ -133,7 +133,7 @@ def test_mismatched_value_indexed(self): self._call_fut(entity_pb) def test_entity_no_key(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 entity_pb = entity_pb2.Entity() entity = self._call_fut(entity_pb) @@ -142,7 +142,7 @@ def test_entity_no_key(self): self.assertEqual(dict(entity), {}) def test_entity_with_meaning(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from 
google.cloud.datastore.helpers import _new_value_pb entity_pb = entity_pb2.Entity() @@ -157,7 +157,7 @@ def test_entity_with_meaning(self): self.assertEqual(entity._meanings, {name: (meaning, val)}) def test_nested_entity_no_key(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb PROJECT = 'FOO' @@ -214,7 +214,7 @@ def _compareEntityProto(self, entity_pb1, entity_pb2): self.assertEqual(val1, val2) def test_empty(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.entity import Entity entity = Entity() @@ -222,7 +222,7 @@ def test_empty(self): self._compareEntityProto(entity_pb, entity_pb2.Entity()) def test_key_only(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import Key @@ -241,7 +241,7 @@ def test_key_only(self): self._compareEntityProto(entity_pb, expected_pb) def test_simple_fields(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import _new_value_pb @@ -261,7 +261,7 @@ def test_simple_fields(self): self._compareEntityProto(entity_pb, expected_pb) def test_with_empty_list(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.entity import Entity entity = Entity() @@ -271,7 +271,7 @@ def test_with_empty_list(self): self._compareEntityProto(entity_pb, entity_pb2.Entity()) def test_inverts_to_protobuf(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb from google.cloud.datastore.helpers import entity_from_protobuf @@ -324,7 +324,7 @@ def test_inverts_to_protobuf(self): self._compareEntityProto(original_pb, new_pb) def test_meaning_with_change(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import _new_value_pb @@ -342,7 +342,7 @@ def test_meaning_with_change(self): self._compareEntityProto(entity_pb, expected_pb) def test_variable_meanings(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import _new_value_pb @@ -376,7 +376,7 @@ def _call_fut(self, val): return key_from_protobuf(val) def _makePB(self, project=None, namespace=None, path=()): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 pb = entity_pb2.Key() if project is not None: pb.partition_id.project_id = project @@ -546,7 +546,7 @@ def _call_fut(self, pb): return _get_value_from_value_pb(pb) def _makePB(self, attr_name, value): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 pb = entity_pb2.Value() setattr(pb, attr_name, value) @@ -556,7 +556,7 @@ def test_datetime(self): import calendar import datetime from google.cloud._helpers import UTC 
- from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 micros = 4375 utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC) @@ -566,7 +566,7 @@ def test_datetime(self): self.assertEqual(self._call_fut(pb), utc) def test_key(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.key import Key pb = entity_pb2.Value() @@ -596,7 +596,7 @@ def test_unicode(self): self.assertEqual(self._call_fut(pb), u'str') def test_entity(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import _new_value_pb @@ -612,7 +612,7 @@ def test_entity(self): self.assertEqual(entity['foo'], 'Foo') def test_array(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 pb = entity_pb2.Value() array_pb = pb.array_value.values @@ -625,7 +625,7 @@ def test_array(self): def test_geo_point(self): from google.type import latlng_pb2 - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 from google.cloud.datastore.helpers import GeoPoint lat = -3.14 @@ -639,14 +639,14 @@ def test_geo_point(self): def test_null(self): from google.protobuf import struct_pb2 - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 pb = entity_pb2.Value(null_value=struct_pb2.NULL_VALUE) result = self._call_fut(pb) self.assertIsNone(result) def test_unknown(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 pb = entity_pb2.Value() with self.assertRaises(ValueError): @@ -661,7 +661,7 @@ def _call_fut(self, value_pb, val): return _set_protobuf_value(value_pb, val) def _makePB(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 return entity_pb2.Value() def test_datetime(self): @@ -799,14 +799,14 @@ def _call_fut(self, *args, **kwargs): return _get_meaning(*args, **kwargs) def test_no_meaning(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 value_pb = entity_pb2.Value() result = self._call_fut(value_pb) self.assertIsNone(result) def test_single(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 value_pb = entity_pb2.Value() value_pb.meaning = meaning = 22 @@ -815,7 +815,7 @@ def test_single(self): self.assertEqual(meaning, result) def test_empty_array_value(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 value_pb = entity_pb2.Value() value_pb.array_value.values.add() @@ -825,7 +825,7 @@ def test_empty_array_value(self): self.assertEqual(None, result) def test_array_value(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 value_pb = entity_pb2.Value() meaning = 9 @@ -840,7 +840,7 @@ def test_array_value(self): self.assertEqual(meaning, result) def test_array_value_multiple_meanings(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 value_pb = entity_pb2.Value() 
meaning1 = 9 @@ -857,7 +857,7 @@ def test_array_value_multiple_meanings(self): self.assertEqual(result, [meaning1, meaning2]) def test_array_value_meaning_partially_unset(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 value_pb = entity_pb2.Value() meaning1 = 9 diff --git a/datastore/unit_tests/test_key.py b/datastore/unit_tests/test_key.py index b2227d297b31..ed2eb45b4cca 100644 --- a/datastore/unit_tests/test_key.py +++ b/datastore/unit_tests/test_key.py @@ -314,7 +314,7 @@ def test_completed_key_on_complete(self): self.assertRaises(ValueError, key.completed_key, 5678) def test_to_protobuf_defaults(self): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 _KIND = 'KIND' key = self._make_one(_KIND, project=self._DEFAULT_PROJECT) diff --git a/datastore/unit_tests/test_query.py b/datastore/unit_tests/test_query.py index 0e431623e369..255bfa8f014f 100644 --- a/datastore/unit_tests/test_query.py +++ b/datastore/unit_tests/test_query.py @@ -389,7 +389,7 @@ def test_constructor_explicit(self): self.assertTrue(iterator._more_results) def test__build_protobuf_empty(self): - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 from google.cloud.datastore.query import Query client = _Client(None, None) @@ -401,7 +401,7 @@ def test__build_protobuf_empty(self): self.assertEqual(pb, expected_pb) def test__build_protobuf_all_values(self): - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 from google.cloud.datastore.query import Query client = _Client(None, None) @@ -429,7 +429,7 @@ def test__build_protobuf_all_values(self): self.assertEqual(pb, expected_pb) def test__process_query_results(self): - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 iterator = self._make_one(None, None, end_cursor='abcd') @@ -450,7 +450,7 @@ def test__process_query_results(self): self.assertTrue(iterator._more_results) def test__process_query_results_done(self): - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 iterator = self._make_one(None, None, end_cursor='abcd') @@ -478,7 +478,7 @@ def test__process_query_results_bad_enum(self): def test__next_page(self): from google.cloud.iterator import Page - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 from google.cloud.datastore.query import Query connection = _Connection() @@ -546,7 +546,7 @@ def _call_fut(self, query): return _pb_from_query(query) def test_empty(self): - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 pb = self._call_fut(_Query()) self.assertEqual(list(pb.projection), []) @@ -574,7 +574,7 @@ def test_kind(self): def test_ancestor(self): from google.cloud.datastore.key import Key - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 ancestor = Key('Ancestor', 123, project='PROJECT') pb = self._call_fut(_Query(ancestor=ancestor)) @@ -587,7 +587,7 @@ def test_ancestor(self): self.assertEqual(pfilter.value.key_value, ancestor_pb) def test_filter(self): - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 query = _Query(filters=[('name', '=', 
u'John')]) query.OPERATORS = { @@ -603,7 +603,7 @@ def test_filter(self): def test_filter_key(self): from google.cloud.datastore.key import Key - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 key = Key('Kind', 123, project='PROJECT') query = _Query(filters=[('__key__', '=', key)]) @@ -620,7 +620,7 @@ def test_filter_key(self): self.assertEqual(pfilter.value.key_value, key_pb) def test_order(self): - from google.cloud.datastore._generated import query_pb2 + from google.cloud.grpc.datastore.v1 import query_pb2 pb = self._call_fut(_Query(order=['a', '-b', 'c'])) self.assertEqual([item.property.name for item in pb.order], diff --git a/datastore/unit_tests/test_transaction.py b/datastore/unit_tests/test_transaction.py index c09304df6f5b..6b6b005a6fa3 100644 --- a/datastore/unit_tests/test_transaction.py +++ b/datastore/unit_tests/test_transaction.py @@ -26,7 +26,7 @@ def _make_one(self, client, **kw): return self._get_target_class()(client, **kw) def test_ctor_defaults(self): - from google.cloud.datastore._generated import datastore_pb2 + from google.cloud.grpc.datastore.v1 import datastore_pb2 _PROJECT = 'PROJECT' connection = _Connection() @@ -178,7 +178,7 @@ class Foo(Exception): def _make_key(kind, id_, project): - from google.cloud.datastore._generated import entity_pb2 + from google.cloud.grpc.datastore.v1 import entity_pb2 key = entity_pb2.Key() key.partition_id.project_id = project diff --git a/dns/google/cloud/dns/client.py b/dns/google/cloud/dns/client.py index 0cd2577bc775..429ebe941c04 100644 --- a/dns/google/cloud/dns/client.py +++ b/dns/google/cloud/dns/client.py @@ -29,20 +29,25 @@ class Client(JSONClient): passed when creating a zone. If not passed, falls back to the default inferred from the environment. - :type credentials: :class:`oauth2client.client.OAuth2Credentials` or - :class:`NoneType` - :param credentials: The OAuth2 Credentials to use for the connection - owned by this client. If not passed (and if no ``http`` - object is passed), falls back to the default inferred - from the environment. - - :type http: :class:`httplib2.Http` or class that defines ``request()``. - :param http: An optional HTTP object to make requests. If not passed, an + :type credentials: :class:`~google.auth.credentials.Credentials` + :param credentials: (Optional) The OAuth2 Credentials to use for this + client. If not passed (and if no ``http`` object is + passed), falls back to the default inferred from the + environment. + + :type http: :class:`~httplib2.Http` + :param http: (Optional) HTTP object to make requests. Can be any object + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. If not passed, an ``http`` object is created that is bound to the ``credentials`` for the current object. """ - _connection_class = Connection + def __init__(self, project=None, credentials=None, http=None): + super(Client, self).__init__( + project=project, credentials=credentials, http=http) + self._connection = Connection( + credentials=self._credentials, http=self._http) def quotas(self): """Return DNS quotas for the project associated with this client. 
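Several of the clients touched in this patch (datastore, DNS, language, and logging) receive the same refactor: the class-level ``_connection_class`` hook goes away, and each ``__init__`` now defers credential/HTTP resolution to the base class and then builds its own ``Connection`` from ``self._credentials`` and ``self._http``. The following is a minimal, self-contained sketch of that shape only; ``MyConnection``, ``_BaseClient``, and ``MyServiceClient`` are illustrative stand-ins, not the library's real classes.

class MyConnection(object):
    """Stand-in for a package's HTTP connection class."""

    def __init__(self, credentials=None, http=None):
        self.credentials = credentials
        self.http = http


class _BaseClient(object):
    """Stand-in for the shared base client.

    The real base class falls back to defaults inferred from the
    environment when ``credentials`` or ``http`` are not passed.
    """

    def __init__(self, credentials=None, http=None):
        self._credentials = credentials
        self._http = http


class MyServiceClient(_BaseClient):
    """Client that owns its connection instead of using a class hook."""

    def __init__(self, credentials=None, http=None):
        super(MyServiceClient, self).__init__(
            credentials=credentials, http=http)
        # Build the connection from the already-resolved credentials and
        # HTTP object, mirroring the datastore/DNS/logging constructors.
        self._connection = MyConnection(
            credentials=self._credentials, http=self._http)

One consequence of this shape is that there is no class attribute left to patch, which is why the datastore unit tests above now swap ``MUT.Connection`` at module level in ``setUp``/``tearDown`` instead of replacing ``_connection_class``.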
diff --git a/docs/bigquery_snippets.py b/docs/bigquery_snippets.py index 2bba9acece81..204d7dc3a5aa 100644 --- a/docs/bigquery_snippets.py +++ b/docs/bigquery_snippets.py @@ -436,12 +436,13 @@ def table_upload_from_file(client, to_delete): @snippet -def table_delete(client, _): +def table_delete(client, to_delete): """Delete a table.""" DATASET_NAME = 'table_delete_dataset_%d' % (_millis(),) TABLE_NAME = 'table_create_table_%d' % (_millis(),) dataset = client.dataset(DATASET_NAME) dataset.create() + to_delete.append(dataset) table = dataset.table(TABLE_NAME, SCHEMA) table.create() diff --git a/docs/translate-client.rst b/docs/translate-client.rst index c724c9f2fe03..2bb0820ef5dd 100644 --- a/docs/translate-client.rst +++ b/docs/translate-client.rst @@ -1,5 +1,5 @@ -Translate Client -================ +Translation Client +================== .. automodule:: google.cloud.translate.client :members: diff --git a/docs/translate-usage.rst b/docs/translate-usage.rst index 501a402f9cef..54c86a37d706 100644 --- a/docs/translate-usage.rst +++ b/docs/translate-usage.rst @@ -1,10 +1,10 @@ Using the API ============= -With `Google Translate`_, you can dynamically translate text -between thousands of language pairs. The Google Translate API -lets websites and programs integrate with Google Translate -programmatically. Google Translate API is available as a +With `Google Cloud Translation`_, you can dynamically translate text +between thousands of language pairs. The Google Cloud Translation API +lets websites and programs integrate with Google Cloud Translation +programmatically. Google Cloud Translation is available as a paid service. See the `Pricing`_ and `FAQ`_ pages for details. Authentication / Configuration @@ -14,9 +14,9 @@ Authentication / Configuration your applications. - :class:`~google.cloud.translate.client.Client` objects hold both a ``key`` - and a connection to the Translate service. + and a connection to the Cloud Translation service. -- **An API key is required for Translate.** See +- **An API key is required for Google Cloud Translation.** See `Identifying your application to Google`_ for details. This is significantly different than the other clients in ``google-cloud-python``. @@ -39,13 +39,13 @@ well: >>> from google.cloud import translate >>> client = translate.Client('my-api-key', target_language='es') -The Google Translate API has three supported methods, and they +The Google Cloud Translation API has three supported methods, and they map to three methods on a client: :meth:`~google.cloud.translate.client.Client.get_languages`, :meth:`~google.cloud.translate.client.Client.detect_language` and :meth:`~google.cloud.translate.client.Client.translate`. -To get a list of languages supported by Google Translate +To get a list of languages supported by the Google Cloud Translation API .. code:: @@ -116,8 +116,8 @@ or to use a non-default target language: }, ] -.. _Google Translate: https://cloud.google.com/translate -.. _Pricing: https://cloud.google.com/translate/v2/pricing.html -.. _FAQ: https://cloud.google.com/translate/v2/faq.html -.. _Identifying your application to Google: https://cloud.google.com/translate/v2/using_rest#auth -.. _confidence: https://cloud.google.com/translate/v2/detecting-language-with-rest +.. _Google Cloud Translation: https://cloud.google.com/translation +.. _Pricing: https://cloud.google.com/translation/pricing +.. _FAQ: https://cloud.google.com/translation/faq +.. 
_Identifying your application to Google: https://cloud.google.com/translation/docs/translating-text +.. _confidence: https://cloud.google.com/translation/docs/detecting-language diff --git a/language/google/cloud/language/client.py b/language/google/cloud/language/client.py index 14a4e6444a65..bad3ac7be918 100644 --- a/language/google/cloud/language/client.py +++ b/language/google/cloud/language/client.py @@ -23,19 +23,25 @@ class Client(client_module.Client): """Client to bundle configuration needed for API requests. - :type credentials: :class:`~oauth2client.client.OAuth2Credentials` - :param credentials: (Optional) The OAuth2 Credentials to use for the - connection owned by this client. If not passed (and - if no ``http`` object is passed), falls back to the - default inferred from the environment. - - :type http: :class:`httplib2.Http` or class that defines ``request()``. - :param http: An optional HTTP object to make requests. If not passed, an + :type credentials: :class:`~google.auth.credentials.Credentials` + :param credentials: (Optional) The OAuth2 Credentials to use for this + client. If not passed (and if no ``http`` object is + passed), falls back to the default inferred from the + environment. + + :type http: :class:`~httplib2.Http` + :param http: (Optional) HTTP object to make requests. Can be any object + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. If not passed, an ``http`` object is created that is bound to the ``credentials`` for the current object. """ - _connection_class = Connection + def __init__(self, credentials=None, http=None): + super(Client, self).__init__( + credentials=credentials, http=http) + self._connection = Connection( + credentials=self._credentials, http=self._http) def document_from_text(self, content, **kwargs): """Create a plain text document bound to this client. diff --git a/language/google/cloud/language/syntax.py b/language/google/cloud/language/syntax.py index 0c7260839e74..e2b4ab286f24 100644 --- a/language/google/cloud/language/syntax.py +++ b/language/google/cloud/language/syntax.py @@ -18,6 +18,8 @@ breaks a document down into tokens and sentences. """ +from google.cloud.language.sentiment import Sentiment + class PartOfSpeech(object): """Part of speech of a :class:`Token`.""" @@ -183,11 +185,19 @@ class Sentence(object): :param begin: The beginning offset of the sentence in the original document according to the encoding type specified in the API request. + + :type sentiment: :class:`~google.cloud.language.sentiment.Sentiment` + :param sentiment: + (Optional) For calls to + :meth:`~google.cloud.language.document.Document.annotate_text` where + ``include_sentiment`` is set to true, this field will contain the + sentiment for the sentence. """ - def __init__(self, content, begin): + def __init__(self, content, begin, sentiment=None): self.content = content self.begin = begin + self.sentiment = sentiment @classmethod def from_api_repr(cls, payload): @@ -200,4 +210,11 @@ def from_api_repr(cls, payload): :returns: The sentence parsed from the API representation. 
""" text_span = payload['text'] - return cls(text_span['content'], text_span['beginOffset']) + + try: + sentiment = Sentiment.from_api_repr(payload['sentiment']) + except KeyError: + sentiment = None + + return cls(text_span['content'], text_span['beginOffset'], + sentiment=sentiment) diff --git a/language/setup.py b/language/setup.py index 22538aebdeb9..4d6a2104f5b9 100644 --- a/language/setup.py +++ b/language/setup.py @@ -55,7 +55,7 @@ setup( name='google-cloud-language', - version='0.22.1', + version='0.22.2', description='Python Client for Google Cloud Natural Language', long_description=README, namespace_packages=[ diff --git a/language/unit_tests/test_syntax.py b/language/unit_tests/test_syntax.py index da6f09e54de4..b09bc2ce7004 100644 --- a/language/unit_tests/test_syntax.py +++ b/language/unit_tests/test_syntax.py @@ -125,3 +125,28 @@ def test_from_api_repr(self): sentence = klass.from_api_repr(payload) self.assertEqual(sentence.content, content) self.assertEqual(sentence.begin, begin) + self.assertEqual(sentence.sentiment, None) + + def test_from_api_repr_with_sentiment(self): + from google.cloud.language.sentiment import Sentiment + klass = self._get_target_class() + content = 'All the pretty horses.' + begin = -1 + score = 0.5 + magnitude = 0.5 + payload = { + 'text': { + 'content': content, + 'beginOffset': begin, + }, + 'sentiment': { + 'score': score, + 'magnitude': magnitude, + } + } + sentence = klass.from_api_repr(payload) + self.assertEqual(sentence.content, content) + self.assertEqual(sentence.begin, begin) + self.assertIsInstance(sentence.sentiment, Sentiment) + self.assertEqual(sentence.sentiment.score, score) + self.assertEqual(sentence.sentiment.magnitude, magnitude) diff --git a/logging/google/cloud/logging/_gax.py b/logging/google/cloud/logging/_gax.py index 0ffd46dd3fcb..7ddadba01d06 100644 --- a/logging/google/cloud/logging/_gax.py +++ b/logging/google/cloud/logging/_gax.py @@ -532,7 +532,7 @@ def make_gax_logging_api(client): :returns: A metrics API instance with the proper credentials. """ channel = make_secure_channel( - client._connection.credentials, DEFAULT_USER_AGENT, + client._credentials, DEFAULT_USER_AGENT, LoggingServiceV2Client.SERVICE_ADDRESS) generated = LoggingServiceV2Client(channel=channel) return _LoggingAPI(generated, client) @@ -548,7 +548,7 @@ def make_gax_metrics_api(client): :returns: A metrics API instance with the proper credentials. """ channel = make_secure_channel( - client._connection.credentials, DEFAULT_USER_AGENT, + client._credentials, DEFAULT_USER_AGENT, MetricsServiceV2Client.SERVICE_ADDRESS) generated = MetricsServiceV2Client(channel=channel) return _MetricsAPI(generated, client) @@ -564,7 +564,7 @@ def make_gax_sinks_api(client): :returns: A metrics API instance with the proper credentials. """ channel = make_secure_channel( - client._connection.credentials, DEFAULT_USER_AGENT, + client._credentials, DEFAULT_USER_AGENT, ConfigServiceV2Client.SERVICE_ADDRESS) generated = ConfigServiceV2Client(channel=channel) return _SinksAPI(generated, client) diff --git a/logging/google/cloud/logging/_http.py b/logging/google/cloud/logging/_http.py index 8d9eccc819d5..8056689235db 100644 --- a/logging/google/cloud/logging/_http.py +++ b/logging/google/cloud/logging/_http.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-"""Create / interact with Stackdriver Logging connections.""" +"""Interact with Stackdriver Logging via JSON-over-HTTP.""" import functools @@ -67,7 +67,7 @@ class _LoggingAPI(object): def __init__(self, client): self._client = client - self._connection = client._connection + self.api_request = client._connection.api_request def list_entries(self, projects, filter_=None, order_by=None, page_size=None, page_token=None): @@ -161,8 +161,7 @@ def write_entries(self, entries, logger_name=None, resource=None, if labels is not None: data['labels'] = labels - self._connection.api_request(method='POST', path='/entries:write', - data=data) + self.api_request(method='POST', path='/entries:write', data=data) def logger_delete(self, project, logger_name): """API call: delete all entries in a logger via a DELETE request @@ -177,7 +176,7 @@ def logger_delete(self, project, logger_name): :param logger_name: name of logger containing the log entries to delete """ path = '/projects/%s/logs/%s' % (project, logger_name) - self._connection.api_request(method='DELETE', path=path) + self.api_request(method='DELETE', path=path) class _SinksAPI(object): @@ -191,7 +190,7 @@ class _SinksAPI(object): """ def __init__(self, client): self._client = client - self._connection = client._connection + self.api_request = client._connection.api_request def list_sinks(self, project, page_size=None, page_token=None): """List sinks for the project associated with this client. @@ -253,7 +252,7 @@ def sink_create(self, project, sink_name, filter_, destination): 'filter': filter_, 'destination': destination, } - self._connection.api_request(method='POST', path=target, data=data) + self.api_request(method='POST', path=target, data=data) def sink_get(self, project, sink_name): """API call: retrieve a sink resource. @@ -271,7 +270,7 @@ def sink_get(self, project, sink_name): :returns: The JSON sink object returned from the API. """ target = '/projects/%s/sinks/%s' % (project, sink_name) - return self._connection.api_request(method='GET', path=target) + return self.api_request(method='GET', path=target) def sink_update(self, project, sink_name, filter_, destination): """API call: update a sink resource. @@ -299,7 +298,7 @@ def sink_update(self, project, sink_name, filter_, destination): 'filter': filter_, 'destination': destination, } - self._connection.api_request(method='PUT', path=target, data=data) + self.api_request(method='PUT', path=target, data=data) def sink_delete(self, project, sink_name): """API call: delete a sink resource. @@ -314,7 +313,7 @@ def sink_delete(self, project, sink_name): :param sink_name: the name of the sink """ target = '/projects/%s/sinks/%s' % (project, sink_name) - self._connection.api_request(method='DELETE', path=target) + self.api_request(method='DELETE', path=target) class _MetricsAPI(object): @@ -328,7 +327,7 @@ class _MetricsAPI(object): """ def __init__(self, client): self._client = client - self._connection = client._connection + self.api_request = client._connection.api_request def list_metrics(self, project, page_size=None, page_token=None): """List metrics for the project associated with this client. @@ -389,7 +388,7 @@ def metric_create(self, project, metric_name, filter_, description=None): 'filter': filter_, 'description': description, } - self._connection.api_request(method='POST', path=target, data=data) + self.api_request(method='POST', path=target, data=data) def metric_get(self, project, metric_name): """API call: retrieve a metric resource. 
@@ -407,7 +406,7 @@ def metric_get(self, project, metric_name): :returns: The JSON metric object returned from the API. """ target = '/projects/%s/metrics/%s' % (project, metric_name) - return self._connection.api_request(method='GET', path=target) + return self.api_request(method='GET', path=target) def metric_update(self, project, metric_name, filter_, description): """API call: update a metric resource. @@ -434,7 +433,7 @@ def metric_update(self, project, metric_name, filter_, description): 'filter': filter_, 'description': description, } - self._connection.api_request(method='PUT', path=target, data=data) + self.api_request(method='PUT', path=target, data=data) def metric_delete(self, project, metric_name): """API call: delete a metric resource. @@ -449,7 +448,7 @@ def metric_delete(self, project, metric_name): :param metric_name: the name of the metric. """ target = '/projects/%s/metrics/%s' % (project, metric_name) - self._connection.api_request(method='DELETE', path=target) + self.api_request(method='DELETE', path=target) def _item_to_entry(iterator, resource, loggers): diff --git a/logging/google/cloud/logging/client.py b/logging/google/cloud/logging/client.py index c92f177eaac6..77e762e6c808 100644 --- a/logging/google/cloud/logging/client.py +++ b/logging/google/cloud/logging/client.py @@ -67,15 +67,16 @@ class Client(JSONClient): If not passed, falls back to the default inferred from the environment. - :type credentials: :class:`oauth2client.client.OAuth2Credentials` or - :class:`NoneType` - :param credentials: The OAuth2 Credentials to use for the connection - owned by this client. If not passed (and if no ``http`` - object is passed), falls back to the default inferred - from the environment. - - :type http: :class:`httplib2.Http` or class that defines ``request()``. - :param http: An optional HTTP object to make requests. If not passed, an + :type credentials: :class:`~google.auth.credentials.Credentials` + :param credentials: (Optional) The OAuth2 Credentials to use for this + client. If not passed (and if no ``http`` object is + passed), falls back to the default inferred from the + environment. + + :type http: :class:`~httplib2.Http` + :param http: (Optional) HTTP object to make requests. Can be any object + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. If not passed, an ``http`` object is created that is bound to the ``credentials`` for the current object. 
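As the rewritten logging ``Client`` docstring notes, both ``credentials`` and ``http`` are optional and fall back to defaults inferred from the environment. A brief usage sketch, assuming application default credentials are configured and using a placeholder project id:

from google.cloud import logging

client = logging.Client(project='my-project')

# Explicit objects may be supplied instead; ``http`` can be any object
# exposing an httplib2-style ``request()`` method.
# client = logging.Client(project='my-project',
#                         credentials=my_credentials, http=my_http)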
@@ -86,12 +87,16 @@ class Client(JSONClient): variable """ - _connection_class = Connection - _logging_api = _sinks_api = _metrics_api = None + _logging_api = None + _sinks_api = None + _metrics_api = None def __init__(self, project=None, credentials=None, http=None, use_gax=None): - super(Client, self).__init__(project, credentials, http) + super(Client, self).__init__( + project=project, credentials=credentials, http=http) + self._connection = Connection( + credentials=self._credentials, http=self._http) if use_gax is None: self._use_gax = _USE_GAX else: diff --git a/logging/google/cloud/logging/handlers/transports/background_thread.py b/logging/google/cloud/logging/handlers/transports/background_thread.py index c090474a540b..9c8ea85c937a 100644 --- a/logging/google/cloud/logging/handlers/transports/background_thread.py +++ b/logging/google/cloud/logging/handlers/transports/background_thread.py @@ -150,9 +150,9 @@ class BackgroundThreadTransport(Transport): """ def __init__(self, client, name): - http = copy.deepcopy(client._connection.http) - self.client = client.__class__(client.project, - client._connection.credentials, http) + http = copy.deepcopy(client._http) + self.client = client.__class__( + client.project, client._credentials, http) logger = self.client.logger(name) self.worker = _Worker(logger) diff --git a/logging/google/cloud/logging/logger.py b/logging/google/cloud/logging/logger.py index 842481af42da..d5a5b201dca0 100644 --- a/logging/google/cloud/logging/logger.py +++ b/logging/google/cloud/logging/logger.py @@ -17,6 +17,7 @@ import json from google.protobuf.json_format import MessageToJson +from google.cloud._helpers import _datetime_to_rfc3339 class Logger(object): @@ -92,7 +93,7 @@ def batch(self, client=None): def _make_entry_resource(self, text=None, info=None, message=None, labels=None, insert_id=None, severity=None, - http_request=None): + http_request=None, timestamp=None): """Return a log entry resource of the appropriate type. Helper for :meth:`log_text`, :meth:`log_struct`, and :meth:`log_proto`. @@ -121,6 +122,9 @@ def _make_entry_resource(self, text=None, info=None, message=None, :param http_request: (optional) info about HTTP request associated with the entry + :type timestamp: :class:`datetime.datetime` + :param timestamp: (optional) timestamp of event being logged. + :rtype: dict :returns: The JSON resource created. """ @@ -155,10 +159,13 @@ def _make_entry_resource(self, text=None, info=None, message=None, if http_request is not None: resource['httpRequest'] = http_request + if timestamp is not None: + resource['timestamp'] = _datetime_to_rfc3339(timestamp) + return resource def log_text(self, text, client=None, labels=None, insert_id=None, - severity=None, http_request=None): + severity=None, http_request=None, timestamp=None): """API call: log a text message via a POST request See: @@ -184,15 +191,18 @@ def log_text(self, text, client=None, labels=None, insert_id=None, :type http_request: dict :param http_request: (optional) info about HTTP request associated with the entry + + :type timestamp: :class:`datetime.datetime` + :param timestamp: (optional) timestamp of event being logged. 
""" client = self._require_client(client) entry_resource = self._make_entry_resource( text=text, labels=labels, insert_id=insert_id, severity=severity, - http_request=http_request) + http_request=http_request, timestamp=timestamp) client.logging_api.write_entries([entry_resource]) def log_struct(self, info, client=None, labels=None, insert_id=None, - severity=None, http_request=None): + severity=None, http_request=None, timestamp=None): """API call: log a structured message via a POST request See: @@ -218,15 +228,18 @@ def log_struct(self, info, client=None, labels=None, insert_id=None, :type http_request: dict :param http_request: (optional) info about HTTP request associated with the entry. + + :type timestamp: :class:`datetime.datetime` + :param timestamp: (optional) timestamp of event being logged. """ client = self._require_client(client) entry_resource = self._make_entry_resource( info=info, labels=labels, insert_id=insert_id, severity=severity, - http_request=http_request) + http_request=http_request, timestamp=timestamp) client.logging_api.write_entries([entry_resource]) def log_proto(self, message, client=None, labels=None, insert_id=None, - severity=None, http_request=None): + severity=None, http_request=None, timestamp=None): """API call: log a protobuf message via a POST request See: @@ -252,11 +265,14 @@ def log_proto(self, message, client=None, labels=None, insert_id=None, :type http_request: dict :param http_request: (optional) info about HTTP request associated with the entry. + + :type timestamp: :class:`datetime.datetime` + :param timestamp: (optional) timestamp of event being logged. """ client = self._require_client(client) entry_resource = self._make_entry_resource( message=message, labels=labels, insert_id=insert_id, - severity=severity, http_request=http_request) + severity=severity, http_request=http_request, timestamp=timestamp) client.logging_api.write_entries([entry_resource]) def delete(self, client=None): @@ -340,7 +356,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): self.commit() def log_text(self, text, labels=None, insert_id=None, severity=None, - http_request=None): + http_request=None, timestamp=None): """Add a text entry to be logged during :meth:`commit`. :type text: str @@ -358,12 +374,16 @@ def log_text(self, text, labels=None, insert_id=None, severity=None, :type http_request: dict :param http_request: (optional) info about HTTP request associated with the entry. + + :type timestamp: :class:`datetime.datetime` + :param timestamp: (optional) timestamp of event being logged. """ self.entries.append( - ('text', text, labels, insert_id, severity, http_request)) + ('text', text, labels, insert_id, severity, http_request, + timestamp)) def log_struct(self, info, labels=None, insert_id=None, severity=None, - http_request=None): + http_request=None, timestamp=None): """Add a struct entry to be logged during :meth:`commit`. :type info: dict @@ -381,12 +401,16 @@ def log_struct(self, info, labels=None, insert_id=None, severity=None, :type http_request: dict :param http_request: (optional) info about HTTP request associated with the entry. + + :type timestamp: :class:`datetime.datetime` + :param timestamp: (optional) timestamp of event being logged. 
""" self.entries.append( - ('struct', info, labels, insert_id, severity, http_request)) + ('struct', info, labels, insert_id, severity, http_request, + timestamp)) def log_proto(self, message, labels=None, insert_id=None, severity=None, - http_request=None): + http_request=None, timestamp=None): """Add a protobuf entry to be logged during :meth:`commit`. :type message: protobuf message @@ -404,9 +428,13 @@ def log_proto(self, message, labels=None, insert_id=None, severity=None, :type http_request: dict :param http_request: (optional) info about HTTP request associated with the entry. + + :type timestamp: :class:`datetime.datetime` + :param timestamp: (optional) timestamp of event being logged. """ self.entries.append( - ('proto', message, labels, insert_id, severity, http_request)) + ('proto', message, labels, insert_id, severity, http_request, + timestamp)) def commit(self, client=None): """Send saved log entries as a single API call. @@ -427,7 +455,8 @@ def commit(self, client=None): kwargs['labels'] = self.logger.labels entries = [] - for entry_type, entry, labels, iid, severity, http_req in self.entries: + for (entry_type, entry, labels, iid, severity, http_req, + timestamp) in self.entries: if entry_type == 'text': info = {'textPayload': entry} elif entry_type == 'struct': @@ -446,6 +475,8 @@ def commit(self, client=None): info['severity'] = severity if http_req is not None: info['httpRequest'] = http_req + if timestamp is not None: + info['timestamp'] = timestamp entries.append(info) client.logging_api.write_entries(entries, **kwargs) diff --git a/logging/unit_tests/handlers/transports/test_background_thread.py b/logging/unit_tests/handlers/transports/test_background_thread.py index 5ca76a2f68c3..a21302f251d7 100644 --- a/logging/unit_tests/handlers/transports/test_background_thread.py +++ b/logging/unit_tests/handlers/transports/test_background_thread.py @@ -157,13 +157,6 @@ def commit(self): del self.entries[:] -class _Connection(object): - - def __init__(self): - self.http = None - self.credentials = object() - - class _Logger(object): def __init__(self, name): @@ -178,9 +171,8 @@ class _Client(object): def __init__(self, project, http=None, credentials=None): self.project = project - self.http = http - self.credentials = credentials - self._connection = _Connection() + self._http = http + self._credentials = credentials def logger(self, name): # pylint: disable=unused-argument self._logger = _Logger(name) diff --git a/logging/unit_tests/test__gax.py b/logging/unit_tests/test__gax.py index 4d269236e3e6..e2f158ffd0cc 100644 --- a/logging/unit_tests/test__gax.py +++ b/logging/unit_tests/test__gax.py @@ -1085,7 +1085,7 @@ def test_it(self): from google.cloud.logging._gax import DEFAULT_USER_AGENT creds = object() - client = _Client(creds) + client = mock.Mock(_credentials=creds) channels = [] channel_args = [] channel_obj = object() @@ -1130,7 +1130,7 @@ def test_it(self): from google.cloud.logging._gax import DEFAULT_USER_AGENT creds = object() - client = _Client(creds) + client = mock.Mock(_credentials=creds) channels = [] channel_args = [] channel_obj = object() @@ -1175,7 +1175,7 @@ def test_it(self): from google.cloud.logging._gax import DEFAULT_USER_AGENT creds = object() - client = _Client(creds) + client = mock.Mock(_credentials=creds) channels = [] channel_args = [] channel_obj = object() @@ -1324,15 +1324,3 @@ def delete_log_metric(self, metric_name, options=None): raise GaxError('error') if self._log_metric_not_found: raise GaxError('notfound', self._make_grpc_not_found()) 
- - -class _Connection(object): - - def __init__(self, credentials): - self.credentials = credentials - - -class _Client(object): - - def __init__(self, credentials): - self._connection = _Connection(credentials) diff --git a/logging/unit_tests/test__http.py b/logging/unit_tests/test__http.py index 6fe8c825feef..bfc8d7981e46 100644 --- a/logging/unit_tests/test__http.py +++ b/logging/unit_tests/test__http.py @@ -58,11 +58,11 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): - connection = object() + connection = _Connection() client = _Client(connection) api = self._make_one(client) - self.assertIs(api._connection, connection) self.assertIs(api._client, client) + self.assertEqual(api.api_request, connection.api_request) @staticmethod def _make_timestamp(): @@ -308,11 +308,11 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): - connection = _make_credentials() + connection = _Connection() client = _Client(connection) api = self._make_one(client) - self.assertIs(api._connection, connection) self.assertIs(api._client, client) + self.assertEqual(api.api_request, connection.api_request) def test_list_sinks_no_paging(self): import six diff --git a/logging/unit_tests/test_client.py b/logging/unit_tests/test_client.py index 5e48f7b95367..0e215ad1f510 100644 --- a/logging/unit_tests/test_client.py +++ b/logging/unit_tests/test_client.py @@ -52,11 +52,12 @@ def test_logging_api_wo_gax(self): client = self._make_one(self.PROJECT, credentials=_make_credentials(), use_gax=False) - conn = client._connection = object() + + conn = client._connection = _Connection() api = client.logging_api self.assertIsInstance(api, _LoggingAPI) - self.assertIs(api._connection, conn) + self.assertEqual(api.api_request, conn.api_request) # API instance is cached again = client.logging_api self.assertIs(again, api) @@ -106,11 +107,11 @@ def test_sinks_api_wo_gax(self): self.PROJECT, credentials=_make_credentials(), use_gax=False) - conn = client._connection = object() + conn = client._connection = _Connection() api = client.sinks_api self.assertIsInstance(api, _SinksAPI) - self.assertIs(api._connection, conn) + self.assertEqual(api.api_request, conn.api_request) # API instance is cached again = client.sinks_api self.assertIs(again, api) @@ -146,11 +147,11 @@ def test_metrics_api_wo_gax(self): self.PROJECT, credentials=_make_credentials(), use_gax=False) - conn = client._connection = object() + conn = client._connection = _Connection() api = client.metrics_api self.assertIsInstance(api, _MetricsAPI) - self.assertIs(api._connection, conn) + self.assertEqual(api.api_request, conn.api_request) # API instance is cached again = client.metrics_api self.assertIs(again, api) @@ -600,7 +601,7 @@ def test_get_default_handler_general(self): credentials=credentials, use_gax=False) handler = client.get_default_handler() - deepcopy.assert_called_once_with(client._connection.http) + deepcopy.assert_called_once_with(client._http) self.assertIsInstance(handler, CloudLoggingHandler) @@ -620,7 +621,7 @@ def test_setup_logging(self): credentials=credentials, use_gax=False) client.setup_logging() - deepcopy.assert_called_once_with(client._connection.http) + deepcopy.assert_called_once_with(client._http) setup_logging.assert_called() diff --git a/logging/unit_tests/test_logger.py b/logging/unit_tests/test_logger.py index 15e7e7146b80..48edaf0ed5a4 100644 --- a/logging/unit_tests/test_logger.py +++ b/logging/unit_tests/test_logger.py @@ -125,6 
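[Editor's note] The constructor shape these tests now assert, sketched for a hypothetical helper class rather than copied from the library::

    class _ExampleAPI(object):
        """Illustrative helper mirroring the asserted ctor pattern."""

        def __init__(self, client):
            self._client = client
            # Bind the connection's request method directly; callers no
            # longer reach through a stored ``_connection`` attribute.
            self.api_request = client._connection.api_request
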
+125,29 @@ def test_log_text_w_default_labels(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + def test_log_text_w_timestamp(self): + import datetime + + TEXT = 'TEXT' + TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'textPayload': TEXT, + 'timestamp': '2016-12-31T00:01:02.999999Z', + 'resource': { + 'type': 'global', + }, + }] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._make_one(self.LOGGER_NAME, client=client) + + logger.log_text(TEXT, timestamp=TIMESTAMP) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) + def test_log_text_w_unicode_explicit_client_labels_severity_httpreq(self): TEXT = u'TEXT' DEFAULT_LABELS = {'foo': 'spam'} @@ -243,6 +266,28 @@ def test_log_struct_w_explicit_client_labels_severity_httpreq(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + def test_log_struct_w_timestamp(self): + import datetime + STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} + TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'jsonPayload': STRUCT, + 'timestamp': '2016-12-31T00:01:02.999999Z', + 'resource': { + 'type': 'global', + }, + }] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._make_one(self.LOGGER_NAME, client=client) + + logger.log_struct(STRUCT, timestamp=TIMESTAMP) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) + def test_log_proto_w_implicit_client(self): import json from google.protobuf.json_format import MessageToJson @@ -332,6 +377,31 @@ def test_log_proto_w_explicit_client_labels_severity_httpreq(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + def test_log_proto_w_timestamp(self): + import json + import datetime + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value + message = Struct(fields={'foo': Value(bool_value=True)}) + TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'protoPayload': json.loads(MessageToJson(message)), + 'timestamp': '2016-12-31T00:01:02.999999Z', + 'resource': { + 'type': 'global', + }, + }] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._make_one(self.LOGGER_NAME, client=client) + + logger.log_proto(message, timestamp=TIMESTAMP) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) + def test_delete_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.logging_api = _DummyLoggingAPI() @@ -454,9 +524,10 @@ def test_log_text_defaults(self): batch = self._make_one(logger, client=client) batch.log_text(TEXT) self.assertEqual(batch.entries, - [('text', TEXT, None, None, None, None)]) + [('text', TEXT, None, None, None, None, None)]) def test_log_text_explicit(self): + import datetime TEXT = 'This is the entry text' LABELS = {'foo': 'bar', 'baz': 'qux'} IID = 'IID' @@ -469,13 +540,15 @@ def test_log_text_explicit(self): 'requestUrl': URI, 'status': STATUS, } + TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = 
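[Editor's note] The expected ``timestamp`` strings in the fixtures above are RFC 3339 with a ``Z`` suffix; an equivalent rendering of the test datetime, shown only to make the fixture value concrete::

    import datetime

    TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
    TIMESTAMP.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    # -> '2016-12-31T00:01:02.999999Z'
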
self._make_one(logger, client=client) batch.log_text(TEXT, labels=LABELS, insert_id=IID, severity=SEVERITY, - http_request=REQUEST) - self.assertEqual(batch.entries, - [('text', TEXT, LABELS, IID, SEVERITY, REQUEST)]) + http_request=REQUEST, timestamp=TIMESTAMP) + self.assertEqual( + batch.entries, + [('text', TEXT, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP)]) def test_log_struct_defaults(self): STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'} @@ -483,10 +556,12 @@ def test_log_struct_defaults(self): logger = _Logger() batch = self._make_one(logger, client=client) batch.log_struct(STRUCT) - self.assertEqual(batch.entries, - [('struct', STRUCT, None, None, None, None)]) + self.assertEqual( + batch.entries, + [('struct', STRUCT, None, None, None, None, None)]) def test_log_struct_explicit(self): + import datetime STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'} LABELS = {'foo': 'bar', 'baz': 'qux'} IID = 'IID' @@ -499,13 +574,16 @@ def test_log_struct_explicit(self): 'requestUrl': URI, 'status': STATUS, } + TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = self._make_one(logger, client=client) batch.log_struct(STRUCT, labels=LABELS, insert_id=IID, - severity=SEVERITY, http_request=REQUEST) - self.assertEqual(batch.entries, - [('struct', STRUCT, LABELS, IID, SEVERITY, REQUEST)]) + severity=SEVERITY, http_request=REQUEST, + timestamp=TIMESTAMP) + self.assertEqual( + batch.entries, + [('struct', STRUCT, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP)]) def test_log_proto_defaults(self): from google.protobuf.struct_pb2 import Struct, Value @@ -515,9 +593,10 @@ def test_log_proto_defaults(self): batch = self._make_one(logger, client=client) batch.log_proto(message) self.assertEqual(batch.entries, - [('proto', message, None, None, None, None)]) + [('proto', message, None, None, None, None, None)]) def test_log_proto_explicit(self): + import datetime from google.protobuf.struct_pb2 import Struct, Value message = Struct(fields={'foo': Value(bool_value=True)}) LABELS = {'foo': 'bar', 'baz': 'qux'} @@ -531,24 +610,28 @@ def test_log_proto_explicit(self): 'requestUrl': URI, 'status': STATUS, } + TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = self._make_one(logger, client=client) batch.log_proto(message, labels=LABELS, insert_id=IID, - severity=SEVERITY, http_request=REQUEST) - self.assertEqual(batch.entries, - [('proto', message, LABELS, IID, SEVERITY, REQUEST)]) + severity=SEVERITY, http_request=REQUEST, + timestamp=TIMESTAMP) + self.assertEqual( + batch.entries, + [('proto', message, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP)]) def test_commit_w_invalid_entry_type(self): logger = _Logger() client = _Client(project=self.PROJECT, connection=_make_credentials()) batch = self._make_one(logger, client) - batch.entries.append(('bogus', 'BOGUS', None, None, None, None)) + batch.entries.append(('bogus', 'BOGUS', None, None, None, None, None)) with self.assertRaises(ValueError): batch.commit() def test_commit_w_bound_client(self): import json + import datetime from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value TEXT = 'This is the entry text' @@ -557,23 +640,26 @@ def test_commit_w_bound_client(self): IID1 = 'IID1' IID2 = 'IID2' IID3 = 'IID3' + TIMESTAMP1 = datetime.datetime(2016, 12, 31, 0, 0, 1, 999999) + 
TIMESTAMP2 = datetime.datetime(2016, 12, 31, 0, 0, 2, 999999) + TIMESTAMP3 = datetime.datetime(2016, 12, 31, 0, 0, 3, 999999) RESOURCE = { 'type': 'global', } ENTRIES = [ - {'textPayload': TEXT, 'insertId': IID1}, - {'jsonPayload': STRUCT, 'insertId': IID2}, + {'textPayload': TEXT, 'insertId': IID1, 'timestamp': TIMESTAMP1}, + {'jsonPayload': STRUCT, 'insertId': IID2, 'timestamp': TIMESTAMP2}, {'protoPayload': json.loads(MessageToJson(message)), - 'insertId': IID3}, + 'insertId': IID3, 'timestamp': TIMESTAMP3}, ] client = _Client(project=self.PROJECT) api = client.logging_api = _DummyLoggingAPI() logger = _Logger() batch = self._make_one(logger, client=client) - batch.log_text(TEXT, insert_id=IID1) - batch.log_struct(STRUCT, insert_id=IID2) - batch.log_proto(message, insert_id=IID3) + batch.log_text(TEXT, insert_id=IID1, timestamp=TIMESTAMP1) + batch.log_struct(STRUCT, insert_id=IID2, timestamp=TIMESTAMP2) + batch.log_proto(message, insert_id=IID3, timestamp=TIMESTAMP3) batch.commit() self.assertEqual(list(batch.entries), []) @@ -667,6 +753,7 @@ def test_context_mgr_success(self): (ENTRIES, logger.full_name, RESOURCE, DEFAULT_LABELS)) def test_context_mgr_failure(self): + import datetime from google.protobuf.struct_pb2 import Struct, Value TEXT = 'This is the entry text' STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} @@ -681,20 +768,21 @@ def test_context_mgr_failure(self): 'requestUrl': URI, 'status': STATUS, } + TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) message = Struct(fields={'foo': Value(bool_value=True)}) client = _Client(project=self.PROJECT) api = client.logging_api = _DummyLoggingAPI() logger = _Logger() UNSENT = [ - ('text', TEXT, None, IID, None, None), - ('struct', STRUCT, None, None, SEVERITY, None), - ('proto', message, LABELS, None, None, REQUEST), + ('text', TEXT, None, IID, None, None, TIMESTAMP), + ('struct', STRUCT, None, None, SEVERITY, None, None), + ('proto', message, LABELS, None, None, REQUEST, None), ] batch = self._make_one(logger, client=client) try: with batch as other: - other.log_text(TEXT, insert_id=IID) + other.log_text(TEXT, insert_id=IID, timestamp=TIMESTAMP) other.log_struct(STRUCT, severity=SEVERITY) other.log_proto(message, labels=LABELS, http_request=REQUEST) raise _Bugout() diff --git a/monitoring/google/cloud/monitoring/client.py b/monitoring/google/cloud/monitoring/client.py index de686127c737..ccf3d0866d86 100644 --- a/monitoring/google/cloud/monitoring/client.py +++ b/monitoring/google/cloud/monitoring/client.py @@ -54,20 +54,25 @@ class Client(JSONClient): :param project: The target project. If not passed, falls back to the default inferred from the environment. - :type credentials: :class:`oauth2client.client.OAuth2Credentials` or - :class:`NoneType` - :param credentials: The OAuth2 Credentials to use for the connection - owned by this client. If not passed (and if no ``http`` - object is passed), falls back to the default inferred - from the environment. - - :type http: :class:`httplib2.Http` or class that defines ``request()`` - :param http: An optional HTTP object to make requests. If not passed, an + :type credentials: :class:`~google.auth.credentials.Credentials` + :param credentials: (Optional) The OAuth2 Credentials to use for this + client. If not passed (and if no ``http`` object is + passed), falls back to the default inferred from the + environment. + + :type http: :class:`~httplib2.Http` + :param http: (Optional) HTTP object to make requests. 
Can be any object + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. If not passed, an ``http`` object is created that is bound to the ``credentials`` for the current object. """ - _connection_class = Connection + def __init__(self, project=None, credentials=None, http=None): + super(Client, self).__init__( + project=project, credentials=credentials, http=http) + self._connection = Connection( + credentials=self._credentials, http=self._http) def query(self, metric_type=Query.DEFAULT_METRIC_TYPE, diff --git a/pubsub/google/cloud/pubsub/_gax.py b/pubsub/google/cloud/pubsub/_gax.py index 97b73db687aa..582cb8d0e128 100644 --- a/pubsub/google/cloud/pubsub/_gax.py +++ b/pubsub/google/cloud/pubsub/_gax.py @@ -512,48 +512,54 @@ def _received_message_pb_to_mapping(received_message_pb): } -def make_gax_publisher_api(connection): +def make_gax_publisher_api(credentials=None, host=None): """Create an instance of the GAX Publisher API. - If the ``connection`` is intended for a local emulator, then - an insecure ``channel`` is created pointing at the local - Pub / Sub server. + If the ``credentials`` are omitted, then we create an insecure + ``channel`` pointing at the local Pub / Sub emulator. - :type connection: :class:`~google.cloud.pubsub._http.Connection` - :param connection: The connection that holds configuration details. + :type credentials: :class:`~google.auth.credentials.Credentials` + :param credentials: (Optional) Credentials for getting access + tokens. + + :type host: str + :param host: (Optional) The host for an insecure channel. Only + used if ``credentials`` are omitted. :rtype: :class:`.publisher_client.PublisherClient` - :returns: A publisher API instance with the proper connection - configuration. + :returns: A publisher API instance with the proper channel. """ - if connection.in_emulator: - channel = insecure_channel(connection.host) + if credentials is None: + channel = insecure_channel(host) else: channel = make_secure_channel( - connection.credentials, DEFAULT_USER_AGENT, + credentials, DEFAULT_USER_AGENT, PublisherClient.SERVICE_ADDRESS) return PublisherClient(channel=channel) -def make_gax_subscriber_api(connection): +def make_gax_subscriber_api(credentials=None, host=None): """Create an instance of the GAX Subscriber API. - If the ``connection`` is intended for a local emulator, then - an insecure ``channel`` is created pointing at the local - Pub / Sub server. + If the ``credentials`` are omitted, then we create an insecure + ``channel`` pointing at the local Pub / Sub emulator. + + :type credentials: :class:`~google.auth.credentials.Credentials` + :param credentials: (Optional) Credentials for getting access + tokens. - :type connection: :class:`~google.cloud.pubsub._http.Connection` - :param connection: The connection that holds configuration details. + :type host: str + :param host: (Optional) The host for an insecure channel. Only + used if ``credentials`` are omitted. :rtype: :class:`.subscriber_client.SubscriberClient` - :returns: A subscriber API instance with the proper connection - configuration. + :returns: A subscriber API instance with the proper channel. 
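[Editor's note] From a caller's point of view the monitoring client is constructed exactly as before; a usage sketch with a placeholder project::

    from google.cloud import monitoring

    client = monitoring.Client(project='my-project')  # placeholder project
    # The client now builds its Connection in __init__, so _connection is
    # already in place for the query machinery.
    query = client.query('compute.googleapis.com/instance/cpu/utilization')
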
""" - if connection.in_emulator: - channel = insecure_channel(connection.host) + if credentials is None: + channel = insecure_channel(host) else: channel = make_secure_channel( - connection.credentials, DEFAULT_USER_AGENT, + credentials, DEFAULT_USER_AGENT, SubscriberClient.SERVICE_ADDRESS) return SubscriberClient(channel=channel) diff --git a/pubsub/google/cloud/pubsub/_http.py b/pubsub/google/cloud/pubsub/_http.py index 635c43bdaab5..583413e313b6 100644 --- a/pubsub/google/cloud/pubsub/_http.py +++ b/pubsub/google/cloud/pubsub/_http.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Create / interact with Google Cloud Pub/Sub connections.""" +"""Interact with Google Cloud Pub/Sub via JSON-over-HTTP.""" import base64 import copy @@ -109,7 +109,7 @@ class _PublisherAPI(object): def __init__(self, client): self._client = client - self._connection = client._connection + self.api_request = client._connection.api_request def list_topics(self, project, page_size=None, page_token=None): """API call: list topics for a given project @@ -131,7 +131,7 @@ def list_topics(self, project, page_size=None, page_token=None): :rtype: :class:`~google.cloud.iterator.Iterator` :returns: Iterator of :class:`~google.cloud.pubsub.topic.Topic` - accessible to the current connection. + accessible to the current client. """ extra_params = {} if page_size is not None: @@ -156,8 +156,7 @@ def topic_create(self, topic_path): :rtype: dict :returns: ``Topic`` resource returned from the API. """ - conn = self._connection - return conn.api_request(method='PUT', path='/%s' % (topic_path,)) + return self.api_request(method='PUT', path='/%s' % (topic_path,)) def topic_get(self, topic_path): """API call: retrieve a topic @@ -172,8 +171,7 @@ def topic_get(self, topic_path): :rtype: dict :returns: ``Topic`` resource returned from the API. """ - conn = self._connection - return conn.api_request(method='GET', path='/%s' % (topic_path,)) + return self.api_request(method='GET', path='/%s' % (topic_path,)) def topic_delete(self, topic_path): """API call: delete a topic @@ -185,8 +183,7 @@ def topic_delete(self, topic_path): :param topic_path: the fully-qualified path of the topic, in format ``projects//topics/``. """ - conn = self._connection - conn.api_request(method='DELETE', path='/%s' % (topic_path,)) + self.api_request(method='DELETE', path='/%s' % (topic_path,)) def topic_publish(self, topic_path, messages): """API call: publish one or more messages to a topic @@ -206,9 +203,8 @@ def topic_publish(self, topic_path, messages): """ messages_to_send = copy.deepcopy(messages) _transform_messages_base64(messages_to_send, _base64_unicode) - conn = self._connection data = {'messages': messages_to_send} - response = conn.api_request( + response = self.api_request( method='POST', path='/%s:publish' % (topic_path,), data=data) return response['messageIds'] @@ -257,7 +253,7 @@ class _SubscriberAPI(object): def __init__(self, client): self._client = client - self._connection = client._connection + self.api_request = client._connection.api_request def list_subscriptions(self, project, page_size=None, page_token=None): """API call: list subscriptions for a given project @@ -328,7 +324,6 @@ def subscription_create(self, subscription_path, topic_path, :rtype: dict :returns: ``Subscription`` resource returned from the API. 
""" - conn = self._connection path = '/%s' % (subscription_path,) resource = {'topic': topic_path} @@ -338,7 +333,7 @@ def subscription_create(self, subscription_path, topic_path, if push_endpoint is not None: resource['pushConfig'] = {'pushEndpoint': push_endpoint} - return conn.api_request(method='PUT', path=path, data=resource) + return self.api_request(method='PUT', path=path, data=resource) def subscription_get(self, subscription_path): """API call: retrieve a subscription @@ -354,9 +349,8 @@ def subscription_get(self, subscription_path): :rtype: dict :returns: ``Subscription`` resource returned from the API. """ - conn = self._connection path = '/%s' % (subscription_path,) - return conn.api_request(method='GET', path=path) + return self.api_request(method='GET', path=path) def subscription_delete(self, subscription_path): """API call: delete a subscription @@ -369,9 +363,8 @@ def subscription_delete(self, subscription_path): the fully-qualified path of the subscription, in format ``projects//subscriptions/``. """ - conn = self._connection path = '/%s' % (subscription_path,) - conn.api_request(method='DELETE', path=path) + self.api_request(method='DELETE', path=path) def subscription_modify_push_config(self, subscription_path, push_endpoint): @@ -390,10 +383,9 @@ def subscription_modify_push_config(self, subscription_path, (Optional) URL to which messages will be pushed by the back-end. If not set, the application must pull messages. """ - conn = self._connection path = '/%s:modifyPushConfig' % (subscription_path,) resource = {'pushConfig': {'pushEndpoint': push_endpoint}} - conn.api_request(method='POST', path=path, data=resource) + self.api_request(method='POST', path=path, data=resource) def subscription_pull(self, subscription_path, return_immediately=False, max_messages=1): @@ -419,13 +411,12 @@ def subscription_pull(self, subscription_path, return_immediately=False, :rtype: list of dict :returns: the ``receivedMessages`` element of the response. """ - conn = self._connection path = '/%s:pull' % (subscription_path,) data = { 'returnImmediately': return_immediately, 'maxMessages': max_messages, } - response = conn.api_request(method='POST', path=path, data=data) + response = self.api_request(method='POST', path=path, data=data) messages = response.get('receivedMessages', ()) _transform_messages_base64(messages, base64.b64decode, 'message') return messages @@ -444,12 +435,11 @@ def subscription_acknowledge(self, subscription_path, ack_ids): :type ack_ids: list of string :param ack_ids: ack IDs of messages being acknowledged """ - conn = self._connection path = '/%s:acknowledge' % (subscription_path,) data = { 'ackIds': ack_ids, } - conn.api_request(method='POST', path=path, data=data) + self.api_request(method='POST', path=path, data=data) def subscription_modify_ack_deadline(self, subscription_path, ack_ids, ack_deadline): @@ -470,24 +460,23 @@ def subscription_modify_ack_deadline(self, subscription_path, ack_ids, :param ack_deadline: the deadline (in seconds) by which messages pulled from the back-end must be acknowledged. """ - conn = self._connection path = '/%s:modifyAckDeadline' % (subscription_path,) data = { 'ackIds': ack_ids, 'ackDeadlineSeconds': ack_deadline, } - conn.api_request(method='POST', path=path, data=data) + self.api_request(method='POST', path=path, data=data) class _IAMPolicyAPI(object): """Helper mapping IAM policy-related APIs. - :type connection: :class:`Connection` - :param connection: the connection used to make API requests. 
+ :type client: :class:`~google.cloud.pubsub.client.Client` + :param client: the client used to make API requests. """ - def __init__(self, connection): - self._connection = connection + def __init__(self, client): + self.api_request = client._connection.api_request def get_iam_policy(self, target_path): """API call: fetch the IAM policy for the target @@ -502,9 +491,8 @@ def get_iam_policy(self, target_path): :rtype: dict :returns: the resource returned by the ``getIamPolicy`` API request. """ - conn = self._connection path = '/%s:getIamPolicy' % (target_path,) - return conn.api_request(method='GET', path=path) + return self.api_request(method='GET', path=path) def set_iam_policy(self, target_path, policy): """API call: update the IAM policy for the target @@ -522,10 +510,9 @@ def set_iam_policy(self, target_path, policy): :rtype: dict :returns: the resource returned by the ``setIamPolicy`` API request. """ - conn = self._connection wrapped = {'policy': policy} path = '/%s:setIamPolicy' % (target_path,) - return conn.api_request(method='POST', path=path, data=wrapped) + return self.api_request(method='POST', path=path, data=wrapped) def test_iam_permissions(self, target_path, permissions): """API call: test permissions @@ -543,10 +530,9 @@ def test_iam_permissions(self, target_path, permissions): :rtype: dict :returns: the resource returned by the ``getIamPolicy`` API request. """ - conn = self._connection wrapped = {'permissions': permissions} path = '/%s:testIamPermissions' % (target_path,) - resp = conn.api_request(method='POST', path=path, data=wrapped) + resp = self.api_request(method='POST', path=path, data=wrapped) return resp.get('permissions', []) diff --git a/pubsub/google/cloud/pubsub/client.py b/pubsub/google/cloud/pubsub/client.py index dd323aa25fe5..689113631b12 100644 --- a/pubsub/google/cloud/pubsub/client.py +++ b/pubsub/google/cloud/pubsub/client.py @@ -51,15 +51,16 @@ class Client(JSONClient): passed when creating a topic. If not passed, falls back to the default inferred from the environment. - :type credentials: :class:`oauth2client.client.OAuth2Credentials` or - :class:`NoneType` - :param credentials: The OAuth2 Credentials to use for the connection - owned by this client. If not passed (and if no ``http`` - object is passed), falls back to the default inferred - from the environment. - - :type http: :class:`httplib2.Http` or class that defines ``request()``. - :param http: An optional HTTP object to make requests. If not passed, an + :type credentials: :class:`~google.auth.credentials.Credentials` + :param credentials: (Optional) The OAuth2 Credentials to use for this + client. If not passed (and if no ``http`` object is + passed), falls back to the default inferred from the + environment. + + :type http: :class:`~httplib2.Http` + :param http: (Optional) HTTP object to make requests. Can be any object + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. If not passed, an ``http`` object is created that is bound to the ``credentials`` for the current object. 
@@ -69,23 +70,33 @@ class Client(JSONClient): falls back to the ``GOOGLE_CLOUD_DISABLE_GRPC`` environment variable """ + + _publisher_api = None + _subscriber_api = None + _iam_policy_api = None + def __init__(self, project=None, credentials=None, http=None, use_gax=None): - super(Client, self).__init__(project, credentials, http) + super(Client, self).__init__( + project=project, credentials=credentials, http=http) + self._connection = Connection( + credentials=self._credentials, http=self._http) if use_gax is None: self._use_gax = _USE_GAX else: self._use_gax = use_gax - _connection_class = Connection - _publisher_api = _subscriber_api = _iam_policy_api = None - @property def publisher_api(self): """Helper for publisher-related API calls.""" if self._publisher_api is None: if self._use_gax: - generated = make_gax_publisher_api(self._connection) + if self._connection.in_emulator: + generated = make_gax_publisher_api( + host=self._connection.host) + else: + generated = make_gax_publisher_api( + credentials=self._credentials) self._publisher_api = GAXPublisherAPI(generated, self) else: self._publisher_api = JSONPublisherAPI(self) @@ -96,7 +107,12 @@ def subscriber_api(self): """Helper for subscriber-related API calls.""" if self._subscriber_api is None: if self._use_gax: - generated = make_gax_subscriber_api(self._connection) + if self._connection.in_emulator: + generated = make_gax_subscriber_api( + host=self._connection.host) + else: + generated = make_gax_subscriber_api( + credentials=self._credentials) self._subscriber_api = GAXSubscriberAPI(generated, self) else: self._subscriber_api = JSONSubscriberAPI(self) @@ -106,7 +122,7 @@ def subscriber_api(self): def iam_policy_api(self): """Helper for IAM policy-related API calls.""" if self._iam_policy_api is None: - self._iam_policy_api = _IAMPolicyAPI(self._connection) + self._iam_policy_api = _IAMPolicyAPI(self) return self._iam_policy_api def list_topics(self, page_size=None, page_token=None): diff --git a/pubsub/google/cloud/pubsub/subscription.py b/pubsub/google/cloud/pubsub/subscription.py index c98277d660df..e76abcf94dd8 100644 --- a/pubsub/google/cloud/pubsub/subscription.py +++ b/pubsub/google/cloud/pubsub/subscription.py @@ -509,4 +509,5 @@ def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): - self._subscription.acknowledge(list(self), self._client) + if self: + self._subscription.acknowledge(list(self), self._client) diff --git a/pubsub/unit_tests/test__gax.py b/pubsub/unit_tests/test__gax.py index 4f2037d7c4dd..aeb2cfc229cf 100644 --- a/pubsub/unit_tests/test__gax.py +++ b/pubsub/unit_tests/test__gax.py @@ -900,9 +900,9 @@ def test_subscription_modify_ack_deadline_error(self): @unittest.skipUnless(_HAVE_GAX, 'No gax-python') class Test_make_gax_publisher_api(_Base, unittest.TestCase): - def _call_fut(self, connection): + def _call_fut(self, *args, **kwargs): from google.cloud.pubsub._gax import make_gax_publisher_api - return make_gax_publisher_api(connection) + return make_gax_publisher_api(*args, **kwargs) def test_live_api(self): from google.cloud.pubsub._gax import DEFAULT_USER_AGENT @@ -924,14 +924,12 @@ def make_channel(*args): mock_publisher_api.SERVICE_ADDRESS = host creds = _make_credentials() - connection = _Connection(in_emulator=False, - credentials=creds) patch = mock.patch.multiple( 'google.cloud.pubsub._gax', PublisherClient=mock_publisher_api, make_secure_channel=make_channel) with patch: - result = self._call_fut(connection) + result = self._call_fut(creds) self.assertIs(result, 
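[Editor's note] A sketch of the ``AutoAck`` behavior that the new ``if self:`` guard changes (topic and subscription names are placeholders); when the pull returns nothing, leaving the block no longer issues an empty ``acknowledge`` call::

    from google.cloud import pubsub

    client = pubsub.Client()   # assumes default credentials
    subscription = client.topic('my-topic').subscription('my-subscription')

    with subscription.auto_ack(return_immediately=True) as ack:
        for ack_id, message in list(ack.items()):
            print(message.data)
            # Messages still present in ``ack`` are acknowledged on exit --
            # but now only if at least one message was actually pulled.
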
mock_result) self.assertEqual(channels, [channel_obj]) @@ -953,13 +951,12 @@ def mock_insecure_channel(host): return mock_channel host = 'CURR_HOST:1234' - connection = _Connection(in_emulator=True, host=host) patch = mock.patch.multiple( 'google.cloud.pubsub._gax', PublisherClient=mock_publisher_api, insecure_channel=mock_insecure_channel) with patch: - result = self._call_fut(connection) + result = self._call_fut(host=host) self.assertIs(result, mock_result) self.assertEqual(channels, [mock_channel]) @@ -969,9 +966,9 @@ def mock_insecure_channel(host): @unittest.skipUnless(_HAVE_GAX, 'No gax-python') class Test_make_gax_subscriber_api(_Base, unittest.TestCase): - def _call_fut(self, connection): + def _call_fut(self, *args, **kwargs): from google.cloud.pubsub._gax import make_gax_subscriber_api - return make_gax_subscriber_api(connection) + return make_gax_subscriber_api(*args, **kwargs) def test_live_api(self): from google.cloud.pubsub._gax import DEFAULT_USER_AGENT @@ -993,14 +990,12 @@ def make_channel(*args): mock_subscriber_api.SERVICE_ADDRESS = host creds = _make_credentials() - connection = _Connection(in_emulator=False, - credentials=creds) patch = mock.patch.multiple( 'google.cloud.pubsub._gax', SubscriberClient=mock_subscriber_api, make_secure_channel=make_channel) with patch: - result = self._call_fut(connection) + result = self._call_fut(creds) self.assertIs(result, mock_result) self.assertEqual(channels, [channel_obj]) @@ -1022,13 +1017,12 @@ def mock_insecure_channel(host): return mock_channel host = 'CURR_HOST:1234' - connection = _Connection(in_emulator=True, host=host) patch = mock.patch.multiple( 'google.cloud.pubsub._gax', SubscriberClient=mock_subscriber_api, insecure_channel=mock_insecure_channel) with patch: - result = self._call_fut(connection) + result = self._call_fut(host=host) self.assertIs(result, mock_result) self.assertEqual(channels, [mock_channel]) @@ -1207,15 +1201,6 @@ def __init__(self, received_messages): self.received_messages = received_messages -class _Connection(object): - - def __init__(self, in_emulator=False, host=None, - credentials=None): - self.in_emulator = in_emulator - self.host = host - self.credentials = credentials - - class _Client(object): def __init__(self, project): diff --git a/pubsub/unit_tests/test__http.py b/pubsub/unit_tests/test__http.py index 955fc06a9104..e60ebf480684 100644 --- a/pubsub/unit_tests/test__http.py +++ b/pubsub/unit_tests/test__http.py @@ -111,7 +111,7 @@ def test_ctor(self): client = _Client(connection, self.PROJECT) api = self._make_one(client) self.assertIs(api._client, client) - self.assertIs(api._connection, connection) + self.assertEqual(api.api_request, connection.api_request) def test_list_topics_no_paging(self): from google.cloud.pubsub.topic import Topic @@ -449,8 +449,8 @@ def test_ctor(self): connection = _Connection() client = _Client(connection, self.PROJECT) api = self._make_one(client) - self.assertIs(api._connection, connection) self.assertIs(api._client, client) + self.assertEqual(api.api_request, connection.api_request) def test_list_subscriptions_no_paging(self): from google.cloud.pubsub.client import Client @@ -747,8 +747,9 @@ def _get_target_class(): def test_ctor(self): connection = _Connection() - api = self._make_one(connection) - self.assertIs(api._connection, connection) + client = _Client(connection, None) + api = self._make_one(client) + self.assertEqual(api.api_request, connection.api_request) def test_get_iam_policy(self): from google.cloud.pubsub.iam import OWNER_ROLE @@ -771,7 
+772,8 @@ def test_get_iam_policy(self): ], } connection = _Connection(RETURNED) - api = self._make_one(connection) + client = _Client(connection, None) + api = self._make_one(client) policy = api.get_iam_policy(self.TOPIC_PATH) @@ -802,7 +804,8 @@ def test_set_iam_policy(self): } RETURNED = POLICY.copy() connection = _Connection(RETURNED) - api = self._make_one(connection) + client = _Client(connection, None) + api = self._make_one(client) policy = api.set_iam_policy(self.TOPIC_PATH, POLICY) @@ -822,7 +825,8 @@ def test_test_iam_permissions(self): ALLOWED = ALL_ROLES[1:] RETURNED = {'permissions': ALLOWED} connection = _Connection(RETURNED) - api = self._make_one(connection) + client = _Client(connection, None) + api = self._make_one(client) allowed = api.test_iam_permissions(self.TOPIC_PATH, ALL_ROLES) @@ -841,7 +845,8 @@ def test_test_iam_permissions_missing_key(self): ALL_ROLES = [OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE] RETURNED = {} connection = _Connection(RETURNED) - api = self._make_one(connection) + client = _Client(connection, None) + api = self._make_one(client) allowed = api.test_iam_permissions(self.TOPIC_PATH, ALL_ROLES) diff --git a/pubsub/unit_tests/test_client.py b/pubsub/unit_tests/test_client.py index 3bde01417359..34b4cd4d6b8b 100644 --- a/pubsub/unit_tests/test_client.py +++ b/pubsub/unit_tests/test_client.py @@ -46,11 +46,11 @@ def test_publisher_api_wo_gax(self): project=self.PROJECT, credentials=creds, use_gax=False) - conn = client._connection = object() + conn = client._connection = _Connection() api = client.publisher_api self.assertIsInstance(api, _PublisherAPI) - self.assertIs(api._connection, conn) + self.assertEqual(api.api_request, conn.api_request) # API instance is cached again = client.publisher_api self.assertIs(again, api) @@ -68,7 +68,9 @@ def test_no_gax_ctor(self): api = client.publisher_api self.assertIsInstance(api, _PublisherAPI) - def test_publisher_api_w_gax(self): + def _publisher_api_w_gax_helper(self, emulator=False): + from google.cloud.pubsub import _http + wrapped = object() _called_with = [] @@ -86,6 +88,7 @@ def __init__(self, _wrapped, client): client = self._make_one( project=self.PROJECT, credentials=creds, use_gax=True) + client._connection.in_emulator = emulator patch = mock.patch.multiple( 'google.cloud.pubsub.client', @@ -100,8 +103,17 @@ def __init__(self, _wrapped, client): # API instance is cached again = client.publisher_api self.assertIs(again, api) - args = (client._connection,) - self.assertEqual(_called_with, [(args, {})]) + if emulator: + kwargs = {'host': _http.Connection.API_BASE_URL} + else: + kwargs = {'credentials': creds} + self.assertEqual(_called_with, [((), kwargs)]) + + def test_publisher_api_w_gax(self): + self._publisher_api_w_gax_helper() + + def test_publisher_api_w_gax_and_emulator(self): + self._publisher_api_w_gax_helper(emulator=True) def test_subscriber_api_wo_gax(self): from google.cloud.pubsub._http import _SubscriberAPI @@ -111,16 +123,18 @@ def test_subscriber_api_wo_gax(self): project=self.PROJECT, credentials=creds, use_gax=False) - conn = client._connection = object() + conn = client._connection = _Connection() api = client.subscriber_api self.assertIsInstance(api, _SubscriberAPI) - self.assertIs(api._connection, conn) + self.assertEqual(api.api_request, conn.api_request) # API instance is cached again = client.subscriber_api self.assertIs(again, api) - def test_subscriber_api_w_gax(self): + def _subscriber_api_w_gax_helper(self, emulator=False): + from google.cloud.pubsub import _http + wrapped = 
object() _called_with = [] @@ -138,6 +152,7 @@ def __init__(self, _wrapped, client): client = self._make_one( project=self.PROJECT, credentials=creds, use_gax=True) + client._connection.in_emulator = emulator patch = mock.patch.multiple( 'google.cloud.pubsub.client', @@ -152,17 +167,27 @@ def __init__(self, _wrapped, client): # API instance is cached again = client.subscriber_api self.assertIs(again, api) - args = (client._connection,) - self.assertEqual(_called_with, [(args, {})]) + if emulator: + kwargs = {'host': _http.Connection.API_BASE_URL} + else: + kwargs = {'credentials': creds} + self.assertEqual(_called_with, [((), kwargs)]) + + def test_subscriber_api_w_gax(self): + self._subscriber_api_w_gax_helper() + + def test_subscriber_api_w_gax_and_emulator(self): + self._subscriber_api_w_gax_helper(emulator=True) def test_iam_policy_api(self): from google.cloud.pubsub._http import _IAMPolicyAPI creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = object() + conn = client._connection = _Connection() + api = client.iam_policy_api self.assertIsInstance(api, _IAMPolicyAPI) - self.assertIs(api._connection, conn) + self.assertEqual(api.api_request, conn.api_request) # API instance is cached again = client.iam_policy_api self.assertIs(again, api) diff --git a/pubsub/unit_tests/test_subscription.py b/pubsub/unit_tests/test_subscription.py index 6d4dc1068f2a..6078a3cc70c8 100644 --- a/pubsub/unit_tests/test_subscription.py +++ b/pubsub/unit_tests/test_subscription.py @@ -14,6 +14,8 @@ import unittest +import mock + class TestSubscription(unittest.TestCase): PROJECT = 'PROJECT' @@ -745,6 +747,16 @@ def test___exit___(self): [ACK_ID1, ACK_ID2]) self.assertIs(subscription._ack_client, CLIENT) + def test_empty_ack_no_acknowledge(self): + subscription = mock.Mock(_FauxSubscription) + subscription.pull = lambda *args: [] + + auto_ack = self._make_one(subscription) + with auto_ack: + pass + + subscription.acknowledge.assert_not_called() + class _FauxIAMPolicy(object): diff --git a/resource_manager/google/cloud/resource_manager/client.py b/resource_manager/google/cloud/resource_manager/client.py index dca9446acbb6..a9f78d6a0cb4 100644 --- a/resource_manager/google/cloud/resource_manager/client.py +++ b/resource_manager/google/cloud/resource_manager/client.py @@ -33,20 +33,25 @@ class Client(BaseClient): >>> from google.cloud import resource_manager >>> client = resource_manager.Client() - :type credentials: :class:`oauth2client.client.OAuth2Credentials` or - :class:`NoneType` - :param credentials: The OAuth2 Credentials to use for the connection - owned by this client. If not passed (and if no ``http`` - object is passed), falls back to the default inferred - from the environment. - - :type http: :class:`httplib2.Http` or class that defines ``request()``. - :param http: An optional HTTP object to make requests. If not passed, an + :type credentials: :class:`~google.auth.credentials.Credentials` + :param credentials: (Optional) The OAuth2 Credentials to use for this + client. If not passed (and if no ``http`` object is + passed), falls back to the default inferred from the + environment. + + :type http: :class:`~httplib2.Http` + :param http: (Optional) HTTP object to make requests. Can be any object + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. If not passed, an ``http`` object is created that is bound to the ``credentials`` for the current object. 
""" - _connection_class = Connection + def __init__(self, credentials=None, http=None): + super(Client, self).__init__( + credentials=credentials, http=http) + self._connection = Connection( + credentials=self._credentials, http=self._http) def new_project(self, project_id, name=None, labels=None): """Create a project bound to the current client. diff --git a/runtimeconfig/google/cloud/runtimeconfig/client.py b/runtimeconfig/google/cloud/runtimeconfig/client.py index e6fd120f9ca3..d74e5349db3e 100644 --- a/runtimeconfig/google/cloud/runtimeconfig/client.py +++ b/runtimeconfig/google/cloud/runtimeconfig/client.py @@ -28,20 +28,25 @@ class Client(JSONClient): (Optional) The project which the client acts on behalf of. If not passed, falls back to the default inferred from the environment. - :type credentials: :class:`oauth2client.client.OAuth2Credentials` - :param credentials: - (Optional) The OAuth2 Credentials to use for the connection owned by - this client. If not passed (and if no ``http`` object is passed), falls - back to the default inferred from the environment. - - :type http: :class:`httplib2.Http` or class that defines ``request()``. - :param http: - (Optional) An HTTP object to make requests. If not passed, an ``http`` - object is created that is bound to the ``credentials`` for the current - object. + :type credentials: :class:`~google.auth.credentials.Credentials` + :param credentials: (Optional) The OAuth2 Credentials to use for this + client. If not passed (and if no ``http`` object is + passed), falls back to the default inferred from the + environment. + + :type http: :class:`~httplib2.Http` + :param http: (Optional) HTTP object to make requests. Can be any object + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. If not passed, an + ``http`` object is created that is bound to the + ``credentials`` for the current object. """ - _connection_class = Connection + def __init__(self, project=None, credentials=None, http=None): + super(Client, self).__init__( + project=project, credentials=credentials, http=http) + self._connection = Connection( + credentials=self._credentials, http=self._http) def config(self, config_name): """Factory constructor for config object. diff --git a/scripts/circleci_tagged_pkg.py b/scripts/circleci_tagged_pkg.py index 0d6b7a3485dd..ba7c99f522d8 100644 --- a/scripts/circleci_tagged_pkg.py +++ b/scripts/circleci_tagged_pkg.py @@ -24,11 +24,16 @@ import sys -RE_TXT = r'^((?P[a-z]+)-)?([0-9]+)\.([0-9]+)\.([0-9]+)$' -TAG_RE = re.compile(RE_TXT) +TAG_RE = re.compile(r""" + ^ + (?P + (([a-z]+)-)*) # pkg-name-with-hyphens- (empty allowed) + ([0-9]+)\.([0-9]+)\.([0-9]+) # Version x.y.z (x, y, z all ints) + $ +""", re.VERBOSE) TAG_ENV = 'CIRCLE_TAG' ERROR_MSG = '%s env. var. not set' % (TAG_ENV,) -BAD_TAG_MSG = 'Invalid tag name: %s. Expected ' + RE_TXT +BAD_TAG_MSG = 'Invalid tag name: %s. Expected pkg-name-x.y.z' _SCRIPTS_DIR = os.path.dirname(__file__) ROOT_DIR = os.path.abspath(os.path.join(_SCRIPTS_DIR, '..')) @@ -52,7 +57,7 @@ def main(): if pkg_name is None: print(ROOT_DIR) else: - pkg_dir = pkg_name.replace('-', '_') + pkg_dir = pkg_name.rstrip('-').replace('-', '_') print(os.path.join(ROOT_DIR, pkg_dir)) diff --git a/scripts/make_datastore_grpc.py b/scripts/make_datastore_grpc.py deleted file mode 100644 index b0e67ffc7f62..000000000000 --- a/scripts/make_datastore_grpc.py +++ /dev/null @@ -1,151 +0,0 @@ -# Copyright 2015 Google Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Get the inserted gRPC lines for datastore pb2 file.""" - -import os -import shutil -import subprocess -import sys -import tempfile - -from script_utils import PROJECT_ROOT - - -PROTOS_DIR = os.path.join(PROJECT_ROOT, 'googleapis-pb') -PROTO_PATH = os.path.join(PROTOS_DIR, 'google', 'datastore', - 'v1', 'datastore.proto') -GRPC_ONLY_FILE = os.path.join(PROJECT_ROOT, 'datastore', - 'google', 'cloud', 'datastore', - '_generated', 'datastore_grpc_pb2.py') -GRPCIO_VIRTUALENV = os.getenv('GRPCIO_VIRTUALENV') -if GRPCIO_VIRTUALENV is None: - PYTHON_EXECUTABLE = sys.executable -else: - PYTHON_EXECUTABLE = os.path.join(GRPCIO_VIRTUALENV, 'bin', 'python') -MESSAGE_SNIPPET = ' = _reflection.GeneratedProtocolMessageType(' -IMPORT_TEMPLATE = ( - 'from google.cloud.datastore._generated.datastore_pb2 import %s\n') - - -def get_pb2_contents_with_grpc(): - """Get pb2 lines generated by protoc with gRPC plugin. - - :rtype: list - :returns: A list of lines in the generated file. - """ - temp_dir = tempfile.mkdtemp() - generated_path = os.path.join(temp_dir, 'google', 'datastore', - 'v1', 'datastore_pb2.py') - try: - return_code = subprocess.call([ - PYTHON_EXECUTABLE, - '-m', - 'grpc.tools.protoc', - '--proto_path', - PROTOS_DIR, - '--python_out', - temp_dir, - '--grpc_python_out', - temp_dir, - PROTO_PATH, - ]) - if return_code != 0: - sys.exit(return_code) - with open(generated_path, 'rb') as file_obj: - return file_obj.readlines() - finally: - shutil.rmtree(temp_dir, ignore_errors=True) - - -def get_pb2_contents_without_grpc(): - """Get pb2 lines generated by protoc without gRPC plugin. - - :rtype: list - :returns: A list of lines in the generated file. - """ - temp_dir = tempfile.mkdtemp() - generated_path = os.path.join(temp_dir, 'google', 'datastore', - 'v1', 'datastore_pb2.py') - try: - return_code = subprocess.call([ - PYTHON_EXECUTABLE, - '-m', - 'grpc.tools.protoc', - '--proto_path', - PROTOS_DIR, - '--python_out', - temp_dir, - PROTO_PATH, - ]) - if return_code != 0: - sys.exit(return_code) - with open(generated_path, 'rb') as file_obj: - return file_obj.readlines() - finally: - shutil.rmtree(temp_dir, ignore_errors=True) - - -def get_pb2_grpc_only(): - """Get pb2 lines that are only in gRPC. - - :rtype: list - :returns: A list of lines that are only in the pb2 file - generated with the gRPC plugin. - """ - grpc_contents = get_pb2_contents_with_grpc() - non_grpc_contents = get_pb2_contents_without_grpc() - - grpc_only_lines = [] - curr_non_grpc_line = 0 - for line in grpc_contents: - if line == non_grpc_contents[curr_non_grpc_line]: - curr_non_grpc_line += 1 - else: - grpc_only_lines.append(line) - - return grpc_only_lines - - -def get_pb2_message_types(): - """Get message types defined in datastore pb2 file. - - :rtype: list - :returns: A list of names that are defined as message types. 
- """ - non_grpc_contents = get_pb2_contents_without_grpc() - result = [] - for line in non_grpc_contents: - if MESSAGE_SNIPPET in line: - name, _ = line.split(MESSAGE_SNIPPET) - result.append(name) - - return sorted(result) - - -def main(): - """Write gRPC-only lines to custom module.""" - grpc_only_lines = get_pb2_grpc_only() - with open(GRPC_ONLY_FILE, 'wb') as file_obj: - # First add imports for public objects in the original. - file_obj.write('# BEGIN: Imports from datastore_pb2\n') - for name in get_pb2_message_types(): - import_line = IMPORT_TEMPLATE % (name,) - file_obj.write(import_line) - file_obj.write('# END: Imports from datastore_pb2\n') - file_obj.write(''.join(grpc_only_lines)) - - -if __name__ == '__main__': - main() diff --git a/scripts/rewrite_imports.py b/scripts/rewrite_imports.py index b066f0c91026..2b2e1ac2146c 100644 --- a/scripts/rewrite_imports.py +++ b/scripts/rewrite_imports.py @@ -27,8 +27,6 @@ # Bigtable v2 'google.bigtable.v2': 'google.cloud.bigtable._generated', 'google.bigtable.admin.v2': 'google.cloud.bigtable._generated', - # Datastore v1 - 'google.datastore.v1': 'google.cloud.datastore._generated', } diff --git a/scripts/run_pylint.py b/scripts/run_pylint.py index 5cbb45c57e3d..7b0b9e06991c 100644 --- a/scripts/run_pylint.py +++ b/scripts/run_pylint.py @@ -35,7 +35,6 @@ IGNORED_DIRECTORIES = [ os.path.join('bigtable', 'google', 'cloud', 'bigtable', '_generated'), - os.path.join('datastore', 'google', 'cloud', 'datastore', '_generated'), ] IGNORED_FILES = [ os.path.join('docs', 'conf.py'), diff --git a/speech/google/cloud/speech/client.py b/speech/google/cloud/speech/client.py index fde4ce89309f..828e74c119b1 100644 --- a/speech/google/cloud/speech/client.py +++ b/speech/google/cloud/speech/client.py @@ -35,15 +35,16 @@ class Client(BaseClient): """Client to bundle configuration needed for API requests. - :type credentials: :class:`oauth2client.client.OAuth2Credentials` or - :class:`NoneType` - :param credentials: The OAuth2 Credentials to use for the connection - owned by this client. If not passed (and if no ``http`` - object is passed), falls back to the default inferred - from the environment. - - :type http: :class:`httplib2.Http` or class that defines ``request()``. - :param http: An optional HTTP object to make requests. If not passed, an + :type credentials: :class:`~google.auth.credentials.Credentials` + :param credentials: (Optional) The OAuth2 Credentials to use for this + client. If not passed (and if no ``http`` object is + passed), falls back to the default inferred from the + environment. + + :type http: :class:`~httplib2.Http` + :param http: (Optional) HTTP object to make requests. Can be any object + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. If not passed, an ``http`` object is created that is bound to the ``credentials`` for the current object. @@ -53,16 +54,18 @@ class Client(BaseClient): falls back to the ``GOOGLE_CLOUD_DISABLE_GRPC`` environment variable """ + + _speech_api = None + def __init__(self, credentials=None, http=None, use_gax=None): super(Client, self).__init__(credentials=credentials, http=http) + self._connection = Connection( + credentials=self._credentials, http=self._http) if use_gax is None: self._use_gax = _USE_GAX else: self._use_gax = use_gax - _connection_class = Connection - _speech_api = None - def sample(self, content=None, source_uri=None, encoding=None, sample_rate=None): """Factory: construct Sample to use when making recognize requests. 
diff --git a/storage/google/cloud/storage/client.py b/storage/google/cloud/storage/client.py index 166f702f309d..d7697a4323eb 100644 --- a/storage/google/cloud/storage/client.py +++ b/storage/google/cloud/storage/client.py @@ -32,25 +32,26 @@ class Client(JSONClient): passed when creating a topic. If not passed, falls back to the default inferred from the environment. - :type credentials: :class:`oauth2client.client.OAuth2Credentials` or - :class:`NoneType` - :param credentials: The OAuth2 Credentials to use for the connection - owned by this client. If not passed (and if no ``http`` - object is passed), falls back to the default inferred - from the environment. - - :type http: :class:`httplib2.Http` or class that defines ``request()``. - :param http: An optional HTTP object to make requests. If not passed, an + :type credentials: :class:`~google.auth.credentials.Credentials` + :param credentials: (Optional) The OAuth2 Credentials to use for this + client. If not passed (and if no ``http`` object is + passed), falls back to the default inferred from the + environment. + + :type http: :class:`~httplib2.Http` + :param http: (Optional) HTTP object to make requests. Can be any object + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. If not passed, an ``http`` object is created that is bound to the ``credentials`` for the current object. """ - _connection_class = Connection - def __init__(self, project=None, credentials=None, http=None): self._base_connection = None super(Client, self).__init__(project=project, credentials=credentials, http=http) + self._connection = Connection( + credentials=self._credentials, http=self._http) self._batch_stack = _LocalStack() @property diff --git a/system_tests/bigquery.py b/system_tests/bigquery.py index 3c266c760440..622d63bc3788 100644 --- a/system_tests/bigquery.py +++ b/system_tests/bigquery.py @@ -482,9 +482,21 @@ def _job_done(instance): def test_sync_query_w_standard_sql_types(self): import datetime from google.cloud._helpers import UTC + from google.cloud.bigquery._helpers import ScalarQueryParameter + from google.cloud.bigquery._helpers import StructQueryParameter naive = datetime.datetime(2016, 12, 5, 12, 41, 9) stamp = "%s %s" % (naive.date().isoformat(), naive.time().isoformat()) zoned = naive.replace(tzinfo=UTC) + zoned_param = ScalarQueryParameter( + name='zoned', type_='TIMESTAMP', value=zoned) + question = 'What is the answer to life, the universe, and everything?' 
+ question_param = ScalarQueryParameter( + name='question', type_='STRING', value=question) + answer = 42 + answer_param = ScalarQueryParameter( + name='answer', type_='INT64', value=answer) + struct_param = StructQueryParameter( + 'hitchhiker', question_param, answer_param) EXAMPLES = [ { 'sql': 'SELECT 1', @@ -553,9 +565,21 @@ def test_sync_query_w_standard_sql_types(self): 'sql': 'SELECT ARRAY(SELECT STRUCT([1, 2]))', 'expected': [{u'_field_1': [1, 2]}], }, + { + 'sql': 'SELECT @zoned', + 'expected': zoned, + 'query_parameters': [zoned_param], + }, + { + 'sql': 'SELECT (@hitchhiker.question, @hitchhiker.answer)', + 'expected': ({'_field_1': question, '_field_2': answer}), + 'query_parameters': [struct_param], + }, ] for example in EXAMPLES: - query = Config.CLIENT.run_sync_query(example['sql']) + query = Config.CLIENT.run_sync_query( + example['sql'], + query_parameters=example.get('query_parameters', ())) query.use_legacy_sql = False query.run() self.assertEqual(len(query.rows), 1) diff --git a/system_tests/datastore.py b/system_tests/datastore.py index d7da651a3624..4aff9626ac6d 100644 --- a/system_tests/datastore.py +++ b/system_tests/datastore.py @@ -529,11 +529,8 @@ def _submodules(self): pkg_iter = pkgutil.iter_modules(datastore.__path__) result = [] for _, mod_name, ispkg in pkg_iter: - if mod_name == '_generated': - self.assertTrue(ispkg) - else: - self.assertFalse(ispkg) - result.append(mod_name) + self.assertFalse(ispkg) + result.append(mod_name) self.assertNotIn('__init__', result) return result diff --git a/tox.ini b/tox.ini index 388cfeef859d..043c424d05a1 100644 --- a/tox.ini +++ b/tox.ini @@ -241,7 +241,6 @@ passenv = exclude = docs/conf.py, bigtable/google/cloud/bigtable/_generated/*, - datastore/google/cloud/datastore/_generated/* verbose = 1 [testenv:lint] diff --git a/translate/README.rst b/translate/README.rst index 9a5c0f16e0fd..a85374ff5298 100644 --- a/translate/README.rst +++ b/translate/README.rst @@ -1,9 +1,9 @@ -Python Client for Google Translate -================================== +Python Client for Google Cloud Translation +========================================== - Python idiomatic client for `Google Translate`_ + Python idiomatic client for `Google Cloud Translation`_ -.. _Google Translate: https://cloud.google.com/translate/ +.. _Google Cloud Translation: https://cloud.google.com/translate/ |pypi| |versions| @@ -32,13 +32,13 @@ the ``google-cloud-*`` libraries to be helpful. Using the API ------------- -With the Google `Translate`_ API (`Translate API docs`_), you can +With the Google Cloud `Translation`_ API (`Translation API docs`_), you can dynamically translate text between thousands of language pairs. -.. _Translate: https://cloud.google.com/translate/ -.. _Translate API docs: https://cloud.google.com/translate/docs/apis +.. _Translation: https://cloud.google.com/translate/ +.. _Translation API docs: https://cloud.google.com/translate/docs/apis -See the ``google-cloud-python`` API Translate `Documentation`_ to learn +See the ``google-cloud-python`` API Translation `Documentation`_ to learn how to translate text using this library. .. 
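[Editor's note] A standalone sketch of the named-parameter flow exercised by the new system-test cases (client construction assumes default credentials)::

    from google.cloud import bigquery
    from google.cloud.bigquery._helpers import ScalarQueryParameter

    client = bigquery.Client()
    answer_param = ScalarQueryParameter(name='answer', type_='INT64', value=42)

    query = client.run_sync_query(
        'SELECT @answer', query_parameters=[answer_param])
    query.use_legacy_sql = False
    query.run()
    # query.rows is expected to hold a single row containing 42.
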
|pypi| image:: https://img.shields.io/pypi/v/google-cloud-translate.svg diff --git a/translate/google/cloud/translate/__init__.py b/translate/google/cloud/translate/__init__.py index 83ff5f114435..006ac866ab8d 100644 --- a/translate/google/cloud/translate/__init__.py +++ b/translate/google/cloud/translate/__init__.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Google Cloud Translate API wrapper.""" +"""Google Cloud Translation API wrapper.""" from google.cloud.translate.client import BASE from google.cloud.translate.client import Client diff --git a/translate/google/cloud/translate/client.py b/translate/google/cloud/translate/client.py index ea5359007bb6..c8c915118cab 100644 --- a/translate/google/cloud/translate/client.py +++ b/translate/google/cloud/translate/client.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Client for interacting with the Google Cloud Translate API.""" +"""Client for interacting with the Google Cloud Translation API.""" import six @@ -40,23 +40,26 @@ class Client(BaseClient): translations and language names. (Defaults to :data:`ENGLISH_ISO_639`.) - :type credentials: :class:`oauth2client.client.OAuth2Credentials` - :param credentials: (Optional) The OAuth2 Credentials to use for the - connection owned by this client. If not passed (and - if no ``http`` object is passed), falls back to the - default inferred from the environment. - - :type http: :class:`httplib2.Http` or class that defines ``request()``. - :param http: (Optional) HTTP object to make requests. If not - passed, an :class:`httplib.Http` object is created. + :type credentials: :class:`~google.auth.credentials.Credentials` + :param credentials: (Optional) The OAuth2 Credentials to use for this + client. If not passed (and if no ``http`` object is + passed), falls back to the default inferred from the + environment. + + :type http: :class:`~httplib2.Http` + :param http: (Optional) HTTP object to make requests. Can be any object + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. If not passed, an + ``http`` object is created that is bound to the + ``credentials`` for the current object. """ - _connection_class = Connection - def __init__(self, target_language=ENGLISH_ISO_639, credentials=None, http=None): self.target_language = target_language super(Client, self).__init__(credentials=credentials, http=http) + self._connection = Connection( + credentials=self._credentials, http=self._http) def get_languages(self, target_language=None): """Get list of supported languages for translation. @@ -158,8 +161,7 @@ def translate(self, values, target_language=None, format_=None, model=None): """Translate a string or list of strings. - See: https://cloud.google.com/translate/v2/\ - translating-text-with-rest + See: https://cloud.google.com/translate/docs/translating-text :type values: str or list :param values: String or list of strings to translate. diff --git a/translate/google/cloud/translate/connection.py b/translate/google/cloud/translate/connection.py index 0582bcd22a3f..518e7e424a93 100644 --- a/translate/google/cloud/translate/connection.py +++ b/translate/google/cloud/translate/connection.py @@ -12,13 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
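To make the rename concrete, here is a minimal usage sketch of the Translation client. The sample strings are invented; the constructor and the ``translate()``/``get_languages()`` signatures follow ``translate/client.py`` as modified in this diff::

    # Sketch: basic use of the renamed Cloud Translation client.
    # Credentials and the HTTP object fall back to environment defaults.
    from google.cloud import translate

    client = translate.Client(target_language='en')

    languages = client.get_languages()        # supported languages
    results = client.translate(
        ['Bonjour le monde', 'Hola mundo'], format_='text')
    print(languages, results)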
-"""Create / interact with Google Cloud Translate connections.""" +"""Create / interact with Google Cloud Translation connections.""" from google.cloud import _http class Connection(_http.JSONConnection): - """A connection to Google Cloud Translate via the JSON REST API.""" + """A connection to Google Cloud Translation API via the JSON REST API.""" API_BASE_URL = 'https://translation.googleapis.com' """The base of the API call URL.""" diff --git a/translate/setup.py b/translate/setup.py index 68351f553edb..d57f326631c3 100644 --- a/translate/setup.py +++ b/translate/setup.py @@ -56,7 +56,7 @@ setup( name='google-cloud-translate', version='0.22.0', - description='Python Client for Google Translate', + description='Python Client for Google Cloud Translation API', long_description=README, namespace_packages=[ 'google', diff --git a/vision/google/cloud/vision/_http.py b/vision/google/cloud/vision/_http.py new file mode 100644 index 000000000000..8bacdf01bb70 --- /dev/null +++ b/vision/google/cloud/vision/_http.py @@ -0,0 +1,78 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""HTTP Client for interacting with the Google Cloud Vision API.""" + +from google.cloud.vision.feature import Feature + + +class _HTTPVisionAPI(object): + """Vision API for interacting with the JSON/HTTP version of Vision + + :type client: :class:`~google.cloud.core.client.Client` + :param client: Instance of ``Client`` object. + """ + + def __init__(self, client): + self._client = client + self._connection = client._connection + + def annotate(self, image, features): + """Annotate an image to discover it's attributes. + + :type image: :class:`~google.cloud.vision.image.Image` + :param image: A instance of ``Image``. + + :type features: list of :class:`~google.cloud.vision.feature.Feature` + :param features: The type of detection that the Vision API should + use to determine image attributes. Pricing is + based on the number of Feature Types. + + See: https://cloud.google.com/vision/docs/pricing + :rtype: dict + :returns: List of annotations. + """ + request = _make_request(image, features) + + data = {'requests': [request]} + api_response = self._connection.api_request( + method='POST', path='/images:annotate', data=data) + responses = api_response.get('responses') + return responses[0] + + +def _make_request(image, features): + """Prepare request object to send to Vision API. + + :type image: :class:`~google.cloud.vision.image.Image` + :param image: Instance of ``Image``. + + :type features: list of :class:`~google.cloud.vision.feature.Feature` + :param features: Either a list of ``Feature`` instances or a single + instance of ``Feature``. + + :rtype: dict + :returns: Dictionary prepared to send to the Vision API. 
+ """ + if isinstance(features, Feature): + features = [features] + + feature_check = (isinstance(feature, Feature) for feature in features) + if not any(feature_check): + raise TypeError('Feature or list of Feature classes are required.') + + return { + 'image': image.as_dict(), + 'features': [feature.as_dict() for feature in features], + } diff --git a/vision/google/cloud/vision/client.py b/vision/google/cloud/vision/client.py index 4d071bf8ad32..219db00e1c86 100644 --- a/vision/google/cloud/vision/client.py +++ b/vision/google/cloud/vision/client.py @@ -14,49 +14,10 @@ """Client for interacting with the Google Cloud Vision API.""" - from google.cloud.client import JSONClient from google.cloud.vision.connection import Connection -from google.cloud.vision.feature import Feature from google.cloud.vision.image import Image - - -class VisionRequest(object): - """Request container with image and features information to annotate. - - :type features: list of :class:`~gcoud.vision.feature.Feature`. - :param features: The features that dictate which annotations to run. - - :type image: bytes - :param image: Either Google Cloud Storage URI or raw byte stream of image. - """ - def __init__(self, image, features): - self._features = [] - self._image = image - - if isinstance(features, list): - self._features.extend(features) - elif isinstance(features, Feature): - self._features.append(features) - else: - raise TypeError('Feature or list of Feature classes are required.') - - def as_dict(self): - """Dictionary representation of Image.""" - return { - 'image': self.image.as_dict(), - 'features': [feature.as_dict() for feature in self.features] - } - - @property - def features(self): - """List of Feature objects.""" - return self._features - - @property - def image(self): - """Image object containing image content.""" - return self._image +from google.cloud.vision._http import _HTTPVisionAPI class Client(JSONClient): @@ -67,44 +28,26 @@ class Client(JSONClient): If not passed, falls back to the default inferred from the environment. - :type credentials: :class:`oauth2client.client.OAuth2Credentials` or - :class:`NoneType` - :param credentials: The OAuth2 Credentials to use for the connection - owned by this client. If not passed (and if no ``http`` - object is passed), falls back to the default inferred - from the environment. + :type credentials: :class:`~google.auth.credentials.Credentials` + :param credentials: (Optional) The OAuth2 Credentials to use for this + client. If not passed (and if no ``http`` object is + passed), falls back to the default inferred from the + environment. - :type http: :class:`httplib2.Http` or class that defines ``request()``. - :param http: An optional HTTP object to make requests. If not passed, an + :type http: :class:`~httplib2.Http` + :param http: (Optional) HTTP object to make requests. Can be any object + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. If not passed, an ``http`` object is created that is bound to the ``credentials`` for the current object. """ + _vision_api_internal = None - _connection_class = Connection - - def annotate(self, image, features): - """Annotate an image to discover it's attributes. - - :type image: str - :param image: A string which can be a URL, a Google Cloud Storage path, - or a byte stream of the image. - - :type features: list of :class:`~google.cloud.vision.feature.Feature` - :param features: The type of detection that the Vision API should - use to determine image attributes. 
Pricing is - based on the number of Feature Types. - - See: https://cloud.google.com/vision/docs/pricing - :rtype: dict - :returns: List of annotations. - """ - request = VisionRequest(image, features) - - data = {'requests': [request.as_dict()]} - response = self._connection.api_request( - method='POST', path='/images:annotate', data=data) - - return response['responses'][0] + def __init__(self, project=None, credentials=None, http=None): + super(Client, self).__init__( + project=project, credentials=credentials, http=http) + self._connection = Connection( + credentials=self._credentials, http=self._http) def image(self, content=None, filename=None, source_uri=None): """Get instance of Image using current client. @@ -123,3 +66,14 @@ def image(self, content=None, filename=None, source_uri=None): """ return Image(client=self, content=content, filename=filename, source_uri=source_uri) + + @property + def _vision_api(self): + """Proxy property that selects which transport handles Vision annotate requests. + + :rtype: :class:`~google.cloud.vision._http._HTTPVisionAPI` + :returns: Instance of ``_HTTPVisionAPI`` used to make requests. + """ + if self._vision_api_internal is None: + self._vision_api_internal = _HTTPVisionAPI(self) + return self._vision_api_internal diff --git a/vision/google/cloud/vision/image.py b/vision/google/cloud/vision/image.py index d094b6702537..f9a429e24e0b 100644 --- a/vision/google/cloud/vision/image.py +++ b/vision/google/cloud/vision/image.py @@ -109,7 +109,7 @@ def _detect_annotation(self, features): :class:`~google.cloud.vision.color.ImagePropertiesAnnotation`, :class:`~google.cloud.vision.sage.SafeSearchAnnotation`, """ - results = self.client.annotate(self, features) + results = self.client._vision_api.annotate(self, features) return Annotations.from_api_repr(results) def detect(self, features): diff --git a/vision/unit_tests/test__http.py b/vision/unit_tests/test__http.py new file mode 100644 index 000000000000..d6c237d9747c --- /dev/null +++ b/vision/unit_tests/test__http.py @@ -0,0 +1,65 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
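The client-side wiring above routes every annotation request through the lazily created ``_HTTPVisionAPI``. A rough sketch of that call path follows; it assumes ``vision.Client`` is exported from the package ``__init__`` as usual, and the GCS URI is hypothetical::

    # Sketch: how an annotate call flows through the new HTTP transport layer.
    from google.cloud import vision
    from google.cloud.vision.feature import Feature, FeatureTypes

    client = vision.Client()
    image = client.image(source_uri='gs://my-bucket/face.jpg')  # hypothetical URI
    features = [Feature(feature_type=FeatureTypes.FACE_DETECTION, max_results=3)]

    # Image detection helpers delegate to client._vision_api.annotate(), which
    # builds the JSON body via _make_request() and POSTs it to /images:annotate
    # through the client's Connection.
    response = client._vision_api.annotate(image, features)
    print(response)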
+ +import base64 +import unittest + + +IMAGE_CONTENT = b'/9j/4QNURXhpZgAASUkq' +PROJECT = 'PROJECT' +B64_IMAGE_CONTENT = base64.b64encode(IMAGE_CONTENT).decode('ascii') + + +class TestVisionRequest(unittest.TestCase): + @staticmethod + def _get_target_function(): + from google.cloud.vision._http import _make_request + return _make_request + + def _call_fut(self, *args, **kw): + return self._get_target_function()(*args, **kw) + + def test_call_vision_request(self): + from google.cloud.vision.feature import Feature + from google.cloud.vision.feature import FeatureTypes + from google.cloud.vision.image import Image + + client = object() + image = Image(client, content=IMAGE_CONTENT) + feature = Feature(feature_type=FeatureTypes.FACE_DETECTION, + max_results=3) + request = self._call_fut(image, feature) + self.assertEqual(request['image'].get('content'), B64_IMAGE_CONTENT) + features = request['features'] + self.assertEqual(len(features), 1) + feature = features[0] + print(feature) + self.assertEqual(feature['type'], FeatureTypes.FACE_DETECTION) + self.assertEqual(feature['maxResults'], 3) + + def test_call_vision_request_with_not_feature(self): + from google.cloud.vision.image import Image + + client = object() + image = Image(client, content=IMAGE_CONTENT) + with self.assertRaises(TypeError): + self._call_fut(image, 'nonsensefeature') + + def test_call_vision_request_with_list_bad_features(self): + from google.cloud.vision.image import Image + + client = object() + image = Image(client, content=IMAGE_CONTENT) + with self.assertRaises(TypeError): + self._call_fut(image, ['nonsensefeature']) diff --git a/vision/unit_tests/test_client.py b/vision/unit_tests/test_client.py index af6e23a6b01a..e49d81c6cfa0 100644 --- a/vision/unit_tests/test_client.py +++ b/vision/unit_tests/test_client.py @@ -43,6 +43,18 @@ def test_ctor(self): client = self._make_one(project=PROJECT, credentials=creds) self.assertEqual(client.project, PROJECT) + def test_annotate_with_preset_api(self): + credentials = _make_credentials() + client = self._make_one(project=PROJECT, credentials=credentials) + client._connection = _Connection() + + api = mock.Mock() + api.annotate.return_value = mock.sentinel.annotated + + client._vision_api_internal = api + client._vision_api.annotate() + api.annotate.assert_called_once_with() + def test_face_annotation(self): from google.cloud.vision.feature import Feature, FeatureTypes from unit_tests._fixtures import FACE_DETECTION_RESPONSE @@ -70,7 +82,7 @@ def test_face_annotation(self): features = [Feature(feature_type=FeatureTypes.FACE_DETECTION, max_results=3)] image = client.image(content=IMAGE_CONTENT) - response = client.annotate(image, features) + response = client._vision_api.annotate(image, features) self.assertEqual(REQUEST, client._connection._requested[0]['data']) @@ -433,30 +445,6 @@ def test_image_properties_no_results(self): self.assertEqual(len(image_properties), 0) -class TestVisionRequest(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.vision.client import VisionRequest - return VisionRequest - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_make_vision_request(self): - from google.cloud.vision.feature import Feature, FeatureTypes - - feature = Feature(feature_type=FeatureTypes.FACE_DETECTION, - max_results=3) - vision_request = self._make_one(IMAGE_CONTENT, feature) - self.assertEqual(IMAGE_CONTENT, vision_request.image) - self.assertEqual(FeatureTypes.FACE_DETECTION, - 
vision_request.features[0].feature_type) - - def test_make_vision_request_with_bad_feature(self): - with self.assertRaises(TypeError): - self._make_one(IMAGE_CONTENT, 'nonsensefeature') - - class _Connection(object): def __init__(self, *responses):
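For reference, the request shape these tests pin down can be reproduced directly. This condensed sketch mirrors the assertions in ``vision/unit_tests/test__http.py``; the image bytes are the same placeholder used there::

    # Sketch: the payload _make_request builds for one image and one feature.
    import base64

    from google.cloud.vision._http import _make_request
    from google.cloud.vision.feature import Feature, FeatureTypes
    from google.cloud.vision.image import Image

    IMAGE_CONTENT = b'/9j/4QNURXhpZgAASUkq'
    image = Image(object(), content=IMAGE_CONTENT)
    feature = Feature(feature_type=FeatureTypes.FACE_DETECTION, max_results=3)

    request = _make_request(image, feature)  # a bare Feature is wrapped in a list
    assert request['image']['content'] == base64.b64encode(
        IMAGE_CONTENT).decode('ascii')
    assert request['features'][0]['type'] == FeatureTypes.FACE_DETECTION
    assert request['features'][0]['maxResults'] == 3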