diff --git a/language/google/cloud/gapic/__init__.py b/language/google/cloud/gapic/__init__.py deleted file mode 100644 index de40ea7ca058..000000000000 --- a/language/google/cloud/gapic/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__import__('pkg_resources').declare_namespace(__name__) diff --git a/language/google/cloud/gapic/language/__init__.py b/language/google/cloud/gapic/language/__init__.py deleted file mode 100644 index de40ea7ca058..000000000000 --- a/language/google/cloud/gapic/language/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__import__('pkg_resources').declare_namespace(__name__) diff --git a/language/google/cloud/gapic/language/v1/language_service_client.py b/language/google/cloud/gapic/language/v1/language_service_client.py deleted file mode 100644 index b413de49fa83..000000000000 --- a/language/google/cloud/gapic/language/v1/language_service_client.py +++ /dev/null @@ -1,284 +0,0 @@ -# Copyright 2017, Google Inc. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# EDITING INSTRUCTIONS -# This file was generated from the file -# https://github.com/google/googleapis/blob/master/google/cloud/language/v1/language_service.proto, -# and updates to that file get reflected here through a refresh process. -# For the short term, the refresh process will only be runnable by Google engineers. -# -# The only allowed edits are to method and file documentation. A 3-way -# merge preserves those additions if the generated source changes. -"""Accesses the google.cloud.language.v1 LanguageService API.""" - -import collections -import json -import os -import pkg_resources -import platform - -from google.gax import api_callable -from google.gax import config -from google.gax import path_template -import google.gax - -from google.cloud.gapic.language.v1 import enums -from google.cloud.proto.language.v1 import language_service_pb2 - - -class LanguageServiceClient(object): - """ - Provides text analysis operations such as sentiment analysis and entity - recognition. - """ - - SERVICE_ADDRESS = 'language.googleapis.com' - """The default address of the service.""" - - DEFAULT_SERVICE_PORT = 443 - """The default port of the service.""" - - # The scopes needed to make gRPC calls to all of the methods defined in - # this service - _ALL_SCOPES = ('https://www.googleapis.com/auth/cloud-platform', ) - - def __init__(self, - service_path=SERVICE_ADDRESS, - port=DEFAULT_SERVICE_PORT, - channel=None, - credentials=None, - ssl_credentials=None, - scopes=None, - client_config=None, - app_name=None, - app_version='', - lib_name=None, - lib_version='', - metrics_headers=()): - """Constructor. - - Args: - service_path (string): The domain name of the API remote host. - port (int): The port on which to connect to the remote host. - channel (:class:`grpc.Channel`): A ``Channel`` instance through - which to make calls. - credentials (object): The authorization credentials to attach to - requests. These credentials identify this application to the - service. 
- ssl_credentials (:class:`grpc.ChannelCredentials`): A - ``ChannelCredentials`` instance for use with an SSL-enabled - channel. - scopes (list[string]): A list of OAuth2 scopes to attach to requests. - client_config (dict): - A dictionary for call options for each method. See - :func:`google.gax.construct_settings` for the structure of - this data. Falls back to the default config if not specified - or the specified config is missing data points. - app_name (string): The name of the application calling - the service. Recommended for analytics purposes. - app_version (string): The version of the application calling - the service. Recommended for analytics purposes. - lib_name (string): The API library software used for calling - the service. (Unless you are writing an API client itself, - leave this as default.) - lib_version (string): The API library software version used - for calling the service. (Unless you are writing an API client - itself, leave this as default.) - metrics_headers (dict): A dictionary of values for tracking - client library metrics. Ultimately serializes to a string - (e.g. 'foo/1.2.3 bar/3.14.1'). This argument should be - considered private. - - Returns: - A LanguageServiceClient object. - """ - # Unless the calling application specifically requested - # OAuth scopes, request everything. - if scopes is None: - scopes = self._ALL_SCOPES - - # Initialize an empty client config, if none is set. - if client_config is None: - client_config = {} - - # Initialize metrics_headers as an ordered dictionary - # (cuts down on cardinality of the resulting string slightly). - metrics_headers = collections.OrderedDict(metrics_headers) - metrics_headers['gl-python'] = platform.python_version() - - # The library may or may not be set, depending on what is - # calling this client. Newer client libraries set the library name - # and version. - if lib_name: - metrics_headers[lib_name] = lib_version - - # Finally, track the GAPIC package version. - metrics_headers['gapic'] = pkg_resources.get_distribution( - 'google-cloud-language', ).version - - # Load the configuration defaults. - default_client_config = json.loads( - pkg_resources.resource_string( - __name__, 'language_service_client_config.json').decode()) - defaults = api_callable.construct_settings( - 'google.cloud.language.v1.LanguageService', - default_client_config, - client_config, - config.STATUS_CODE_NAMES, - metrics_headers=metrics_headers, ) - self.language_service_stub = config.create_stub( - language_service_pb2.LanguageServiceStub, - channel=channel, - service_path=service_path, - service_port=port, - credentials=credentials, - scopes=scopes, - ssl_credentials=ssl_credentials) - - self._analyze_sentiment = api_callable.create_api_call( - self.language_service_stub.AnalyzeSentiment, - settings=defaults['analyze_sentiment']) - self._analyze_entities = api_callable.create_api_call( - self.language_service_stub.AnalyzeEntities, - settings=defaults['analyze_entities']) - self._analyze_syntax = api_callable.create_api_call( - self.language_service_stub.AnalyzeSyntax, - settings=defaults['analyze_syntax']) - self._annotate_text = api_callable.create_api_call( - self.language_service_stub.AnnotateText, - settings=defaults['annotate_text']) - - # Service calls - def analyze_sentiment(self, document, encoding_type=None, options=None): - """ - Analyzes the sentiment of the provided text. 
- - Example: - >>> from google.cloud.gapic.language.v1 import language_service_client - >>> from google.cloud.proto.language.v1 import language_service_pb2 - >>> client = language_service_client.LanguageServiceClient() - >>> document = language_service_pb2.Document() - >>> response = client.analyze_sentiment(document) - - Args: - document (:class:`google.cloud.proto.language.v1.language_service_pb2.Document`): Input document. - encoding_type (enum :class:`google.cloud.gapic.language.v1.enums.EncodingType`): The encoding type used by the API to calculate sentence offsets. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.cloud.proto.language.v1.language_service_pb2.AnalyzeSentimentResponse` instance. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - request = language_service_pb2.AnalyzeSentimentRequest( - document=document, encoding_type=encoding_type) - return self._analyze_sentiment(request, options) - - def analyze_entities(self, document, encoding_type=None, options=None): - """ - Finds named entities (currently proper names and common nouns) in the text - along with entity types, salience, mentions for each entity, and - other properties. - - Example: - >>> from google.cloud.gapic.language.v1 import language_service_client - >>> from google.cloud.proto.language.v1 import language_service_pb2 - >>> client = language_service_client.LanguageServiceClient() - >>> document = language_service_pb2.Document() - >>> response = client.analyze_entities(document) - - Args: - document (:class:`google.cloud.proto.language.v1.language_service_pb2.Document`): Input document. - encoding_type (enum :class:`google.cloud.gapic.language.v1.enums.EncodingType`): The encoding type used by the API to calculate offsets. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.cloud.proto.language.v1.language_service_pb2.AnalyzeEntitiesResponse` instance. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - request = language_service_pb2.AnalyzeEntitiesRequest( - document=document, encoding_type=encoding_type) - return self._analyze_entities(request, options) - - def analyze_syntax(self, document, encoding_type=None, options=None): - """ - Analyzes the syntax of the text and provides sentence boundaries and - tokenization along with part of speech tags, dependency trees, and other - properties. - - Example: - >>> from google.cloud.gapic.language.v1 import language_service_client - >>> from google.cloud.proto.language.v1 import language_service_pb2 - >>> client = language_service_client.LanguageServiceClient() - >>> document = language_service_pb2.Document() - >>> response = client.analyze_syntax(document) - - Args: - document (:class:`google.cloud.proto.language.v1.language_service_pb2.Document`): Input document. - encoding_type (enum :class:`google.cloud.gapic.language.v1.enums.EncodingType`): The encoding type used by the API to calculate offsets. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.cloud.proto.language.v1.language_service_pb2.AnalyzeSyntaxResponse` instance. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. 
- :exc:`ValueError` if the parameters are invalid. - """ - request = language_service_pb2.AnalyzeSyntaxRequest( - document=document, encoding_type=encoding_type) - return self._analyze_syntax(request, options) - - def annotate_text(self, - document, - features, - encoding_type=None, - options=None): - """ - A convenience method that provides all the features that analyzeSentiment, - analyzeEntities, and analyzeSyntax provide in one call. - - Example: - >>> from google.cloud.gapic.language.v1 import language_service_client - >>> from google.cloud.proto.language.v1 import language_service_pb2 - >>> client = language_service_client.LanguageServiceClient() - >>> document = language_service_pb2.Document() - >>> features = language_service_pb2.AnnotateTextRequest.Features() - >>> response = client.annotate_text(document, features) - - Args: - document (:class:`google.cloud.proto.language.v1.language_service_pb2.Document`): Input document. - features (:class:`google.cloud.proto.language.v1.language_service_pb2.AnnotateTextRequest.Features`): The enabled features. - encoding_type (enum :class:`google.cloud.gapic.language.v1.enums.EncodingType`): The encoding type used by the API to calculate offsets. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.cloud.proto.language.v1.language_service_pb2.AnnotateTextResponse` instance. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - request = language_service_pb2.AnnotateTextRequest( - document=document, features=features, encoding_type=encoding_type) - return self._annotate_text(request, options) diff --git a/language/google/cloud/gapic/language/v1/language_service_client_config.json b/language/google/cloud/gapic/language/v1/language_service_client_config.json deleted file mode 100644 index 202d5b0d427b..000000000000 --- a/language/google/cloud/gapic/language/v1/language_service_client_config.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "interfaces": { - "google.cloud.language.v1.LanguageService": { - "retry_codes": { - "idempotent": [ - "DEADLINE_EXCEEDED", - "UNAVAILABLE" - ], - "non_idempotent": [] - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - } - }, - "methods": { - "AnalyzeSentiment": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "AnalyzeEntities": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "AnalyzeSyntax": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "AnnotateText": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - } - } - } - } -} diff --git a/language/google/cloud/gapic/language/v1beta2/language_service_client.py b/language/google/cloud/gapic/language/v1beta2/language_service_client.py deleted file mode 100644 index 0150ca4f4b83..000000000000 --- a/language/google/cloud/gapic/language/v1beta2/language_service_client.py +++ /dev/null @@ -1,320 +0,0 @@ -# Copyright 2017, Google Inc. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# EDITING INSTRUCTIONS -# This file was generated from the file -# https://github.com/google/googleapis/blob/master/google/cloud/language/v1beta2/language_service.proto, -# and updates to that file get reflected here through a refresh process. -# For the short term, the refresh process will only be runnable by Google engineers. -# -# The only allowed edits are to method and file documentation. A 3-way -# merge preserves those additions if the generated source changes. -"""Accesses the google.cloud.language.v1beta2 LanguageService API.""" - -import collections -import json -import os -import pkg_resources -import platform - -from google.gax import api_callable -from google.gax import config -from google.gax import path_template -import google.gax - -from google.cloud.gapic.language.v1beta2 import enums -from google.cloud.proto.language.v1beta2 import language_service_pb2 - - -class LanguageServiceClient(object): - """ - Provides text analysis operations such as sentiment analysis and entity - recognition. - """ - - SERVICE_ADDRESS = 'language.googleapis.com' - """The default address of the service.""" - - DEFAULT_SERVICE_PORT = 443 - """The default port of the service.""" - - # The scopes needed to make gRPC calls to all of the methods defined in - # this service - _ALL_SCOPES = ('https://www.googleapis.com/auth/cloud-platform', ) - - def __init__(self, - service_path=SERVICE_ADDRESS, - port=DEFAULT_SERVICE_PORT, - channel=None, - credentials=None, - ssl_credentials=None, - scopes=None, - client_config=None, - app_name=None, - app_version='', - lib_name=None, - lib_version='', - metrics_headers=()): - """Constructor. - - Args: - service_path (string): The domain name of the API remote host. - port (int): The port on which to connect to the remote host. - channel (:class:`grpc.Channel`): A ``Channel`` instance through - which to make calls. - credentials (object): The authorization credentials to attach to - requests. These credentials identify this application to the - service. - ssl_credentials (:class:`grpc.ChannelCredentials`): A - ``ChannelCredentials`` instance for use with an SSL-enabled - channel. - scopes (list[string]): A list of OAuth2 scopes to attach to requests. - client_config (dict): - A dictionary for call options for each method. See - :func:`google.gax.construct_settings` for the structure of - this data. Falls back to the default config if not specified - or the specified config is missing data points. - app_name (string): The name of the application calling - the service. Recommended for analytics purposes. - app_version (string): The version of the application calling - the service. Recommended for analytics purposes. - lib_name (string): The API library software used for calling - the service. (Unless you are writing an API client itself, - leave this as default.) - lib_version (string): The API library software version used - for calling the service. 
(Unless you are writing an API client - itself, leave this as default.) - metrics_headers (dict): A dictionary of values for tracking - client library metrics. Ultimately serializes to a string - (e.g. 'foo/1.2.3 bar/3.14.1'). This argument should be - considered private. - - Returns: - A LanguageServiceClient object. - """ - # Unless the calling application specifically requested - # OAuth scopes, request everything. - if scopes is None: - scopes = self._ALL_SCOPES - - # Initialize an empty client config, if none is set. - if client_config is None: - client_config = {} - - # Initialize metrics_headers as an ordered dictionary - # (cuts down on cardinality of the resulting string slightly). - metrics_headers = collections.OrderedDict(metrics_headers) - metrics_headers['gl-python'] = platform.python_version() - - # The library may or may not be set, depending on what is - # calling this client. Newer client libraries set the library name - # and version. - if lib_name: - metrics_headers[lib_name] = lib_version - - # Finally, track the GAPIC package version. - metrics_headers['gapic'] = pkg_resources.get_distribution( - 'google-cloud-language', ).version - - # Load the configuration defaults. - default_client_config = json.loads( - pkg_resources.resource_string( - __name__, 'language_service_client_config.json').decode()) - defaults = api_callable.construct_settings( - 'google.cloud.language.v1beta2.LanguageService', - default_client_config, - client_config, - config.STATUS_CODE_NAMES, - metrics_headers=metrics_headers, ) - self.language_service_stub = config.create_stub( - language_service_pb2.LanguageServiceStub, - channel=channel, - service_path=service_path, - service_port=port, - credentials=credentials, - scopes=scopes, - ssl_credentials=ssl_credentials) - - self._analyze_sentiment = api_callable.create_api_call( - self.language_service_stub.AnalyzeSentiment, - settings=defaults['analyze_sentiment']) - self._analyze_entities = api_callable.create_api_call( - self.language_service_stub.AnalyzeEntities, - settings=defaults['analyze_entities']) - self._analyze_entity_sentiment = api_callable.create_api_call( - self.language_service_stub.AnalyzeEntitySentiment, - settings=defaults['analyze_entity_sentiment']) - self._analyze_syntax = api_callable.create_api_call( - self.language_service_stub.AnalyzeSyntax, - settings=defaults['analyze_syntax']) - self._annotate_text = api_callable.create_api_call( - self.language_service_stub.AnnotateText, - settings=defaults['annotate_text']) - - # Service calls - def analyze_sentiment(self, document, encoding_type=None, options=None): - """ - Analyzes the sentiment of the provided text. - - Example: - >>> from google.cloud.gapic.language.v1beta2 import language_service_client - >>> from google.cloud.proto.language.v1beta2 import language_service_pb2 - >>> client = language_service_client.LanguageServiceClient() - >>> document = language_service_pb2.Document() - >>> response = client.analyze_sentiment(document) - - Args: - document (:class:`google.cloud.proto.language.v1beta2.language_service_pb2.Document`): Input document. - encoding_type (enum :class:`google.cloud.gapic.language.v1beta2.enums.EncodingType`): The encoding type used by the API to calculate sentence offsets for the - sentence sentiment. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.cloud.proto.language.v1beta2.language_service_pb2.AnalyzeSentimentResponse` instance. 
- - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - request = language_service_pb2.AnalyzeSentimentRequest( - document=document, encoding_type=encoding_type) - return self._analyze_sentiment(request, options) - - def analyze_entities(self, document, encoding_type=None, options=None): - """ - Finds named entities (currently proper names and common nouns) in the text - along with entity types, salience, mentions for each entity, and - other properties. - - Example: - >>> from google.cloud.gapic.language.v1beta2 import language_service_client - >>> from google.cloud.proto.language.v1beta2 import language_service_pb2 - >>> client = language_service_client.LanguageServiceClient() - >>> document = language_service_pb2.Document() - >>> response = client.analyze_entities(document) - - Args: - document (:class:`google.cloud.proto.language.v1beta2.language_service_pb2.Document`): Input document. - encoding_type (enum :class:`google.cloud.gapic.language.v1beta2.enums.EncodingType`): The encoding type used by the API to calculate offsets. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.cloud.proto.language.v1beta2.language_service_pb2.AnalyzeEntitiesResponse` instance. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - request = language_service_pb2.AnalyzeEntitiesRequest( - document=document, encoding_type=encoding_type) - return self._analyze_entities(request, options) - - def analyze_entity_sentiment(self, - document, - encoding_type=None, - options=None): - """ - Finds entities, similar to ``AnalyzeEntities`` in the text and analyzes - sentiment associated with each entity and its mentions. - - Example: - >>> from google.cloud.gapic.language.v1beta2 import language_service_client - >>> from google.cloud.proto.language.v1beta2 import language_service_pb2 - >>> client = language_service_client.LanguageServiceClient() - >>> document = language_service_pb2.Document() - >>> response = client.analyze_entity_sentiment(document) - - Args: - document (:class:`google.cloud.proto.language.v1beta2.language_service_pb2.Document`): Input document. - encoding_type (enum :class:`google.cloud.gapic.language.v1beta2.enums.EncodingType`): The encoding type used by the API to calculate offsets. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.cloud.proto.language.v1beta2.language_service_pb2.AnalyzeEntitySentimentResponse` instance. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - request = language_service_pb2.AnalyzeEntitySentimentRequest( - document=document, encoding_type=encoding_type) - return self._analyze_entity_sentiment(request, options) - - def analyze_syntax(self, document, encoding_type=None, options=None): - """ - Analyzes the syntax of the text and provides sentence boundaries and - tokenization along with part of speech tags, dependency trees, and other - properties. 
- - Example: - >>> from google.cloud.gapic.language.v1beta2 import language_service_client - >>> from google.cloud.proto.language.v1beta2 import language_service_pb2 - >>> client = language_service_client.LanguageServiceClient() - >>> document = language_service_pb2.Document() - >>> response = client.analyze_syntax(document) - - Args: - document (:class:`google.cloud.proto.language.v1beta2.language_service_pb2.Document`): Input document. - encoding_type (enum :class:`google.cloud.gapic.language.v1beta2.enums.EncodingType`): The encoding type used by the API to calculate offsets. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.cloud.proto.language.v1beta2.language_service_pb2.AnalyzeSyntaxResponse` instance. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - request = language_service_pb2.AnalyzeSyntaxRequest( - document=document, encoding_type=encoding_type) - return self._analyze_syntax(request, options) - - def annotate_text(self, - document, - features, - encoding_type=None, - options=None): - """ - A convenience method that provides all syntax, sentiment, and entity - features in one call. - - Example: - >>> from google.cloud.gapic.language.v1beta2 import language_service_client - >>> from google.cloud.proto.language.v1beta2 import language_service_pb2 - >>> client = language_service_client.LanguageServiceClient() - >>> document = language_service_pb2.Document() - >>> features = language_service_pb2.AnnotateTextRequest.Features() - >>> response = client.annotate_text(document, features) - - Args: - document (:class:`google.cloud.proto.language.v1beta2.language_service_pb2.Document`): Input document. - features (:class:`google.cloud.proto.language.v1beta2.language_service_pb2.AnnotateTextRequest.Features`): The enabled features. - encoding_type (enum :class:`google.cloud.gapic.language.v1beta2.enums.EncodingType`): The encoding type used by the API to calculate offsets. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.cloud.proto.language.v1beta2.language_service_pb2.AnnotateTextResponse` instance. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. 
- """ - request = language_service_pb2.AnnotateTextRequest( - document=document, features=features, encoding_type=encoding_type) - return self._annotate_text(request, options) diff --git a/language/google/cloud/gapic/language/v1beta2/language_service_client_config.json b/language/google/cloud/gapic/language/v1beta2/language_service_client_config.json deleted file mode 100644 index 8018f8a7bbf5..000000000000 --- a/language/google/cloud/gapic/language/v1beta2/language_service_client_config.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "interfaces": { - "google.cloud.language.v1beta2.LanguageService": { - "retry_codes": { - "idempotent": [ - "DEADLINE_EXCEEDED", - "UNAVAILABLE" - ], - "non_idempotent": [] - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - } - }, - "methods": { - "AnalyzeSentiment": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "AnalyzeEntities": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "AnalyzeEntitySentiment": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "AnalyzeSyntax": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "AnnotateText": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - } - } - } - } -} diff --git a/language/google/cloud/language_v1/__init__.py b/language/google/cloud/language_v1/__init__.py index a5666eadb5c7..15519e67f4fc 100644 --- a/language/google/cloud/language_v1/__init__.py +++ b/language/google/cloud/language_v1/__init__.py @@ -14,17 +14,17 @@ from __future__ import absolute_import -from google.cloud.gapic.language.v1 import language_service_client as lsc -from google.cloud.gapic.language.v1 import enums - from google.cloud.language_v1 import types +from google.cloud.language_v1.gapic import enums +from google.cloud.language_v1.gapic import language_service_client -LanguageServiceClient = lsc.LanguageServiceClient +class LanguageServiceClient(language_service_client.LanguageServiceClient): + __doc__ = language_service_client.LanguageServiceClient.__doc__ + enums = enums __all__ = ( 'enums', - 'LanguageServiceClient', 'types', -) + 'LanguageServiceClient', ) diff --git a/language/google/cloud/gapic/language/v1/__init__.py b/language/google/cloud/language_v1/gapic/__init__.py similarity index 100% rename from language/google/cloud/gapic/language/v1/__init__.py rename to language/google/cloud/language_v1/gapic/__init__.py diff --git a/language/google/cloud/gapic/language/v1beta2/enums.py b/language/google/cloud/language_v1/gapic/enums.py similarity index 94% rename from language/google/cloud/gapic/language/v1beta2/enums.py rename to language/google/cloud/language_v1/gapic/enums.py index 2b53e4d913bb..689033aa4db2 100644 --- a/language/google/cloud/gapic/language/v1beta2/enums.py +++ b/language/google/cloud/language_v1/gapic/enums.py @@ -1,10 +1,10 @@ -# Copyright 2016 Google Inc. All rights reserved. +# Copyright 2017, Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -24,16 +24,16 @@ class EncodingType(object): Attributes: NONE (int): If ``EncodingType`` is not specified, encoding-dependent information (such as - ``begin_offset``) will be set at ``-1``. + ``begin_offset``) will be set at ``-1``. UTF8 (int): Encoding-dependent information (such as ``begin_offset``) is calculated based - on the UTF-8 encoding of the input. C++ and Go are examples of languages - that use this encoding natively. + on the UTF-8 encoding of the input. C++ and Go are examples of languages + that use this encoding natively. UTF16 (int): Encoding-dependent information (such as ``begin_offset``) is calculated based - on the UTF-16 encoding of the input. Java and Javascript are examples of - languages that use this encoding natively. + on the UTF-16 encoding of the input. Java and Javascript are examples of + languages that use this encoding natively. UTF32 (int): Encoding-dependent information (such as ``begin_offset``) is calculated based - on the UTF-32 encoding of the input. Python is an example of a language - that uses this encoding natively. + on the UTF-32 encoding of the input. Python is an example of a language + that uses this encoding natively. """ NONE = 0 UTF8 = 1 @@ -292,7 +292,7 @@ class Reciprocity(object): Attributes: RECIPROCITY_UNKNOWN (int): Reciprocity is not applicable in the analyzed language or is not - predicted. + predicted. RECIPROCAL (int): Reciprocal NON_RECIPROCAL (int): Non-reciprocal """ @@ -421,6 +421,12 @@ class Label(object): NUMC (int): Compound of numeric modifier COP (int): Copula DISLOCATED (int): Dislocated relation (for fronted/topicalized elements) + ASP (int): Aspect marker + GMOD (int): Genitive modifier + GOBJ (int): Genitive object + INFMOD (int): Infinitival modifier + MES (int): Measure + NCOMP (int): Nominal complement of a noun """ UNKNOWN = 0 ABBREV = 1 @@ -499,6 +505,12 @@ class Label(object): NUMC = 74 COP = 75 DISLOCATED = 76 + ASP = 77 + GMOD = 78 + GOBJ = 79 + INFMOD = 80 + MES = 81 + NCOMP = 82 class EntityMention(object): diff --git a/language/google/cloud/language_v1/gapic/language_service_client.py b/language/google/cloud/language_v1/gapic/language_service_client.py new file mode 100644 index 000000000000..f3bd9f6069cf --- /dev/null +++ b/language/google/cloud/language_v1/gapic/language_service_client.py @@ -0,0 +1,289 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# EDITING INSTRUCTIONS +# This file was generated from the file +# https://github.com/google/googleapis/blob/master/google/cloud/language/v1/language_service.proto, +# and updates to that file get reflected here through a refresh process. +# For the short term, the refresh process will only be runnable by Google engineers. 
+# +# The only allowed edits are to method and file documentation. A 3-way +# merge preserves those additions if the generated source changes. +"""Accesses the google.cloud.language.v1 LanguageService API.""" + +import collections +import json +import os +import pkg_resources +import platform + +from google.gax import api_callable +from google.gax import config +from google.gax import path_template +import google.gax + +from google.cloud.language_v1.gapic import enums +from google.cloud.language_v1.gapic import language_service_client_config +from google.cloud.language_v1.proto import language_service_pb2 + + +class LanguageServiceClient(object): + """ + Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + + SERVICE_ADDRESS = 'language.googleapis.com' + """The default address of the service.""" + + DEFAULT_SERVICE_PORT = 443 + """The default port of the service.""" + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _ALL_SCOPES = ('https://www.googleapis.com/auth/cloud-platform', ) + + def __init__(self, + channel=None, + credentials=None, + ssl_credentials=None, + scopes=None, + client_config=None, + lib_name=None, + lib_version='', + metrics_headers=()): + """Constructor. + + Args: + channel (~grpc.Channel): A ``Channel`` instance through + which to make calls. + credentials (~google.auth.credentials.Credentials): The authorization + credentials to attach to requests. These credentials identify this + application to the service. + ssl_credentials (~grpc.ChannelCredentials): A + ``ChannelCredentials`` instance for use with an SSL-enabled + channel. + scopes (Sequence[str]): A list of OAuth2 scopes to attach to requests. + client_config (dict): + A dictionary for call options for each method. See + :func:`google.gax.construct_settings` for the structure of + this data. Falls back to the default config if not specified + or the specified config is missing data points. + lib_name (str): The API library software used for calling + the service. (Unless you are writing an API client itself, + leave this as default.) + lib_version (str): The API library software version used + for calling the service. (Unless you are writing an API client + itself, leave this as default.) + metrics_headers (dict): A dictionary of values for tracking + client library metrics. Ultimately serializes to a string + (e.g. 'foo/1.2.3 bar/3.14.1'). This argument should be + considered private. + + Returns: LanguageServiceClient + """ + # Unless the calling application specifically requested + # OAuth scopes, request everything. + if scopes is None: + scopes = self._ALL_SCOPES + + # Initialize an empty client config, if none is set. + if client_config is None: + client_config = {} + + # Initialize metrics_headers as an ordered dictionary + # (cuts down on cardinality of the resulting string slightly). + metrics_headers = collections.OrderedDict(metrics_headers) + metrics_headers['gl-python'] = platform.python_version() + + # The library may or may not be set, depending on what is + # calling this client. Newer client libraries set the library name + # and version. + if lib_name: + metrics_headers[lib_name] = lib_version + + # Finally, track the GAPIC package version. + metrics_headers['gapic'] = pkg_resources.get_distribution( + 'google-cloud-language', ).version + + # Load the configuration defaults. 
+ defaults = api_callable.construct_settings( + 'google.cloud.language.v1.LanguageService', + language_service_client_config.config, + client_config, + config.STATUS_CODE_NAMES, + metrics_headers=metrics_headers, ) + self.language_service_stub = config.create_stub( + language_service_pb2.LanguageServiceStub, + channel=channel, + service_path=self.SERVICE_ADDRESS, + service_port=self.DEFAULT_SERVICE_PORT, + credentials=credentials, + scopes=scopes, + ssl_credentials=ssl_credentials) + + self._analyze_sentiment = api_callable.create_api_call( + self.language_service_stub.AnalyzeSentiment, + settings=defaults['analyze_sentiment']) + self._analyze_entities = api_callable.create_api_call( + self.language_service_stub.AnalyzeEntities, + settings=defaults['analyze_entities']) + self._analyze_syntax = api_callable.create_api_call( + self.language_service_stub.AnalyzeSyntax, + settings=defaults['analyze_syntax']) + self._annotate_text = api_callable.create_api_call( + self.language_service_stub.AnnotateText, + settings=defaults['annotate_text']) + + # Service calls + def analyze_sentiment(self, document, encoding_type=None, options=None): + """ + Analyzes the sentiment of the provided text. + + Example: + >>> from google.cloud import language_v1 + >>> + >>> client = language_v1.LanguageServiceClient() + >>> + >>> document = {} + >>> + >>> response = client.analyze_sentiment(document) + + Args: + document (Union[dict, ~google.cloud.language_v1.types.Document]): Input document. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.language_v1.types.Document` + encoding_type (~google.cloud.language_v1.types.EncodingType): The encoding type used by the API to calculate sentence offsets. + options (~google.gax.CallOptions): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`~google.cloud.language_v1.types.AnalyzeSentimentResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + request = language_service_pb2.AnalyzeSentimentRequest( + document=document, encoding_type=encoding_type) + return self._analyze_sentiment(request, options) + + def analyze_entities(self, document, encoding_type=None, options=None): + """ + Finds named entities (currently proper names and common nouns) in the text + along with entity types, salience, mentions for each entity, and + other properties. + + Example: + >>> from google.cloud import language_v1 + >>> + >>> client = language_v1.LanguageServiceClient() + >>> + >>> document = {} + >>> + >>> response = client.analyze_entities(document) + + Args: + document (Union[dict, ~google.cloud.language_v1.types.Document]): Input document. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.language_v1.types.Document` + encoding_type (~google.cloud.language_v1.types.EncodingType): The encoding type used by the API to calculate offsets. + options (~google.gax.CallOptions): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`~google.cloud.language_v1.types.AnalyzeEntitiesResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. 
+ """ + request = language_service_pb2.AnalyzeEntitiesRequest( + document=document, encoding_type=encoding_type) + return self._analyze_entities(request, options) + + def analyze_syntax(self, document, encoding_type=None, options=None): + """ + Analyzes the syntax of the text and provides sentence boundaries and + tokenization along with part of speech tags, dependency trees, and other + properties. + + Example: + >>> from google.cloud import language_v1 + >>> + >>> client = language_v1.LanguageServiceClient() + >>> + >>> document = {} + >>> + >>> response = client.analyze_syntax(document) + + Args: + document (Union[dict, ~google.cloud.language_v1.types.Document]): Input document. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.language_v1.types.Document` + encoding_type (~google.cloud.language_v1.types.EncodingType): The encoding type used by the API to calculate offsets. + options (~google.gax.CallOptions): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`~google.cloud.language_v1.types.AnalyzeSyntaxResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + request = language_service_pb2.AnalyzeSyntaxRequest( + document=document, encoding_type=encoding_type) + return self._analyze_syntax(request, options) + + def annotate_text(self, + document, + features, + encoding_type=None, + options=None): + """ + A convenience method that provides all the features that analyzeSentiment, + analyzeEntities, and analyzeSyntax provide in one call. + + Example: + >>> from google.cloud import language_v1 + >>> + >>> client = language_v1.LanguageServiceClient() + >>> + >>> document = {} + >>> features = {} + >>> + >>> response = client.annotate_text(document, features) + + Args: + document (Union[dict, ~google.cloud.language_v1.types.Document]): Input document. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.language_v1.types.Document` + features (Union[dict, ~google.cloud.language_v1.types.Features]): The enabled features. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.language_v1.types.Features` + encoding_type (~google.cloud.language_v1.types.EncodingType): The encoding type used by the API to calculate offsets. + options (~google.gax.CallOptions): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`~google.cloud.language_v1.types.AnnotateTextResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. 
+ """ + request = language_service_pb2.AnnotateTextRequest( + document=document, features=features, encoding_type=encoding_type) + return self._annotate_text(request, options) diff --git a/language/google/cloud/language_v1/gapic/language_service_client_config.py b/language/google/cloud/language_v1/gapic/language_service_client_config.py new file mode 100644 index 000000000000..3c5406c33132 --- /dev/null +++ b/language/google/cloud/language_v1/gapic/language_service_client_config.py @@ -0,0 +1,43 @@ +config = { + "interfaces": { + "google.cloud.language.v1.LanguageService": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "non_idempotent": [] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "AnalyzeSentiment": { + "timeout_millis": 30000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "AnalyzeEntities": { + "timeout_millis": 30000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "AnalyzeSyntax": { + "timeout_millis": 30000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "AnnotateText": { + "timeout_millis": 30000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/language/google/cloud/gapic/language/v1beta2/__init__.py b/language/google/cloud/language_v1/proto/__init__.py similarity index 100% rename from language/google/cloud/gapic/language/v1beta2/__init__.py rename to language/google/cloud/language_v1/proto/__init__.py diff --git a/language/google/cloud/proto/language/v1/language_service_pb2.py b/language/google/cloud/language_v1/proto/language_service_pb2.py similarity index 78% rename from language/google/cloud/proto/language/v1/language_service_pb2.py rename to language/google/cloud/language_v1/proto/language_service_pb2.py index 98d59f56272c..8c2525bc0439 100644 --- a/language/google/cloud/proto/language/v1/language_service_pb2.py +++ b/language/google/cloud/language_v1/proto/language_service_pb2.py @@ -1,5 +1,5 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/proto/language/v1/language_service.proto +# source: google/cloud/language_v1/proto/language_service.proto import sys _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) @@ -18,10 +18,10 @@ DESCRIPTOR = _descriptor.FileDescriptor( - name='google/cloud/proto/language/v1/language_service.proto', + name='google/cloud/language_v1/proto/language_service.proto', package='google.cloud.language.v1', syntax='proto3', - serialized_pb=_b('\n5google/cloud/proto/language/v1/language_service.proto\x12\x18google.cloud.language.v1\x1a\x1cgoogle/api/annotations.proto\"\xc3\x01\n\x08\x44ocument\x12\x35\n\x04type\x18\x01 \x01(\x0e\x32\'.google.cloud.language.v1.Document.Type\x12\x11\n\x07\x63ontent\x18\x02 \x01(\tH\x00\x12\x19\n\x0fgcs_content_uri\x18\x03 \x01(\tH\x00\x12\x10\n\x08language\x18\x04 \x01(\t\"6\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nPLAIN_TEXT\x10\x01\x12\x08\n\x04HTML\x10\x02\x42\x08\n\x06source\"t\n\x08Sentence\x12\x30\n\x04text\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.TextSpan\x12\x36\n\tsentiment\x18\x02 \x01(\x0b\x32#.google.cloud.language.v1.Sentiment\"\x86\x03\n\x06\x45ntity\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x04type\x18\x02 \x01(\x0e\x32%.google.cloud.language.v1.Entity.Type\x12@\n\x08metadata\x18\x03 \x03(\x0b\x32..google.cloud.language.v1.Entity.MetadataEntry\x12\x10\n\x08salience\x18\x04 \x01(\x02\x12\x39\n\x08mentions\x18\x05 \x03(\x0b\x32\'.google.cloud.language.v1.EntityMention\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"y\n\x04Type\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06PERSON\x10\x01\x12\x0c\n\x08LOCATION\x10\x02\x12\x10\n\x0cORGANIZATION\x10\x03\x12\t\n\x05\x45VENT\x10\x04\x12\x0f\n\x0bWORK_OF_ART\x10\x05\x12\x11\n\rCONSUMER_GOOD\x10\x06\x12\t\n\x05OTHER\x10\x07\"\xcb\x01\n\x05Token\x12\x30\n\x04text\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.TextSpan\x12>\n\x0epart_of_speech\x18\x02 \x01(\x0b\x32&.google.cloud.language.v1.PartOfSpeech\x12\x41\n\x0f\x64\x65pendency_edge\x18\x03 \x01(\x0b\x32(.google.cloud.language.v1.DependencyEdge\x12\r\n\x05lemma\x18\x04 \x01(\t\"-\n\tSentiment\x12\x11\n\tmagnitude\x18\x02 \x01(\x02\x12\r\n\x05score\x18\x03 \x01(\x02\"\xa3\x10\n\x0cPartOfSpeech\x12\x37\n\x03tag\x18\x01 \x01(\x0e\x32*.google.cloud.language.v1.PartOfSpeech.Tag\x12=\n\x06\x61spect\x18\x02 \x01(\x0e\x32-.google.cloud.language.v1.PartOfSpeech.Aspect\x12\x39\n\x04\x63\x61se\x18\x03 \x01(\x0e\x32+.google.cloud.language.v1.PartOfSpeech.Case\x12\x39\n\x04\x66orm\x18\x04 \x01(\x0e\x32+.google.cloud.language.v1.PartOfSpeech.Form\x12=\n\x06gender\x18\x05 \x01(\x0e\x32-.google.cloud.language.v1.PartOfSpeech.Gender\x12\x39\n\x04mood\x18\x06 \x01(\x0e\x32+.google.cloud.language.v1.PartOfSpeech.Mood\x12=\n\x06number\x18\x07 \x01(\x0e\x32-.google.cloud.language.v1.PartOfSpeech.Number\x12=\n\x06person\x18\x08 \x01(\x0e\x32-.google.cloud.language.v1.PartOfSpeech.Person\x12=\n\x06proper\x18\t \x01(\x0e\x32-.google.cloud.language.v1.PartOfSpeech.Proper\x12G\n\x0breciprocity\x18\n \x01(\x0e\x32\x32.google.cloud.language.v1.PartOfSpeech.Reciprocity\x12;\n\x05tense\x18\x0b \x01(\x0e\x32,.google.cloud.language.v1.PartOfSpeech.Tense\x12;\n\x05voice\x18\x0c 
\x01(\x0e\x32,.google.cloud.language.v1.PartOfSpeech.Voice\"\x8d\x01\n\x03Tag\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03\x41\x44J\x10\x01\x12\x07\n\x03\x41\x44P\x10\x02\x12\x07\n\x03\x41\x44V\x10\x03\x12\x08\n\x04\x43ONJ\x10\x04\x12\x07\n\x03\x44\x45T\x10\x05\x12\x08\n\x04NOUN\x10\x06\x12\x07\n\x03NUM\x10\x07\x12\x08\n\x04PRON\x10\x08\x12\x07\n\x03PRT\x10\t\x12\t\n\x05PUNCT\x10\n\x12\x08\n\x04VERB\x10\x0b\x12\x05\n\x01X\x10\x0c\x12\t\n\x05\x41\x46\x46IX\x10\r\"O\n\x06\x41spect\x12\x12\n\x0e\x41SPECT_UNKNOWN\x10\x00\x12\x0e\n\nPERFECTIVE\x10\x01\x12\x10\n\x0cIMPERFECTIVE\x10\x02\x12\x0f\n\x0bPROGRESSIVE\x10\x03\"\xf8\x01\n\x04\x43\x61se\x12\x10\n\x0c\x43\x41SE_UNKNOWN\x10\x00\x12\x0e\n\nACCUSATIVE\x10\x01\x12\r\n\tADVERBIAL\x10\x02\x12\x11\n\rCOMPLEMENTIVE\x10\x03\x12\n\n\x06\x44\x41TIVE\x10\x04\x12\x0c\n\x08GENITIVE\x10\x05\x12\x10\n\x0cINSTRUMENTAL\x10\x06\x12\x0c\n\x08LOCATIVE\x10\x07\x12\x0e\n\nNOMINATIVE\x10\x08\x12\x0b\n\x07OBLIQUE\x10\t\x12\r\n\tPARTITIVE\x10\n\x12\x11\n\rPREPOSITIONAL\x10\x0b\x12\x12\n\x0eREFLEXIVE_CASE\x10\x0c\x12\x11\n\rRELATIVE_CASE\x10\r\x12\x0c\n\x08VOCATIVE\x10\x0e\"\xaf\x01\n\x04\x46orm\x12\x10\n\x0c\x46ORM_UNKNOWN\x10\x00\x12\x0c\n\x08\x41\x44NOMIAL\x10\x01\x12\r\n\tAUXILIARY\x10\x02\x12\x12\n\x0e\x43OMPLEMENTIZER\x10\x03\x12\x10\n\x0c\x46INAL_ENDING\x10\x04\x12\n\n\x06GERUND\x10\x05\x12\n\n\x06REALIS\x10\x06\x12\x0c\n\x08IRREALIS\x10\x07\x12\t\n\x05SHORT\x10\x08\x12\x08\n\x04LONG\x10\t\x12\t\n\x05ORDER\x10\n\x12\x0c\n\x08SPECIFIC\x10\x0b\"E\n\x06Gender\x12\x12\n\x0eGENDER_UNKNOWN\x10\x00\x12\x0c\n\x08\x46\x45MININE\x10\x01\x12\r\n\tMASCULINE\x10\x02\x12\n\n\x06NEUTER\x10\x03\"\x7f\n\x04Mood\x12\x10\n\x0cMOOD_UNKNOWN\x10\x00\x12\x14\n\x10\x43ONDITIONAL_MOOD\x10\x01\x12\x0e\n\nIMPERATIVE\x10\x02\x12\x0e\n\nINDICATIVE\x10\x03\x12\x11\n\rINTERROGATIVE\x10\x04\x12\x0b\n\x07JUSSIVE\x10\x05\x12\x0f\n\x0bSUBJUNCTIVE\x10\x06\"@\n\x06Number\x12\x12\n\x0eNUMBER_UNKNOWN\x10\x00\x12\x0c\n\x08SINGULAR\x10\x01\x12\n\n\x06PLURAL\x10\x02\x12\x08\n\x04\x44UAL\x10\x03\"T\n\x06Person\x12\x12\n\x0ePERSON_UNKNOWN\x10\x00\x12\t\n\x05\x46IRST\x10\x01\x12\n\n\x06SECOND\x10\x02\x12\t\n\x05THIRD\x10\x03\x12\x14\n\x10REFLEXIVE_PERSON\x10\x04\"8\n\x06Proper\x12\x12\n\x0ePROPER_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\x0e\n\nNOT_PROPER\x10\x02\"J\n\x0bReciprocity\x12\x17\n\x13RECIPROCITY_UNKNOWN\x10\x00\x12\x0e\n\nRECIPROCAL\x10\x01\x12\x12\n\x0eNON_RECIPROCAL\x10\x02\"s\n\x05Tense\x12\x11\n\rTENSE_UNKNOWN\x10\x00\x12\x15\n\x11\x43ONDITIONAL_TENSE\x10\x01\x12\n\n\x06\x46UTURE\x10\x02\x12\x08\n\x04PAST\x10\x03\x12\x0b\n\x07PRESENT\x10\x04\x12\r\n\tIMPERFECT\x10\x05\x12\x0e\n\nPLUPERFECT\x10\x06\"B\n\x05Voice\x12\x11\n\rVOICE_UNKNOWN\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\r\n\tCAUSATIVE\x10\x02\x12\x0b\n\x07PASSIVE\x10\x03\"\xd8\x07\n\x0e\x44\x65pendencyEdge\x12\x18\n\x10head_token_index\x18\x01 \x01(\x05\x12=\n\x05label\x18\x02 
\x01(\x0e\x32..google.cloud.language.v1.DependencyEdge.Label\"\xec\x06\n\x05Label\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06\x41\x42\x42REV\x10\x01\x12\t\n\x05\x41\x43OMP\x10\x02\x12\t\n\x05\x41\x44VCL\x10\x03\x12\n\n\x06\x41\x44VMOD\x10\x04\x12\x08\n\x04\x41MOD\x10\x05\x12\t\n\x05\x41PPOS\x10\x06\x12\x08\n\x04\x41TTR\x10\x07\x12\x07\n\x03\x41UX\x10\x08\x12\x0b\n\x07\x41UXPASS\x10\t\x12\x06\n\x02\x43\x43\x10\n\x12\t\n\x05\x43\x43OMP\x10\x0b\x12\x08\n\x04\x43ONJ\x10\x0c\x12\t\n\x05\x43SUBJ\x10\r\x12\r\n\tCSUBJPASS\x10\x0e\x12\x07\n\x03\x44\x45P\x10\x0f\x12\x07\n\x03\x44\x45T\x10\x10\x12\r\n\tDISCOURSE\x10\x11\x12\x08\n\x04\x44OBJ\x10\x12\x12\x08\n\x04\x45XPL\x10\x13\x12\x0c\n\x08GOESWITH\x10\x14\x12\x08\n\x04IOBJ\x10\x15\x12\x08\n\x04MARK\x10\x16\x12\x07\n\x03MWE\x10\x17\x12\x07\n\x03MWV\x10\x18\x12\x07\n\x03NEG\x10\x19\x12\x06\n\x02NN\x10\x1a\x12\x0c\n\x08NPADVMOD\x10\x1b\x12\t\n\x05NSUBJ\x10\x1c\x12\r\n\tNSUBJPASS\x10\x1d\x12\x07\n\x03NUM\x10\x1e\x12\n\n\x06NUMBER\x10\x1f\x12\x05\n\x01P\x10 \x12\r\n\tPARATAXIS\x10!\x12\x0b\n\x07PARTMOD\x10\"\x12\t\n\x05PCOMP\x10#\x12\x08\n\x04POBJ\x10$\x12\x08\n\x04POSS\x10%\x12\x0b\n\x07POSTNEG\x10&\x12\x0b\n\x07PRECOMP\x10\'\x12\x0b\n\x07PRECONJ\x10(\x12\n\n\x06PREDET\x10)\x12\x08\n\x04PREF\x10*\x12\x08\n\x04PREP\x10+\x12\t\n\x05PRONL\x10,\x12\x07\n\x03PRT\x10-\x12\x06\n\x02PS\x10.\x12\x0c\n\x08QUANTMOD\x10/\x12\t\n\x05RCMOD\x10\x30\x12\x0c\n\x08RCMODREL\x10\x31\x12\t\n\x05RDROP\x10\x32\x12\x07\n\x03REF\x10\x33\x12\x0b\n\x07REMNANT\x10\x34\x12\x0e\n\nREPARANDUM\x10\x35\x12\x08\n\x04ROOT\x10\x36\x12\x08\n\x04SNUM\x10\x37\x12\x08\n\x04SUFF\x10\x38\x12\x08\n\x04TMOD\x10\x39\x12\t\n\x05TOPIC\x10:\x12\x08\n\x04VMOD\x10;\x12\x0c\n\x08VOCATIVE\x10<\x12\t\n\x05XCOMP\x10=\x12\n\n\x06SUFFIX\x10>\x12\t\n\x05TITLE\x10?\x12\x0c\n\x08\x41\x44VPHMOD\x10@\x12\x0b\n\x07\x41UXCAUS\x10\x41\x12\t\n\x05\x41UXVV\x10\x42\x12\t\n\x05\x44TMOD\x10\x43\x12\x0b\n\x07\x46OREIGN\x10\x44\x12\x06\n\x02KW\x10\x45\x12\x08\n\x04LIST\x10\x46\x12\x08\n\x04NOMC\x10G\x12\x0c\n\x08NOMCSUBJ\x10H\x12\x10\n\x0cNOMCSUBJPASS\x10I\x12\x08\n\x04NUMC\x10J\x12\x07\n\x03\x43OP\x10K\x12\x0e\n\nDISLOCATED\x10L\"\xaf\x01\n\rEntityMention\x12\x30\n\x04text\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.TextSpan\x12:\n\x04type\x18\x02 \x01(\x0e\x32,.google.cloud.language.v1.EntityMention.Type\"0\n\x04Type\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\n\n\x06\x43OMMON\x10\x02\"1\n\x08TextSpan\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12\x14\n\x0c\x62\x65gin_offset\x18\x02 \x01(\x05\"\x8e\x01\n\x17\x41nalyzeSentimentRequest\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.Document\x12=\n\rencoding_type\x18\x02 \x01(\x0e\x32&.google.cloud.language.v1.EncodingType\"\xa4\x01\n\x18\x41nalyzeSentimentResponse\x12?\n\x12\x64ocument_sentiment\x18\x01 \x01(\x0b\x32#.google.cloud.language.v1.Sentiment\x12\x10\n\x08language\x18\x02 \x01(\t\x12\x35\n\tsentences\x18\x03 \x03(\x0b\x32\".google.cloud.language.v1.Sentence\"\x8d\x01\n\x16\x41nalyzeEntitiesRequest\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.Document\x12=\n\rencoding_type\x18\x02 \x01(\x0e\x32&.google.cloud.language.v1.EncodingType\"_\n\x17\x41nalyzeEntitiesResponse\x12\x32\n\x08\x65ntities\x18\x01 \x03(\x0b\x32 .google.cloud.language.v1.Entity\x12\x10\n\x08language\x18\x02 \x01(\t\"\x8b\x01\n\x14\x41nalyzeSyntaxRequest\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.Document\x12=\n\rencoding_type\x18\x02 
\x01(\x0e\x32&.google.cloud.language.v1.EncodingType\"\x91\x01\n\x15\x41nalyzeSyntaxResponse\x12\x35\n\tsentences\x18\x01 \x03(\x0b\x32\".google.cloud.language.v1.Sentence\x12/\n\x06tokens\x18\x02 \x03(\x0b\x32\x1f.google.cloud.language.v1.Token\x12\x10\n\x08language\x18\x03 \x01(\t\"\xb6\x02\n\x13\x41nnotateTextRequest\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.Document\x12H\n\x08\x66\x65\x61tures\x18\x02 \x01(\x0b\x32\x36.google.cloud.language.v1.AnnotateTextRequest.Features\x12=\n\rencoding_type\x18\x03 \x01(\x0e\x32&.google.cloud.language.v1.EncodingType\x1a`\n\x08\x46\x65\x61tures\x12\x16\n\x0e\x65xtract_syntax\x18\x01 \x01(\x08\x12\x18\n\x10\x65xtract_entities\x18\x02 \x01(\x08\x12\"\n\x1a\x65xtract_document_sentiment\x18\x03 \x01(\x08\"\x85\x02\n\x14\x41nnotateTextResponse\x12\x35\n\tsentences\x18\x01 \x03(\x0b\x32\".google.cloud.language.v1.Sentence\x12/\n\x06tokens\x18\x02 \x03(\x0b\x32\x1f.google.cloud.language.v1.Token\x12\x32\n\x08\x65ntities\x18\x03 \x03(\x0b\x32 .google.cloud.language.v1.Entity\x12?\n\x12\x64ocument_sentiment\x18\x04 \x01(\x0b\x32#.google.cloud.language.v1.Sentiment\x12\x10\n\x08language\x18\x05 \x01(\t*8\n\x0c\x45ncodingType\x12\x08\n\x04NONE\x10\x00\x12\x08\n\x04UTF8\x10\x01\x12\t\n\x05UTF16\x10\x02\x12\t\n\x05UTF32\x10\x03\x32\x8d\x05\n\x0fLanguageService\x12\xa4\x01\n\x10\x41nalyzeSentiment\x12\x31.google.cloud.language.v1.AnalyzeSentimentRequest\x1a\x32.google.cloud.language.v1.AnalyzeSentimentResponse\")\x82\xd3\xe4\x93\x02#\"\x1e/v1/documents:analyzeSentiment:\x01*\x12\xa0\x01\n\x0f\x41nalyzeEntities\x12\x30.google.cloud.language.v1.AnalyzeEntitiesRequest\x1a\x31.google.cloud.language.v1.AnalyzeEntitiesResponse\"(\x82\xd3\xe4\x93\x02\"\"\x1d/v1/documents:analyzeEntities:\x01*\x12\x98\x01\n\rAnalyzeSyntax\x12..google.cloud.language.v1.AnalyzeSyntaxRequest\x1a/.google.cloud.language.v1.AnalyzeSyntaxResponse\"&\x82\xd3\xe4\x93\x02 \"\x1b/v1/documents:analyzeSyntax:\x01*\x12\x94\x01\n\x0c\x41nnotateText\x12-.google.cloud.language.v1.AnnotateTextRequest\x1a..google.cloud.language.v1.AnnotateTextResponse\"%\x82\xd3\xe4\x93\x02\x1f\"\x1a/v1/documents:annotateText:\x01*Bx\n\x1c\x63om.google.cloud.language.v1B\x14LanguageServiceProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/language/v1;languageb\x06proto3') + serialized_pb=_b('\n5google/cloud/language_v1/proto/language_service.proto\x12\x18google.cloud.language.v1\x1a\x1cgoogle/api/annotations.proto\"\xc3\x01\n\x08\x44ocument\x12\x35\n\x04type\x18\x01 \x01(\x0e\x32\'.google.cloud.language.v1.Document.Type\x12\x11\n\x07\x63ontent\x18\x02 \x01(\tH\x00\x12\x19\n\x0fgcs_content_uri\x18\x03 \x01(\tH\x00\x12\x10\n\x08language\x18\x04 \x01(\t\"6\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nPLAIN_TEXT\x10\x01\x12\x08\n\x04HTML\x10\x02\x42\x08\n\x06source\"t\n\x08Sentence\x12\x30\n\x04text\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.TextSpan\x12\x36\n\tsentiment\x18\x02 \x01(\x0b\x32#.google.cloud.language.v1.Sentiment\"\xbe\x03\n\x06\x45ntity\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x04type\x18\x02 \x01(\x0e\x32%.google.cloud.language.v1.Entity.Type\x12@\n\x08metadata\x18\x03 \x03(\x0b\x32..google.cloud.language.v1.Entity.MetadataEntry\x12\x10\n\x08salience\x18\x04 \x01(\x02\x12\x39\n\x08mentions\x18\x05 \x03(\x0b\x32\'.google.cloud.language.v1.EntityMention\x12\x36\n\tsentiment\x18\x06 \x01(\x0b\x32#.google.cloud.language.v1.Sentiment\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\"y\n\x04Type\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06PERSON\x10\x01\x12\x0c\n\x08LOCATION\x10\x02\x12\x10\n\x0cORGANIZATION\x10\x03\x12\t\n\x05\x45VENT\x10\x04\x12\x0f\n\x0bWORK_OF_ART\x10\x05\x12\x11\n\rCONSUMER_GOOD\x10\x06\x12\t\n\x05OTHER\x10\x07\"\xcb\x01\n\x05Token\x12\x30\n\x04text\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.TextSpan\x12>\n\x0epart_of_speech\x18\x02 \x01(\x0b\x32&.google.cloud.language.v1.PartOfSpeech\x12\x41\n\x0f\x64\x65pendency_edge\x18\x03 \x01(\x0b\x32(.google.cloud.language.v1.DependencyEdge\x12\r\n\x05lemma\x18\x04 \x01(\t\"-\n\tSentiment\x12\x11\n\tmagnitude\x18\x02 \x01(\x02\x12\r\n\x05score\x18\x03 \x01(\x02\"\xa3\x10\n\x0cPartOfSpeech\x12\x37\n\x03tag\x18\x01 \x01(\x0e\x32*.google.cloud.language.v1.PartOfSpeech.Tag\x12=\n\x06\x61spect\x18\x02 \x01(\x0e\x32-.google.cloud.language.v1.PartOfSpeech.Aspect\x12\x39\n\x04\x63\x61se\x18\x03 \x01(\x0e\x32+.google.cloud.language.v1.PartOfSpeech.Case\x12\x39\n\x04\x66orm\x18\x04 \x01(\x0e\x32+.google.cloud.language.v1.PartOfSpeech.Form\x12=\n\x06gender\x18\x05 \x01(\x0e\x32-.google.cloud.language.v1.PartOfSpeech.Gender\x12\x39\n\x04mood\x18\x06 \x01(\x0e\x32+.google.cloud.language.v1.PartOfSpeech.Mood\x12=\n\x06number\x18\x07 \x01(\x0e\x32-.google.cloud.language.v1.PartOfSpeech.Number\x12=\n\x06person\x18\x08 \x01(\x0e\x32-.google.cloud.language.v1.PartOfSpeech.Person\x12=\n\x06proper\x18\t \x01(\x0e\x32-.google.cloud.language.v1.PartOfSpeech.Proper\x12G\n\x0breciprocity\x18\n \x01(\x0e\x32\x32.google.cloud.language.v1.PartOfSpeech.Reciprocity\x12;\n\x05tense\x18\x0b \x01(\x0e\x32,.google.cloud.language.v1.PartOfSpeech.Tense\x12;\n\x05voice\x18\x0c \x01(\x0e\x32,.google.cloud.language.v1.PartOfSpeech.Voice\"\x8d\x01\n\x03Tag\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03\x41\x44J\x10\x01\x12\x07\n\x03\x41\x44P\x10\x02\x12\x07\n\x03\x41\x44V\x10\x03\x12\x08\n\x04\x43ONJ\x10\x04\x12\x07\n\x03\x44\x45T\x10\x05\x12\x08\n\x04NOUN\x10\x06\x12\x07\n\x03NUM\x10\x07\x12\x08\n\x04PRON\x10\x08\x12\x07\n\x03PRT\x10\t\x12\t\n\x05PUNCT\x10\n\x12\x08\n\x04VERB\x10\x0b\x12\x05\n\x01X\x10\x0c\x12\t\n\x05\x41\x46\x46IX\x10\r\"O\n\x06\x41spect\x12\x12\n\x0e\x41SPECT_UNKNOWN\x10\x00\x12\x0e\n\nPERFECTIVE\x10\x01\x12\x10\n\x0cIMPERFECTIVE\x10\x02\x12\x0f\n\x0bPROGRESSIVE\x10\x03\"\xf8\x01\n\x04\x43\x61se\x12\x10\n\x0c\x43\x41SE_UNKNOWN\x10\x00\x12\x0e\n\nACCUSATIVE\x10\x01\x12\r\n\tADVERBIAL\x10\x02\x12\x11\n\rCOMPLEMENTIVE\x10\x03\x12\n\n\x06\x44\x41TIVE\x10\x04\x12\x0c\n\x08GENITIVE\x10\x05\x12\x10\n\x0cINSTRUMENTAL\x10\x06\x12\x0c\n\x08LOCATIVE\x10\x07\x12\x0e\n\nNOMINATIVE\x10\x08\x12\x0b\n\x07OBLIQUE\x10\t\x12\r\n\tPARTITIVE\x10\n\x12\x11\n\rPREPOSITIONAL\x10\x0b\x12\x12\n\x0eREFLEXIVE_CASE\x10\x0c\x12\x11\n\rRELATIVE_CASE\x10\r\x12\x0c\n\x08VOCATIVE\x10\x0e\"\xaf\x01\n\x04\x46orm\x12\x10\n\x0c\x46ORM_UNKNOWN\x10\x00\x12\x0c\n\x08\x41\x44NOMIAL\x10\x01\x12\r\n\tAUXILIARY\x10\x02\x12\x12\n\x0e\x43OMPLEMENTIZER\x10\x03\x12\x10\n\x0c\x46INAL_ENDING\x10\x04\x12\n\n\x06GERUND\x10\x05\x12\n\n\x06REALIS\x10\x06\x12\x0c\n\x08IRREALIS\x10\x07\x12\t\n\x05SHORT\x10\x08\x12\x08\n\x04LONG\x10\t\x12\t\n\x05ORDER\x10\n\x12\x0c\n\x08SPECIFIC\x10\x0b\"E\n\x06Gender\x12\x12\n\x0eGENDER_UNKNOWN\x10\x00\x12\x0c\n\x08\x46\x45MININE\x10\x01\x12\r\n\tMASCULINE\x10\x02\x12\n\n\x06NEUTER\x10\x03\"\x7f\n\x04Mood\x12\x10\n\x0cMOOD_UNKNOWN\x10\x00\x12\x14\n\x10\x43ONDITIONAL_MOOD\x10\x01\x12\x0e\n\nIMPERATIVE\x10\x02\x12\x0e\n\nINDICATIVE\x10\x03\x12\x11\n\rINTERROGATIVE\x10\x04\x12\x0b\n\x07JUSSIVE\x10\x05\x12\x0f\n\x0bSUBJUNCTIVE\x10\x0
6\"@\n\x06Number\x12\x12\n\x0eNUMBER_UNKNOWN\x10\x00\x12\x0c\n\x08SINGULAR\x10\x01\x12\n\n\x06PLURAL\x10\x02\x12\x08\n\x04\x44UAL\x10\x03\"T\n\x06Person\x12\x12\n\x0ePERSON_UNKNOWN\x10\x00\x12\t\n\x05\x46IRST\x10\x01\x12\n\n\x06SECOND\x10\x02\x12\t\n\x05THIRD\x10\x03\x12\x14\n\x10REFLEXIVE_PERSON\x10\x04\"8\n\x06Proper\x12\x12\n\x0ePROPER_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\x0e\n\nNOT_PROPER\x10\x02\"J\n\x0bReciprocity\x12\x17\n\x13RECIPROCITY_UNKNOWN\x10\x00\x12\x0e\n\nRECIPROCAL\x10\x01\x12\x12\n\x0eNON_RECIPROCAL\x10\x02\"s\n\x05Tense\x12\x11\n\rTENSE_UNKNOWN\x10\x00\x12\x15\n\x11\x43ONDITIONAL_TENSE\x10\x01\x12\n\n\x06\x46UTURE\x10\x02\x12\x08\n\x04PAST\x10\x03\x12\x0b\n\x07PRESENT\x10\x04\x12\r\n\tIMPERFECT\x10\x05\x12\x0e\n\nPLUPERFECT\x10\x06\"B\n\x05Voice\x12\x11\n\rVOICE_UNKNOWN\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\r\n\tCAUSATIVE\x10\x02\x12\x0b\n\x07PASSIVE\x10\x03\"\x95\x08\n\x0e\x44\x65pendencyEdge\x12\x18\n\x10head_token_index\x18\x01 \x01(\x05\x12=\n\x05label\x18\x02 \x01(\x0e\x32..google.cloud.language.v1.DependencyEdge.Label\"\xa9\x07\n\x05Label\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06\x41\x42\x42REV\x10\x01\x12\t\n\x05\x41\x43OMP\x10\x02\x12\t\n\x05\x41\x44VCL\x10\x03\x12\n\n\x06\x41\x44VMOD\x10\x04\x12\x08\n\x04\x41MOD\x10\x05\x12\t\n\x05\x41PPOS\x10\x06\x12\x08\n\x04\x41TTR\x10\x07\x12\x07\n\x03\x41UX\x10\x08\x12\x0b\n\x07\x41UXPASS\x10\t\x12\x06\n\x02\x43\x43\x10\n\x12\t\n\x05\x43\x43OMP\x10\x0b\x12\x08\n\x04\x43ONJ\x10\x0c\x12\t\n\x05\x43SUBJ\x10\r\x12\r\n\tCSUBJPASS\x10\x0e\x12\x07\n\x03\x44\x45P\x10\x0f\x12\x07\n\x03\x44\x45T\x10\x10\x12\r\n\tDISCOURSE\x10\x11\x12\x08\n\x04\x44OBJ\x10\x12\x12\x08\n\x04\x45XPL\x10\x13\x12\x0c\n\x08GOESWITH\x10\x14\x12\x08\n\x04IOBJ\x10\x15\x12\x08\n\x04MARK\x10\x16\x12\x07\n\x03MWE\x10\x17\x12\x07\n\x03MWV\x10\x18\x12\x07\n\x03NEG\x10\x19\x12\x06\n\x02NN\x10\x1a\x12\x0c\n\x08NPADVMOD\x10\x1b\x12\t\n\x05NSUBJ\x10\x1c\x12\r\n\tNSUBJPASS\x10\x1d\x12\x07\n\x03NUM\x10\x1e\x12\n\n\x06NUMBER\x10\x1f\x12\x05\n\x01P\x10 \x12\r\n\tPARATAXIS\x10!\x12\x0b\n\x07PARTMOD\x10\"\x12\t\n\x05PCOMP\x10#\x12\x08\n\x04POBJ\x10$\x12\x08\n\x04POSS\x10%\x12\x0b\n\x07POSTNEG\x10&\x12\x0b\n\x07PRECOMP\x10\'\x12\x0b\n\x07PRECONJ\x10(\x12\n\n\x06PREDET\x10)\x12\x08\n\x04PREF\x10*\x12\x08\n\x04PREP\x10+\x12\t\n\x05PRONL\x10,\x12\x07\n\x03PRT\x10-\x12\x06\n\x02PS\x10.\x12\x0c\n\x08QUANTMOD\x10/\x12\t\n\x05RCMOD\x10\x30\x12\x0c\n\x08RCMODREL\x10\x31\x12\t\n\x05RDROP\x10\x32\x12\x07\n\x03REF\x10\x33\x12\x0b\n\x07REMNANT\x10\x34\x12\x0e\n\nREPARANDUM\x10\x35\x12\x08\n\x04ROOT\x10\x36\x12\x08\n\x04SNUM\x10\x37\x12\x08\n\x04SUFF\x10\x38\x12\x08\n\x04TMOD\x10\x39\x12\t\n\x05TOPIC\x10:\x12\x08\n\x04VMOD\x10;\x12\x0c\n\x08VOCATIVE\x10<\x12\t\n\x05XCOMP\x10=\x12\n\n\x06SUFFIX\x10>\x12\t\n\x05TITLE\x10?\x12\x0c\n\x08\x41\x44VPHMOD\x10@\x12\x0b\n\x07\x41UXCAUS\x10\x41\x12\t\n\x05\x41UXVV\x10\x42\x12\t\n\x05\x44TMOD\x10\x43\x12\x0b\n\x07\x46OREIGN\x10\x44\x12\x06\n\x02KW\x10\x45\x12\x08\n\x04LIST\x10\x46\x12\x08\n\x04NOMC\x10G\x12\x0c\n\x08NOMCSUBJ\x10H\x12\x10\n\x0cNOMCSUBJPASS\x10I\x12\x08\n\x04NUMC\x10J\x12\x07\n\x03\x43OP\x10K\x12\x0e\n\nDISLOCATED\x10L\x12\x07\n\x03\x41SP\x10M\x12\x08\n\x04GMOD\x10N\x12\x08\n\x04GOBJ\x10O\x12\n\n\x06INFMOD\x10P\x12\x07\n\x03MES\x10Q\x12\t\n\x05NCOMP\x10R\"\xe7\x01\n\rEntityMention\x12\x30\n\x04text\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.TextSpan\x12:\n\x04type\x18\x02 \x01(\x0e\x32,.google.cloud.language.v1.EntityMention.Type\x12\x36\n\tsentiment\x18\x03 
\x01(\x0b\x32#.google.cloud.language.v1.Sentiment\"0\n\x04Type\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\n\n\x06\x43OMMON\x10\x02\"1\n\x08TextSpan\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12\x14\n\x0c\x62\x65gin_offset\x18\x02 \x01(\x05\"\x8e\x01\n\x17\x41nalyzeSentimentRequest\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.Document\x12=\n\rencoding_type\x18\x02 \x01(\x0e\x32&.google.cloud.language.v1.EncodingType\"\xa4\x01\n\x18\x41nalyzeSentimentResponse\x12?\n\x12\x64ocument_sentiment\x18\x01 \x01(\x0b\x32#.google.cloud.language.v1.Sentiment\x12\x10\n\x08language\x18\x02 \x01(\t\x12\x35\n\tsentences\x18\x03 \x03(\x0b\x32\".google.cloud.language.v1.Sentence\"\x94\x01\n\x1d\x41nalyzeEntitySentimentRequest\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.Document\x12=\n\rencoding_type\x18\x02 \x01(\x0e\x32&.google.cloud.language.v1.EncodingType\"f\n\x1e\x41nalyzeEntitySentimentResponse\x12\x32\n\x08\x65ntities\x18\x01 \x03(\x0b\x32 .google.cloud.language.v1.Entity\x12\x10\n\x08language\x18\x02 \x01(\t\"\x8d\x01\n\x16\x41nalyzeEntitiesRequest\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.Document\x12=\n\rencoding_type\x18\x02 \x01(\x0e\x32&.google.cloud.language.v1.EncodingType\"_\n\x17\x41nalyzeEntitiesResponse\x12\x32\n\x08\x65ntities\x18\x01 \x03(\x0b\x32 .google.cloud.language.v1.Entity\x12\x10\n\x08language\x18\x02 \x01(\t\"\x8b\x01\n\x14\x41nalyzeSyntaxRequest\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.Document\x12=\n\rencoding_type\x18\x02 \x01(\x0e\x32&.google.cloud.language.v1.EncodingType\"\x91\x01\n\x15\x41nalyzeSyntaxResponse\x12\x35\n\tsentences\x18\x01 \x03(\x0b\x32\".google.cloud.language.v1.Sentence\x12/\n\x06tokens\x18\x02 \x03(\x0b\x32\x1f.google.cloud.language.v1.Token\x12\x10\n\x08language\x18\x03 \x01(\t\"\xd9\x02\n\x13\x41nnotateTextRequest\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.Document\x12H\n\x08\x66\x65\x61tures\x18\x02 \x01(\x0b\x32\x36.google.cloud.language.v1.AnnotateTextRequest.Features\x12=\n\rencoding_type\x18\x03 \x01(\x0e\x32&.google.cloud.language.v1.EncodingType\x1a\x82\x01\n\x08\x46\x65\x61tures\x12\x16\n\x0e\x65xtract_syntax\x18\x01 \x01(\x08\x12\x18\n\x10\x65xtract_entities\x18\x02 \x01(\x08\x12\"\n\x1a\x65xtract_document_sentiment\x18\x03 \x01(\x08\x12 \n\x18\x65xtract_entity_sentiment\x18\x04 \x01(\x08\"\x85\x02\n\x14\x41nnotateTextResponse\x12\x35\n\tsentences\x18\x01 \x03(\x0b\x32\".google.cloud.language.v1.Sentence\x12/\n\x06tokens\x18\x02 \x03(\x0b\x32\x1f.google.cloud.language.v1.Token\x12\x32\n\x08\x65ntities\x18\x03 \x03(\x0b\x32 .google.cloud.language.v1.Entity\x12?\n\x12\x64ocument_sentiment\x18\x04 \x01(\x0b\x32#.google.cloud.language.v1.Sentiment\x12\x10\n\x08language\x18\x05 
\x01(\t*8\n\x0c\x45ncodingType\x12\x08\n\x04NONE\x10\x00\x12\x08\n\x04UTF8\x10\x01\x12\t\n\x05UTF16\x10\x02\x12\t\n\x05UTF32\x10\x03\x32\xcc\x06\n\x0fLanguageService\x12\xa4\x01\n\x10\x41nalyzeSentiment\x12\x31.google.cloud.language.v1.AnalyzeSentimentRequest\x1a\x32.google.cloud.language.v1.AnalyzeSentimentResponse\")\x82\xd3\xe4\x93\x02#\"\x1e/v1/documents:analyzeSentiment:\x01*\x12\xa0\x01\n\x0f\x41nalyzeEntities\x12\x30.google.cloud.language.v1.AnalyzeEntitiesRequest\x1a\x31.google.cloud.language.v1.AnalyzeEntitiesResponse\"(\x82\xd3\xe4\x93\x02\"\"\x1d/v1/documents:analyzeEntities:\x01*\x12\xbc\x01\n\x16\x41nalyzeEntitySentiment\x12\x37.google.cloud.language.v1.AnalyzeEntitySentimentRequest\x1a\x38.google.cloud.language.v1.AnalyzeEntitySentimentResponse\"/\x82\xd3\xe4\x93\x02)\"$/v1/documents:analyzeEntitySentiment:\x01*\x12\x98\x01\n\rAnalyzeSyntax\x12..google.cloud.language.v1.AnalyzeSyntaxRequest\x1a/.google.cloud.language.v1.AnalyzeSyntaxResponse\"&\x82\xd3\xe4\x93\x02 \"\x1b/v1/documents:analyzeSyntax:\x01*\x12\x94\x01\n\x0c\x41nnotateText\x12-.google.cloud.language.v1.AnnotateTextRequest\x1a..google.cloud.language.v1.AnnotateTextResponse\"%\x82\xd3\xe4\x93\x02\x1f\"\x1a/v1/documents:annotateText:\x01*Bx\n\x1c\x63om.google.cloud.language.v1B\x14LanguageServiceProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/language/v1;languageb\x06proto3') , dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,]) _sym_db.RegisterFileDescriptor(DESCRIPTOR) @@ -51,8 +51,8 @@ ], containing_type=None, options=None, - serialized_start=5797, - serialized_end=5853, + serialized_start=6260, + serialized_end=6316, ) _sym_db.RegisterEnumDescriptor(_ENCODINGTYPE) @@ -130,8 +130,8 @@ ], containing_type=None, options=None, - serialized_start=699, - serialized_end=820, + serialized_start=755, + serialized_end=876, ) _sym_db.RegisterEnumDescriptor(_ENTITY_TYPE) @@ -200,8 +200,8 @@ ], containing_type=None, options=None, - serialized_start=1837, - serialized_end=1978, + serialized_start=1893, + serialized_end=2034, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_TAG) @@ -230,8 +230,8 @@ ], containing_type=None, options=None, - serialized_start=1980, - serialized_end=2059, + serialized_start=2036, + serialized_end=2115, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_ASPECT) @@ -304,8 +304,8 @@ ], containing_type=None, options=None, - serialized_start=2062, - serialized_end=2310, + serialized_start=2118, + serialized_end=2366, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_CASE) @@ -366,8 +366,8 @@ ], containing_type=None, options=None, - serialized_start=2313, - serialized_end=2488, + serialized_start=2369, + serialized_end=2544, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_FORM) @@ -396,8 +396,8 @@ ], containing_type=None, options=None, - serialized_start=2490, - serialized_end=2559, + serialized_start=2546, + serialized_end=2615, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_GENDER) @@ -438,8 +438,8 @@ ], containing_type=None, options=None, - serialized_start=2561, - serialized_end=2688, + serialized_start=2617, + serialized_end=2744, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_MOOD) @@ -468,8 +468,8 @@ ], containing_type=None, options=None, - serialized_start=2690, - serialized_end=2754, + serialized_start=2746, + serialized_end=2810, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_NUMBER) @@ -502,8 +502,8 @@ ], containing_type=None, options=None, - serialized_start=2756, - serialized_end=2840, + serialized_start=2812, + serialized_end=2896, ) 
_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_PERSON) @@ -528,8 +528,8 @@ ], containing_type=None, options=None, - serialized_start=2842, - serialized_end=2898, + serialized_start=2898, + serialized_end=2954, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_PROPER) @@ -554,8 +554,8 @@ ], containing_type=None, options=None, - serialized_start=2900, - serialized_end=2974, + serialized_start=2956, + serialized_end=3030, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_RECIPROCITY) @@ -596,8 +596,8 @@ ], containing_type=None, options=None, - serialized_start=2976, - serialized_end=3091, + serialized_start=3032, + serialized_end=3147, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_TENSE) @@ -626,8 +626,8 @@ ], containing_type=None, options=None, - serialized_start=3093, - serialized_end=3159, + serialized_start=3149, + serialized_end=3215, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_VOICE) @@ -945,11 +945,35 @@ name='DISLOCATED', index=76, number=76, options=None, type=None), + _descriptor.EnumValueDescriptor( + name='ASP', index=77, number=77, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='GMOD', index=78, number=78, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='GOBJ', index=79, number=79, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='INFMOD', index=80, number=80, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MES', index=81, number=81, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NCOMP', index=82, number=82, + options=None, + type=None), ], containing_type=None, options=None, - serialized_start=3270, - serialized_end=4146, + serialized_start=3326, + serialized_end=4263, ) _sym_db.RegisterEnumDescriptor(_DEPENDENCYEDGE_LABEL) @@ -974,8 +998,8 @@ ], containing_type=None, options=None, - serialized_start=4276, - serialized_end=4324, + serialized_start=4449, + serialized_end=4497, ) _sym_db.RegisterEnumDescriptor(_ENTITYMENTION_TYPE) @@ -1107,8 +1131,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=650, - serialized_end=697, + serialized_start=706, + serialized_end=753, ) _ENTITY = _descriptor.Descriptor( @@ -1153,6 +1177,13 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), + _descriptor.FieldDescriptor( + name='sentiment', full_name='google.cloud.language.v1.Entity.sentiment', index=5, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), ], extensions=[ ], @@ -1167,7 +1198,7 @@ oneofs=[ ], serialized_start=430, - serialized_end=820, + serialized_end=876, ) @@ -1218,8 +1249,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=823, - serialized_end=1026, + serialized_start=879, + serialized_end=1082, ) @@ -1256,8 +1287,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1028, - serialized_end=1073, + serialized_start=1084, + serialized_end=1129, ) @@ -1376,8 +1407,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1076, - serialized_end=3159, + serialized_start=1132, + serialized_end=3215, ) @@ -1415,8 +1446,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=3162, - serialized_end=4146, + serialized_start=3218, + serialized_end=4263, ) @@ -1441,6 +1472,13 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), + _descriptor.FieldDescriptor( + name='sentiment', 
full_name='google.cloud.language.v1.EntityMention.sentiment', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), ], extensions=[ ], @@ -1454,8 +1492,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=4149, - serialized_end=4324, + serialized_start=4266, + serialized_end=4497, ) @@ -1492,8 +1530,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=4326, - serialized_end=4375, + serialized_start=4499, + serialized_end=4548, ) @@ -1530,8 +1568,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=4378, - serialized_end=4520, + serialized_start=4551, + serialized_end=4693, ) @@ -1575,8 +1613,84 @@ extension_ranges=[], oneofs=[ ], - serialized_start=4523, - serialized_end=4687, + serialized_start=4696, + serialized_end=4860, +) + + +_ANALYZEENTITYSENTIMENTREQUEST = _descriptor.Descriptor( + name='AnalyzeEntitySentimentRequest', + full_name='google.cloud.language.v1.AnalyzeEntitySentimentRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='document', full_name='google.cloud.language.v1.AnalyzeEntitySentimentRequest.document', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='encoding_type', full_name='google.cloud.language.v1.AnalyzeEntitySentimentRequest.encoding_type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4863, + serialized_end=5011, +) + + +_ANALYZEENTITYSENTIMENTRESPONSE = _descriptor.Descriptor( + name='AnalyzeEntitySentimentResponse', + full_name='google.cloud.language.v1.AnalyzeEntitySentimentResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='entities', full_name='google.cloud.language.v1.AnalyzeEntitySentimentResponse.entities', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='language', full_name='google.cloud.language.v1.AnalyzeEntitySentimentResponse.language', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5013, + serialized_end=5115, ) @@ -1613,8 +1727,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=4690, - serialized_end=4831, + serialized_start=5118, + serialized_end=5259, ) @@ -1651,8 +1765,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=4833, - serialized_end=4928, + serialized_start=5261, + serialized_end=5356, ) @@ -1689,8 
+1803,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=4931, - serialized_end=5070, + serialized_start=5359, + serialized_end=5498, ) @@ -1734,8 +1848,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=5073, - serialized_end=5218, + serialized_start=5501, + serialized_end=5646, ) @@ -1767,6 +1881,13 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), + _descriptor.FieldDescriptor( + name='extract_entity_sentiment', full_name='google.cloud.language.v1.AnnotateTextRequest.Features.extract_entity_sentiment', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), ], extensions=[ ], @@ -1779,8 +1900,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=5435, - serialized_end=5531, + serialized_start=5864, + serialized_end=5994, ) _ANNOTATETEXTREQUEST = _descriptor.Descriptor( @@ -1823,8 +1944,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=5221, - serialized_end=5531, + serialized_start=5649, + serialized_end=5994, ) @@ -1882,8 +2003,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=5534, - serialized_end=5795, + serialized_start=5997, + serialized_end=6258, ) _DOCUMENT.fields_by_name['type'].enum_type = _DOCUMENT_TYPE @@ -1900,6 +2021,7 @@ _ENTITY.fields_by_name['type'].enum_type = _ENTITY_TYPE _ENTITY.fields_by_name['metadata'].message_type = _ENTITY_METADATAENTRY _ENTITY.fields_by_name['mentions'].message_type = _ENTITYMENTION +_ENTITY.fields_by_name['sentiment'].message_type = _SENTIMENT _ENTITY_TYPE.containing_type = _ENTITY _TOKEN.fields_by_name['text'].message_type = _TEXTSPAN _TOKEN.fields_by_name['part_of_speech'].message_type = _PARTOFSPEECH @@ -1932,11 +2054,15 @@ _DEPENDENCYEDGE_LABEL.containing_type = _DEPENDENCYEDGE _ENTITYMENTION.fields_by_name['text'].message_type = _TEXTSPAN _ENTITYMENTION.fields_by_name['type'].enum_type = _ENTITYMENTION_TYPE +_ENTITYMENTION.fields_by_name['sentiment'].message_type = _SENTIMENT _ENTITYMENTION_TYPE.containing_type = _ENTITYMENTION _ANALYZESENTIMENTREQUEST.fields_by_name['document'].message_type = _DOCUMENT _ANALYZESENTIMENTREQUEST.fields_by_name['encoding_type'].enum_type = _ENCODINGTYPE _ANALYZESENTIMENTRESPONSE.fields_by_name['document_sentiment'].message_type = _SENTIMENT _ANALYZESENTIMENTRESPONSE.fields_by_name['sentences'].message_type = _SENTENCE +_ANALYZEENTITYSENTIMENTREQUEST.fields_by_name['document'].message_type = _DOCUMENT +_ANALYZEENTITYSENTIMENTREQUEST.fields_by_name['encoding_type'].enum_type = _ENCODINGTYPE +_ANALYZEENTITYSENTIMENTRESPONSE.fields_by_name['entities'].message_type = _ENTITY _ANALYZEENTITIESREQUEST.fields_by_name['document'].message_type = _DOCUMENT _ANALYZEENTITIESREQUEST.fields_by_name['encoding_type'].enum_type = _ENCODINGTYPE _ANALYZEENTITIESRESPONSE.fields_by_name['entities'].message_type = _ENTITY @@ -1963,6 +2089,8 @@ DESCRIPTOR.message_types_by_name['TextSpan'] = _TEXTSPAN DESCRIPTOR.message_types_by_name['AnalyzeSentimentRequest'] = _ANALYZESENTIMENTREQUEST DESCRIPTOR.message_types_by_name['AnalyzeSentimentResponse'] = _ANALYZESENTIMENTRESPONSE +DESCRIPTOR.message_types_by_name['AnalyzeEntitySentimentRequest'] = _ANALYZEENTITYSENTIMENTREQUEST +DESCRIPTOR.message_types_by_name['AnalyzeEntitySentimentResponse'] = _ANALYZEENTITYSENTIMENTRESPONSE DESCRIPTOR.message_types_by_name['AnalyzeEntitiesRequest'] = _ANALYZEENTITIESREQUEST 
DESCRIPTOR.message_types_by_name['AnalyzeEntitiesResponse'] = _ANALYZEENTITIESRESPONSE DESCRIPTOR.message_types_by_name['AnalyzeSyntaxRequest'] = _ANALYZESYNTAXREQUEST @@ -1973,7 +2101,7 @@ Document = _reflection.GeneratedProtocolMessageType('Document', (_message.Message,), dict( DESCRIPTOR = _DOCUMENT, - __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + __module__ = 'google.cloud.language_v1.proto.language_service_pb2' , __doc__ = """Represents the input to API methods. @@ -1996,12 +2124,12 @@ language: The language of the document (if not specified, the language is automatically detected). Both ISO and BCP-47 language codes - are accepted. `Language Support - `__ - lists currently supported languages for each API method. If - the language (either specified by the caller or automatically - detected) is not supported by the called API method, an - ``INVALID_ARGUMENT`` error is returned. + are accepted. `Language Support `__ lists currently supported + languages for each API method. If the language (either + specified by the caller or automatically detected) is not + supported by the called API method, an ``INVALID_ARGUMENT`` + error is returned. """, # @@protoc_insertion_point(class_scope:google.cloud.language.v1.Document) )) @@ -2009,7 +2137,7 @@ Sentence = _reflection.GeneratedProtocolMessageType('Sentence', (_message.Message,), dict( DESCRIPTOR = _SENTENCE, - __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + __module__ = 'google.cloud.language_v1.proto.language_service_pb2' , __doc__ = """Represents a sentence in the input document. @@ -2032,12 +2160,12 @@ MetadataEntry = _reflection.GeneratedProtocolMessageType('MetadataEntry', (_message.Message,), dict( DESCRIPTOR = _ENTITY_METADATAENTRY, - __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + __module__ = 'google.cloud.language_v1.proto.language_service_pb2' # @@protoc_insertion_point(class_scope:google.cloud.language.v1.Entity.MetadataEntry) )) , DESCRIPTOR = _ENTITY, - __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + __module__ = 'google.cloud.language_v1.proto.language_service_pb2' , __doc__ = """Represents a phrase in the text that is a known entity, such as a person, an organization, or location. The API associates information, @@ -2062,6 +2190,12 @@ mentions: The mentions of this entity in the input document. The API currently supports proper noun mentions. + sentiment: + For calls to [AnalyzeEntitySentiment][] or if [AnnotateTextReq + uest.Features.extract\_entity\_sentiment][google.cloud.languag + e.v1.AnnotateTextRequest.Features.extract\_entity\_sentiment] + is set to true, this field will contain the aggregate + sentiment expressed for this entity in the provided document. """, # @@protoc_insertion_point(class_scope:google.cloud.language.v1.Entity) )) @@ -2070,7 +2204,7 @@ Token = _reflection.GeneratedProtocolMessageType('Token', (_message.Message,), dict( DESCRIPTOR = _TOKEN, - __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + __module__ = 'google.cloud.language_v1.proto.language_service_pb2' , __doc__ = """Represents the smallest syntactic building block of the text. 
@@ -2093,7 +2227,7 @@ Sentiment = _reflection.GeneratedProtocolMessageType('Sentiment', (_message.Message,), dict( DESCRIPTOR = _SENTIMENT, - __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + __module__ = 'google.cloud.language_v1.proto.language_service_pb2' , __doc__ = """Represents the feeling associated with the entire text or entities in the text. @@ -2114,7 +2248,7 @@ PartOfSpeech = _reflection.GeneratedProtocolMessageType('PartOfSpeech', (_message.Message,), dict( DESCRIPTOR = _PARTOFSPEECH, - __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + __module__ = 'google.cloud.language_v1.proto.language_service_pb2' , __doc__ = """Represents part of speech information for a token. Parts of speech are as defined in @@ -2153,7 +2287,7 @@ DependencyEdge = _reflection.GeneratedProtocolMessageType('DependencyEdge', (_message.Message,), dict( DESCRIPTOR = _DEPENDENCYEDGE, - __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + __module__ = 'google.cloud.language_v1.proto.language_service_pb2' , __doc__ = """Represents dependency parse tree information for a token. (For more information on dependency labels, see @@ -2176,7 +2310,7 @@ EntityMention = _reflection.GeneratedProtocolMessageType('EntityMention', (_message.Message,), dict( DESCRIPTOR = _ENTITYMENTION, - __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + __module__ = 'google.cloud.language_v1.proto.language_service_pb2' , __doc__ = """Represents a mention for an entity in the text. Currently, proper noun mentions are supported. @@ -2187,6 +2321,13 @@ The mention text. type: The type of the entity mention. + sentiment: + For calls to [AnalyzeEntitySentiment][] or if [AnnotateTextReq + uest.Features.extract\_entity\_sentiment][google.cloud.languag + e.v1.AnnotateTextRequest.Features.extract\_entity\_sentiment] + is set to true, this field will contain the sentiment + expressed for this mention of the entity in the provided + document. """, # @@protoc_insertion_point(class_scope:google.cloud.language.v1.EntityMention) )) @@ -2194,7 +2335,7 @@ TextSpan = _reflection.GeneratedProtocolMessageType('TextSpan', (_message.Message,), dict( DESCRIPTOR = _TEXTSPAN, - __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + __module__ = 'google.cloud.language_v1.proto.language_service_pb2' , __doc__ = """Represents an output piece of text. @@ -2214,7 +2355,7 @@ AnalyzeSentimentRequest = _reflection.GeneratedProtocolMessageType('AnalyzeSentimentRequest', (_message.Message,), dict( DESCRIPTOR = _ANALYZESENTIMENTREQUEST, - __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + __module__ = 'google.cloud.language_v1.proto.language_service_pb2' , __doc__ = """The sentiment analysis request message. @@ -2232,7 +2373,7 @@ AnalyzeSentimentResponse = _reflection.GeneratedProtocolMessageType('AnalyzeSentimentResponse', (_message.Message,), dict( DESCRIPTOR = _ANALYZESENTIMENTRESPONSE, - __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + __module__ = 'google.cloud.language_v1.proto.language_service_pb2' , __doc__ = """The sentiment analysis response message. @@ -2252,9 +2393,47 @@ )) _sym_db.RegisterMessage(AnalyzeSentimentResponse) +AnalyzeEntitySentimentRequest = _reflection.GeneratedProtocolMessageType('AnalyzeEntitySentimentRequest', (_message.Message,), dict( + DESCRIPTOR = _ANALYZEENTITYSENTIMENTREQUEST, + __module__ = 'google.cloud.language_v1.proto.language_service_pb2' + , + __doc__ = """The entity-level sentiment analysis request message. 
+ + + Attributes: + document: + Input document. + encoding_type: + The encoding type used by the API to calculate offsets. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnalyzeEntitySentimentRequest) + )) +_sym_db.RegisterMessage(AnalyzeEntitySentimentRequest) + +AnalyzeEntitySentimentResponse = _reflection.GeneratedProtocolMessageType('AnalyzeEntitySentimentResponse', (_message.Message,), dict( + DESCRIPTOR = _ANALYZEENTITYSENTIMENTRESPONSE, + __module__ = 'google.cloud.language_v1.proto.language_service_pb2' + , + __doc__ = """The entity-level sentiment analysis response message. + + + Attributes: + entities: + The recognized entities in the input document with associated + sentiments. + language: + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See [Document.language][googl + e.cloud.language.v1.Document.language] field for more details. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnalyzeEntitySentimentResponse) + )) +_sym_db.RegisterMessage(AnalyzeEntitySentimentResponse) + AnalyzeEntitiesRequest = _reflection.GeneratedProtocolMessageType('AnalyzeEntitiesRequest', (_message.Message,), dict( DESCRIPTOR = _ANALYZEENTITIESREQUEST, - __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + __module__ = 'google.cloud.language_v1.proto.language_service_pb2' , __doc__ = """The entity analysis request message. @@ -2271,7 +2450,7 @@ AnalyzeEntitiesResponse = _reflection.GeneratedProtocolMessageType('AnalyzeEntitiesResponse', (_message.Message,), dict( DESCRIPTOR = _ANALYZEENTITIESRESPONSE, - __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + __module__ = 'google.cloud.language_v1.proto.language_service_pb2' , __doc__ = """The entity analysis response message. @@ -2291,7 +2470,7 @@ AnalyzeSyntaxRequest = _reflection.GeneratedProtocolMessageType('AnalyzeSyntaxRequest', (_message.Message,), dict( DESCRIPTOR = _ANALYZESYNTAXREQUEST, - __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + __module__ = 'google.cloud.language_v1.proto.language_service_pb2' , __doc__ = """The syntax analysis request message. @@ -2308,7 +2487,7 @@ AnalyzeSyntaxResponse = _reflection.GeneratedProtocolMessageType('AnalyzeSyntaxResponse', (_message.Message,), dict( DESCRIPTOR = _ANALYZESYNTAXRESPONSE, - __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + __module__ = 'google.cloud.language_v1.proto.language_service_pb2' , __doc__ = """The syntax analysis response message. @@ -2333,7 +2512,7 @@ Features = _reflection.GeneratedProtocolMessageType('Features', (_message.Message,), dict( DESCRIPTOR = _ANNOTATETEXTREQUEST_FEATURES, - __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + __module__ = 'google.cloud.language_v1.proto.language_service_pb2' , __doc__ = """All available features for sentiment, syntax, and semantic analysis. Setting each one to true will enable that specific analysis for the @@ -2343,7 +2522,7 @@ )) , DESCRIPTOR = _ANNOTATETEXTREQUEST, - __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + __module__ = 'google.cloud.language_v1.proto.language_service_pb2' , __doc__ = """The request message for the text annotation API, which can perform multiple analysis types (sentiment, entities, and syntax) in one call. @@ -2356,6 +2535,8 @@ Extract entities. extract_document_sentiment: Extract document-level sentiment. 
+ extract_entity_sentiment: + Extract entities and their associated sentiment. document: Input document. features: @@ -2370,7 +2551,7 @@ AnnotateTextResponse = _reflection.GeneratedProtocolMessageType('AnnotateTextResponse', (_message.Message,), dict( DESCRIPTOR = _ANNOTATETEXTRESPONSE, - __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + __module__ = 'google.cloud.language_v1.proto.language_service_pb2' , __doc__ = """The text annotations response message. @@ -2441,6 +2622,11 @@ def __init__(self, channel): request_serializer=AnalyzeEntitiesRequest.SerializeToString, response_deserializer=AnalyzeEntitiesResponse.FromString, ) + self.AnalyzeEntitySentiment = channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeEntitySentiment', + request_serializer=AnalyzeEntitySentimentRequest.SerializeToString, + response_deserializer=AnalyzeEntitySentimentResponse.FromString, + ) self.AnalyzeSyntax = channel.unary_unary( '/google.cloud.language.v1.LanguageService/AnalyzeSyntax', request_serializer=AnalyzeSyntaxRequest.SerializeToString, @@ -2474,6 +2660,14 @@ def AnalyzeEntities(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def AnalyzeEntitySentiment(self, request, context): + """Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities] in the text and analyzes + sentiment associated with each entity and its mentions. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def AnalyzeSyntax(self, request, context): """Analyzes the syntax of the text and provides sentence boundaries and tokenization along with part of speech tags, dependency trees, and other @@ -2504,6 +2698,11 @@ def add_LanguageServiceServicer_to_server(servicer, server): request_deserializer=AnalyzeEntitiesRequest.FromString, response_serializer=AnalyzeEntitiesResponse.SerializeToString, ), + 'AnalyzeEntitySentiment': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeEntitySentiment, + request_deserializer=AnalyzeEntitySentimentRequest.FromString, + response_serializer=AnalyzeEntitySentimentResponse.SerializeToString, + ), 'AnalyzeSyntax': grpc.unary_unary_rpc_method_handler( servicer.AnalyzeSyntax, request_deserializer=AnalyzeSyntaxRequest.FromString, @@ -2539,6 +2738,11 @@ def AnalyzeEntities(self, request, context): other properties. """ context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def AnalyzeEntitySentiment(self, request, context): + """Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities] in the text and analyzes + sentiment associated with each entity and its mentions. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) def AnalyzeSyntax(self, request, context): """Analyzes the syntax of the text and provides sentence boundaries and tokenization along with part of speech tags, dependency trees, and other @@ -2573,6 +2777,12 @@ def AnalyzeEntities(self, request, timeout, metadata=None, with_call=False, prot """ raise NotImplementedError() AnalyzeEntities.future = None + def AnalyzeEntitySentiment(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities] in the text and analyzes + sentiment associated with each entity and its mentions. 
+ """ + raise NotImplementedError() + AnalyzeEntitySentiment.future = None def AnalyzeSyntax(self, request, timeout, metadata=None, with_call=False, protocol_options=None): """Analyzes the syntax of the text and provides sentence boundaries and tokenization along with part of speech tags, dependency trees, and other @@ -2596,18 +2806,21 @@ def beta_create_LanguageService_server(servicer, pool=None, pool_size=None, defa generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" request_deserializers = { ('google.cloud.language.v1.LanguageService', 'AnalyzeEntities'): AnalyzeEntitiesRequest.FromString, + ('google.cloud.language.v1.LanguageService', 'AnalyzeEntitySentiment'): AnalyzeEntitySentimentRequest.FromString, ('google.cloud.language.v1.LanguageService', 'AnalyzeSentiment'): AnalyzeSentimentRequest.FromString, ('google.cloud.language.v1.LanguageService', 'AnalyzeSyntax'): AnalyzeSyntaxRequest.FromString, ('google.cloud.language.v1.LanguageService', 'AnnotateText'): AnnotateTextRequest.FromString, } response_serializers = { ('google.cloud.language.v1.LanguageService', 'AnalyzeEntities'): AnalyzeEntitiesResponse.SerializeToString, + ('google.cloud.language.v1.LanguageService', 'AnalyzeEntitySentiment'): AnalyzeEntitySentimentResponse.SerializeToString, ('google.cloud.language.v1.LanguageService', 'AnalyzeSentiment'): AnalyzeSentimentResponse.SerializeToString, ('google.cloud.language.v1.LanguageService', 'AnalyzeSyntax'): AnalyzeSyntaxResponse.SerializeToString, ('google.cloud.language.v1.LanguageService', 'AnnotateText'): AnnotateTextResponse.SerializeToString, } method_implementations = { ('google.cloud.language.v1.LanguageService', 'AnalyzeEntities'): face_utilities.unary_unary_inline(servicer.AnalyzeEntities), + ('google.cloud.language.v1.LanguageService', 'AnalyzeEntitySentiment'): face_utilities.unary_unary_inline(servicer.AnalyzeEntitySentiment), ('google.cloud.language.v1.LanguageService', 'AnalyzeSentiment'): face_utilities.unary_unary_inline(servicer.AnalyzeSentiment), ('google.cloud.language.v1.LanguageService', 'AnalyzeSyntax'): face_utilities.unary_unary_inline(servicer.AnalyzeSyntax), ('google.cloud.language.v1.LanguageService', 'AnnotateText'): face_utilities.unary_unary_inline(servicer.AnnotateText), @@ -2624,18 +2837,21 @@ def beta_create_LanguageService_stub(channel, host=None, metadata_transformer=No generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" request_serializers = { ('google.cloud.language.v1.LanguageService', 'AnalyzeEntities'): AnalyzeEntitiesRequest.SerializeToString, + ('google.cloud.language.v1.LanguageService', 'AnalyzeEntitySentiment'): AnalyzeEntitySentimentRequest.SerializeToString, ('google.cloud.language.v1.LanguageService', 'AnalyzeSentiment'): AnalyzeSentimentRequest.SerializeToString, ('google.cloud.language.v1.LanguageService', 'AnalyzeSyntax'): AnalyzeSyntaxRequest.SerializeToString, ('google.cloud.language.v1.LanguageService', 'AnnotateText'): AnnotateTextRequest.SerializeToString, } response_deserializers = { ('google.cloud.language.v1.LanguageService', 'AnalyzeEntities'): AnalyzeEntitiesResponse.FromString, + ('google.cloud.language.v1.LanguageService', 'AnalyzeEntitySentiment'): AnalyzeEntitySentimentResponse.FromString, ('google.cloud.language.v1.LanguageService', 'AnalyzeSentiment'): AnalyzeSentimentResponse.FromString, ('google.cloud.language.v1.LanguageService', 'AnalyzeSyntax'): AnalyzeSyntaxResponse.FromString, ('google.cloud.language.v1.LanguageService', 'AnnotateText'): 
AnnotateTextResponse.FromString, } cardinalities = { 'AnalyzeEntities': cardinality.Cardinality.UNARY_UNARY, + 'AnalyzeEntitySentiment': cardinality.Cardinality.UNARY_UNARY, 'AnalyzeSentiment': cardinality.Cardinality.UNARY_UNARY, 'AnalyzeSyntax': cardinality.Cardinality.UNARY_UNARY, 'AnnotateText': cardinality.Cardinality.UNARY_UNARY, diff --git a/language/google/cloud/language_v1/proto/language_service_pb2_grpc.py b/language/google/cloud/language_v1/proto/language_service_pb2_grpc.py new file mode 100644 index 000000000000..937c7b54c7db --- /dev/null +++ b/language/google/cloud/language_v1/proto/language_service_pb2_grpc.py @@ -0,0 +1,122 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +import google.cloud.language_v1.proto.language_service_pb2 as google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2 + + +class LanguageServiceStub(object): + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.AnalyzeSentiment = channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeSentiment', + request_serializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeSentimentRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeSentimentResponse.FromString, + ) + self.AnalyzeEntities = channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeEntities', + request_serializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeEntitiesRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeEntitiesResponse.FromString, + ) + self.AnalyzeEntitySentiment = channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeEntitySentiment', + request_serializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeEntitySentimentRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeEntitySentimentResponse.FromString, + ) + self.AnalyzeSyntax = channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeSyntax', + request_serializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeSyntaxRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeSyntaxResponse.FromString, + ) + self.AnnotateText = channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnnotateText', + request_serializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnnotateTextRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnnotateTextResponse.FromString, + ) + + +class LanguageServiceServicer(object): + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + + def AnalyzeSentiment(self, request, context): + """Analyzes the sentiment of the provided text. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnalyzeEntities(self, request, context): + """Finds named entities (currently proper names and common nouns) in the text + along with entity types, salience, mentions for each entity, and + other properties. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnalyzeEntitySentiment(self, request, context): + """Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities] in the text and analyzes + sentiment associated with each entity and its mentions. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnalyzeSyntax(self, request, context): + """Analyzes the syntax of the text and provides sentence boundaries and + tokenization along with part of speech tags, dependency trees, and other + properties. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnnotateText(self, request, context): + """A convenience method that provides all the features that analyzeSentiment, + analyzeEntities, and analyzeSyntax provide in one call. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_LanguageServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'AnalyzeSentiment': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeSentiment, + request_deserializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeSentimentRequest.FromString, + response_serializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeSentimentResponse.SerializeToString, + ), + 'AnalyzeEntities': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeEntities, + request_deserializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeEntitiesRequest.FromString, + response_serializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeEntitiesResponse.SerializeToString, + ), + 'AnalyzeEntitySentiment': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeEntitySentiment, + request_deserializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeEntitySentimentRequest.FromString, + response_serializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeEntitySentimentResponse.SerializeToString, + ), + 'AnalyzeSyntax': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeSyntax, + request_deserializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeSyntaxRequest.FromString, + response_serializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeSyntaxResponse.SerializeToString, + ), + 'AnnotateText': grpc.unary_unary_rpc_method_handler( + servicer.AnnotateText, + request_deserializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnnotateTextRequest.FromString, + response_serializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnnotateTextResponse.SerializeToString, + ), + } + 
generic_handler = grpc.method_handlers_generic_handler( + 'google.cloud.language.v1.LanguageService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/language/google/cloud/language_v1/types.py b/language/google/cloud/language_v1/types.py index 6223f6846e09..16e40931d0f9 100644 --- a/language/google/cloud/language_v1/types.py +++ b/language/google/cloud/language_v1/types.py @@ -15,16 +15,20 @@ from __future__ import absolute_import import sys -from google.cloud.proto.language.v1 import language_service_pb2 - from google.gax.utils.messages import get_messages +from google.api import http_pb2 +from google.cloud.language_v1.proto import language_service_pb2 +from google.protobuf import descriptor_pb2 names = [] -for name, message in get_messages(language_service_pb2).items(): - message.__module__ = 'google.cloud.language_v1.types' - setattr(sys.modules[__name__], name, message) - names.append(name) - +for module in ( + http_pb2, + language_service_pb2, + descriptor_pb2, ): + for name, message in get_messages(module).items(): + message.__module__ = 'google.cloud.language_v1.types' + setattr(sys.modules[__name__], name, message) + names.append(name) __all__ = tuple(sorted(names)) diff --git a/language/google/cloud/language_v1beta2/__init__.py b/language/google/cloud/language_v1beta2/__init__.py index e0a3e4cc287a..a6c06cbf42c1 100644 --- a/language/google/cloud/language_v1beta2/__init__.py +++ b/language/google/cloud/language_v1beta2/__init__.py @@ -14,17 +14,17 @@ from __future__ import absolute_import -from google.cloud.gapic.language.v1beta2 import language_service_client as lsc -from google.cloud.gapic.language.v1beta2 import enums - from google.cloud.language_v1beta2 import types +from google.cloud.language_v1beta2.gapic import enums +from google.cloud.language_v1beta2.gapic import language_service_client -LanguageServiceClient = lsc.LanguageServiceClient +class LanguageServiceClient(language_service_client.LanguageServiceClient): + __doc__ = language_service_client.LanguageServiceClient.__doc__ + enums = enums __all__ = ( 'enums', - 'LanguageServiceClient', 'types', -) + 'LanguageServiceClient', ) diff --git a/language/google/cloud/language_v1beta2/gapic/__init__.py b/language/google/cloud/language_v1beta2/gapic/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/language/google/cloud/gapic/language/v1/enums.py b/language/google/cloud/language_v1beta2/gapic/enums.py similarity index 94% rename from language/google/cloud/gapic/language/v1/enums.py rename to language/google/cloud/language_v1beta2/gapic/enums.py index 2b53e4d913bb..689033aa4db2 100644 --- a/language/google/cloud/gapic/language/v1/enums.py +++ b/language/google/cloud/language_v1beta2/gapic/enums.py @@ -1,10 +1,10 @@ -# Copyright 2016 Google Inc. All rights reserved. +# Copyright 2017, Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -24,16 +24,16 @@ class EncodingType(object): Attributes: NONE (int): If ``EncodingType`` is not specified, encoding-dependent information (such as - ``begin_offset``) will be set at ``-1``. + ``begin_offset``) will be set at ``-1``. 
UTF8 (int): Encoding-dependent information (such as ``begin_offset``) is calculated based - on the UTF-8 encoding of the input. C++ and Go are examples of languages - that use this encoding natively. + on the UTF-8 encoding of the input. C++ and Go are examples of languages + that use this encoding natively. UTF16 (int): Encoding-dependent information (such as ``begin_offset``) is calculated based - on the UTF-16 encoding of the input. Java and Javascript are examples of - languages that use this encoding natively. + on the UTF-16 encoding of the input. Java and Javascript are examples of + languages that use this encoding natively. UTF32 (int): Encoding-dependent information (such as ``begin_offset``) is calculated based - on the UTF-32 encoding of the input. Python is an example of a language - that uses this encoding natively. + on the UTF-32 encoding of the input. Python is an example of a language + that uses this encoding natively. """ NONE = 0 UTF8 = 1 @@ -292,7 +292,7 @@ class Reciprocity(object): Attributes: RECIPROCITY_UNKNOWN (int): Reciprocity is not applicable in the analyzed language or is not - predicted. + predicted. RECIPROCAL (int): Reciprocal NON_RECIPROCAL (int): Non-reciprocal """ @@ -421,6 +421,12 @@ class Label(object): NUMC (int): Compound of numeric modifier COP (int): Copula DISLOCATED (int): Dislocated relation (for fronted/topicalized elements) + ASP (int): Aspect marker + GMOD (int): Genitive modifier + GOBJ (int): Genitive object + INFMOD (int): Infinitival modifier + MES (int): Measure + NCOMP (int): Nominal complement of a noun """ UNKNOWN = 0 ABBREV = 1 @@ -499,6 +505,12 @@ class Label(object): NUMC = 74 COP = 75 DISLOCATED = 76 + ASP = 77 + GMOD = 78 + GOBJ = 79 + INFMOD = 80 + MES = 81 + NCOMP = 82 class EntityMention(object): diff --git a/language/google/cloud/language_v1beta2/gapic/language_service_client.py b/language/google/cloud/language_v1beta2/gapic/language_service_client.py new file mode 100644 index 000000000000..6e735efc52b7 --- /dev/null +++ b/language/google/cloud/language_v1beta2/gapic/language_service_client.py @@ -0,0 +1,362 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# EDITING INSTRUCTIONS +# This file was generated from the file +# https://github.com/google/googleapis/blob/master/google/cloud/language/v1beta2/language_service.proto, +# and updates to that file get reflected here through a refresh process. +# For the short term, the refresh process will only be runnable by Google engineers. +# +# The only allowed edits are to method and file documentation. A 3-way +# merge preserves those additions if the generated source changes. 
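Aside on the enums change above: the renamed `gapic/enums.py` gains six new dependency labels (ASP, GMOD, GOBJ, INFMOD, MES, NCOMP, values 77–82, matching the descriptor additions earlier in this diff). The following is only a minimal sketch of reading those values; it assumes the module path shown in the rename header is importable and that `Label` remains nested under a `DependencyEdge` class (the hunk only shows the inner class). The new v1beta2 client module continues below.

```python
# Hypothetical check of the six dependency labels added in the enums diff
# above. Assumes the renamed module google/cloud/language_v1beta2/gapic/enums.py
# is importable and that Label is nested under DependencyEdge, as elsewhere
# in the generated enums.
from google.cloud.language_v1beta2.gapic import enums

NEW_LABELS = ('ASP', 'GMOD', 'GOBJ', 'INFMOD', 'MES', 'NCOMP')

for value, name in enumerate(NEW_LABELS, start=77):
    # Values 77 through 82, matching the EnumValueDescriptor entries above.
    assert getattr(enums.DependencyEdge.Label, name) == value
    print(name, value)
```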
+"""Accesses the google.cloud.language.v1beta2 LanguageService API.""" + +import collections +import json +import os +import pkg_resources +import platform + +from google.gax import api_callable +from google.gax import config +from google.gax import path_template +import google.gax + +from google.cloud.language_v1beta2.gapic import enums +from google.cloud.language_v1beta2.gapic import language_service_client_config +from google.cloud.language_v1beta2.proto import language_service_pb2 + + +class LanguageServiceClient(object): + """ + Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + + SERVICE_ADDRESS = 'language.googleapis.com' + """The default address of the service.""" + + DEFAULT_SERVICE_PORT = 443 + """The default port of the service.""" + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _ALL_SCOPES = ('https://www.googleapis.com/auth/cloud-platform', ) + + def __init__(self, + channel=None, + credentials=None, + ssl_credentials=None, + scopes=None, + client_config=None, + lib_name=None, + lib_version='', + metrics_headers=()): + """Constructor. + + Args: + channel (~grpc.Channel): A ``Channel`` instance through + which to make calls. + credentials (~google.auth.credentials.Credentials): The authorization + credentials to attach to requests. These credentials identify this + application to the service. + ssl_credentials (~grpc.ChannelCredentials): A + ``ChannelCredentials`` instance for use with an SSL-enabled + channel. + scopes (Sequence[str]): A list of OAuth2 scopes to attach to requests. + client_config (dict): + A dictionary for call options for each method. See + :func:`google.gax.construct_settings` for the structure of + this data. Falls back to the default config if not specified + or the specified config is missing data points. + lib_name (str): The API library software used for calling + the service. (Unless you are writing an API client itself, + leave this as default.) + lib_version (str): The API library software version used + for calling the service. (Unless you are writing an API client + itself, leave this as default.) + metrics_headers (dict): A dictionary of values for tracking + client library metrics. Ultimately serializes to a string + (e.g. 'foo/1.2.3 bar/3.14.1'). This argument should be + considered private. + + Returns: LanguageServiceClient + """ + # Unless the calling application specifically requested + # OAuth scopes, request everything. + if scopes is None: + scopes = self._ALL_SCOPES + + # Initialize an empty client config, if none is set. + if client_config is None: + client_config = {} + + # Initialize metrics_headers as an ordered dictionary + # (cuts down on cardinality of the resulting string slightly). + metrics_headers = collections.OrderedDict(metrics_headers) + metrics_headers['gl-python'] = platform.python_version() + + # The library may or may not be set, depending on what is + # calling this client. Newer client libraries set the library name + # and version. + if lib_name: + metrics_headers[lib_name] = lib_version + + # Finally, track the GAPIC package version. + metrics_headers['gapic'] = pkg_resources.get_distribution( + 'google-cloud-language', ).version + + # Load the configuration defaults. 
+ defaults = api_callable.construct_settings( + 'google.cloud.language.v1beta2.LanguageService', + language_service_client_config.config, + client_config, + config.STATUS_CODE_NAMES, + metrics_headers=metrics_headers, ) + self.language_service_stub = config.create_stub( + language_service_pb2.LanguageServiceStub, + channel=channel, + service_path=self.SERVICE_ADDRESS, + service_port=self.DEFAULT_SERVICE_PORT, + credentials=credentials, + scopes=scopes, + ssl_credentials=ssl_credentials) + + self._analyze_sentiment = api_callable.create_api_call( + self.language_service_stub.AnalyzeSentiment, + settings=defaults['analyze_sentiment']) + self._analyze_entities = api_callable.create_api_call( + self.language_service_stub.AnalyzeEntities, + settings=defaults['analyze_entities']) + self._analyze_entity_sentiment = api_callable.create_api_call( + self.language_service_stub.AnalyzeEntitySentiment, + settings=defaults['analyze_entity_sentiment']) + self._analyze_syntax = api_callable.create_api_call( + self.language_service_stub.AnalyzeSyntax, + settings=defaults['analyze_syntax']) + self._classify_text = api_callable.create_api_call( + self.language_service_stub.ClassifyText, + settings=defaults['classify_text']) + self._annotate_text = api_callable.create_api_call( + self.language_service_stub.AnnotateText, + settings=defaults['annotate_text']) + + # Service calls + def analyze_sentiment(self, document, encoding_type=None, options=None): + """ + Analyzes the sentiment of the provided text. + + Example: + >>> from google.cloud import language_v1beta2 + >>> + >>> client = language_v1beta2.LanguageServiceClient() + >>> + >>> document = {} + >>> + >>> response = client.analyze_sentiment(document) + + Args: + document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Input document. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.language_v1beta2.types.Document` + encoding_type (~google.cloud.language_v1beta2.types.EncodingType): The encoding type used by the API to calculate sentence offsets for the + sentence sentiment. + options (~google.gax.CallOptions): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`~google.cloud.language_v1beta2.types.AnalyzeSentimentResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + request = language_service_pb2.AnalyzeSentimentRequest( + document=document, encoding_type=encoding_type) + return self._analyze_sentiment(request, options) + + def analyze_entities(self, document, encoding_type=None, options=None): + """ + Finds named entities (currently proper names and common nouns) in the text + along with entity types, salience, mentions for each entity, and + other properties. + + Example: + >>> from google.cloud import language_v1beta2 + >>> + >>> client = language_v1beta2.LanguageServiceClient() + >>> + >>> document = {} + >>> + >>> response = client.analyze_entities(document) + + Args: + document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Input document. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.language_v1beta2.types.Document` + encoding_type (~google.cloud.language_v1beta2.types.EncodingType): The encoding type used by the API to calculate offsets. + options (~google.gax.CallOptions): Overrides the default + settings for this call, e.g, timeout, retries etc. 
+ + Returns: + A :class:`~google.cloud.language_v1beta2.types.AnalyzeEntitiesResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + request = language_service_pb2.AnalyzeEntitiesRequest( + document=document, encoding_type=encoding_type) + return self._analyze_entities(request, options) + + def analyze_entity_sentiment(self, + document, + encoding_type=None, + options=None): + """ + Finds entities, similar to ``AnalyzeEntities`` in the text and analyzes + sentiment associated with each entity and its mentions. + + Example: + >>> from google.cloud import language_v1beta2 + >>> + >>> client = language_v1beta2.LanguageServiceClient() + >>> + >>> document = {} + >>> + >>> response = client.analyze_entity_sentiment(document) + + Args: + document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Input document. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.language_v1beta2.types.Document` + encoding_type (~google.cloud.language_v1beta2.types.EncodingType): The encoding type used by the API to calculate offsets. + options (~google.gax.CallOptions): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`~google.cloud.language_v1beta2.types.AnalyzeEntitySentimentResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + request = language_service_pb2.AnalyzeEntitySentimentRequest( + document=document, encoding_type=encoding_type) + return self._analyze_entity_sentiment(request, options) + + def analyze_syntax(self, document, encoding_type=None, options=None): + """ + Analyzes the syntax of the text and provides sentence boundaries and + tokenization along with part of speech tags, dependency trees, and other + properties. + + Example: + >>> from google.cloud import language_v1beta2 + >>> + >>> client = language_v1beta2.LanguageServiceClient() + >>> + >>> document = {} + >>> + >>> response = client.analyze_syntax(document) + + Args: + document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Input document. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.language_v1beta2.types.Document` + encoding_type (~google.cloud.language_v1beta2.types.EncodingType): The encoding type used by the API to calculate offsets. + options (~google.gax.CallOptions): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`~google.cloud.language_v1beta2.types.AnalyzeSyntaxResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + request = language_service_pb2.AnalyzeSyntaxRequest( + document=document, encoding_type=encoding_type) + return self._analyze_syntax(request, options) + + def classify_text(self, document, options=None): + """ + Classifies a document into categories. + + Example: + >>> from google.cloud import language_v1beta2 + >>> + >>> client = language_v1beta2.LanguageServiceClient() + >>> + >>> document = {} + >>> + >>> response = client.classify_text(document) + + Args: + document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Input document. 
+ If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.language_v1beta2.types.Document` + options (~google.gax.CallOptions): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`~google.cloud.language_v1beta2.types.ClassifyTextResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + request = language_service_pb2.ClassifyTextRequest(document=document) + return self._classify_text(request, options) + + def annotate_text(self, + document, + features, + encoding_type=None, + options=None): + """ + A convenience method that provides all syntax, sentiment, entity, and + classification features in one call. + + Example: + >>> from google.cloud import language_v1beta2 + >>> + >>> client = language_v1beta2.LanguageServiceClient() + >>> + >>> document = {} + >>> features = {} + >>> + >>> response = client.annotate_text(document, features) + + Args: + document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Input document. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.language_v1beta2.types.Document` + features (Union[dict, ~google.cloud.language_v1beta2.types.Features]): The enabled features. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.language_v1beta2.types.Features` + encoding_type (~google.cloud.language_v1beta2.types.EncodingType): The encoding type used by the API to calculate offsets. + options (~google.gax.CallOptions): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`~google.cloud.language_v1beta2.types.AnnotateTextResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. 
+ """ + request = language_service_pb2.AnnotateTextRequest( + document=document, features=features, encoding_type=encoding_type) + return self._annotate_text(request, options) diff --git a/language/google/cloud/language_v1beta2/gapic/language_service_client_config.py b/language/google/cloud/language_v1beta2/gapic/language_service_client_config.py new file mode 100644 index 000000000000..f3473d4a2667 --- /dev/null +++ b/language/google/cloud/language_v1beta2/gapic/language_service_client_config.py @@ -0,0 +1,53 @@ +config = { + "interfaces": { + "google.cloud.language.v1beta2.LanguageService": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "non_idempotent": [] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "AnalyzeSentiment": { + "timeout_millis": 30000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "AnalyzeEntities": { + "timeout_millis": 30000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "AnalyzeEntitySentiment": { + "timeout_millis": 30000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "AnalyzeSyntax": { + "timeout_millis": 30000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ClassifyText": { + "timeout_millis": 30000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "AnnotateText": { + "timeout_millis": 30000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/language/google/cloud/language_v1beta2/proto/__init__.py b/language/google/cloud/language_v1beta2/proto/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/language/google/cloud/proto/language/v1beta2/language_service_pb2.py b/language/google/cloud/language_v1beta2/proto/language_service_pb2.py similarity index 86% rename from language/google/cloud/proto/language/v1beta2/language_service_pb2.py rename to language/google/cloud/language_v1beta2/proto/language_service_pb2.py index d3e1d150af8d..92caa1bc235f 100644 --- a/language/google/cloud/proto/language/v1beta2/language_service_pb2.py +++ b/language/google/cloud/language_v1beta2/proto/language_service_pb2.py @@ -1,5 +1,5 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/proto/language/v1beta2/language_service.proto +# source: google/cloud/language_v1beta2/proto/language_service.proto import sys _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) @@ -21,10 +21,10 @@ DESCRIPTOR = _descriptor.FileDescriptor( - name='google/cloud/proto/language/v1beta2/language_service.proto', + name='google/cloud/language_v1beta2/proto/language_service.proto', package='google.cloud.language.v1beta2', syntax='proto3', - serialized_pb=_b('\n:google/cloud/proto/language/v1beta2/language_service.proto\x12\x1dgoogle.cloud.language.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a#google/longrunning/operations.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\"\xc8\x01\n\x08\x44ocument\x12:\n\x04type\x18\x01 \x01(\x0e\x32,.google.cloud.language.v1beta2.Document.Type\x12\x11\n\x07\x63ontent\x18\x02 \x01(\tH\x00\x12\x19\n\x0fgcs_content_uri\x18\x03 \x01(\tH\x00\x12\x10\n\x08language\x18\x04 \x01(\t\"6\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nPLAIN_TEXT\x10\x01\x12\x08\n\x04HTML\x10\x02\x42\x08\n\x06source\"~\n\x08Sentence\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12;\n\tsentiment\x18\x02 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\"\xd2\x03\n\x06\x45ntity\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x04type\x18\x02 \x01(\x0e\x32*.google.cloud.language.v1beta2.Entity.Type\x12\x45\n\x08metadata\x18\x03 \x03(\x0b\x32\x33.google.cloud.language.v1beta2.Entity.MetadataEntry\x12\x10\n\x08salience\x18\x04 \x01(\x02\x12>\n\x08mentions\x18\x05 \x03(\x0b\x32,.google.cloud.language.v1beta2.EntityMention\x12;\n\tsentiment\x18\x06 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"y\n\x04Type\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06PERSON\x10\x01\x12\x0c\n\x08LOCATION\x10\x02\x12\x10\n\x0cORGANIZATION\x10\x03\x12\t\n\x05\x45VENT\x10\x04\x12\x0f\n\x0bWORK_OF_ART\x10\x05\x12\x11\n\rCONSUMER_GOOD\x10\x06\x12\t\n\x05OTHER\x10\x07\"\xda\x01\n\x05Token\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12\x43\n\x0epart_of_speech\x18\x02 \x01(\x0b\x32+.google.cloud.language.v1beta2.PartOfSpeech\x12\x46\n\x0f\x64\x65pendency_edge\x18\x03 \x01(\x0b\x32-.google.cloud.language.v1beta2.DependencyEdge\x12\r\n\x05lemma\x18\x04 \x01(\t\"-\n\tSentiment\x12\x11\n\tmagnitude\x18\x02 \x01(\x02\x12\r\n\x05score\x18\x03 \x01(\x02\"\xdf\x10\n\x0cPartOfSpeech\x12<\n\x03tag\x18\x01 \x01(\x0e\x32/.google.cloud.language.v1beta2.PartOfSpeech.Tag\x12\x42\n\x06\x61spect\x18\x02 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Aspect\x12>\n\x04\x63\x61se\x18\x03 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Case\x12>\n\x04\x66orm\x18\x04 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Form\x12\x42\n\x06gender\x18\x05 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Gender\x12>\n\x04mood\x18\x06 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Mood\x12\x42\n\x06number\x18\x07 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Number\x12\x42\n\x06person\x18\x08 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Person\x12\x42\n\x06proper\x18\t \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Proper\x12L\n\x0breciprocity\x18\n \x01(\x0e\x32\x37.google.cloud.language.v1beta2.PartOfSpeech.Reciprocity\x12@\n\x05tense\x18\x0b 
\x01(\x0e\x32\x31.google.cloud.language.v1beta2.PartOfSpeech.Tense\x12@\n\x05voice\x18\x0c \x01(\x0e\x32\x31.google.cloud.language.v1beta2.PartOfSpeech.Voice\"\x8d\x01\n\x03Tag\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03\x41\x44J\x10\x01\x12\x07\n\x03\x41\x44P\x10\x02\x12\x07\n\x03\x41\x44V\x10\x03\x12\x08\n\x04\x43ONJ\x10\x04\x12\x07\n\x03\x44\x45T\x10\x05\x12\x08\n\x04NOUN\x10\x06\x12\x07\n\x03NUM\x10\x07\x12\x08\n\x04PRON\x10\x08\x12\x07\n\x03PRT\x10\t\x12\t\n\x05PUNCT\x10\n\x12\x08\n\x04VERB\x10\x0b\x12\x05\n\x01X\x10\x0c\x12\t\n\x05\x41\x46\x46IX\x10\r\"O\n\x06\x41spect\x12\x12\n\x0e\x41SPECT_UNKNOWN\x10\x00\x12\x0e\n\nPERFECTIVE\x10\x01\x12\x10\n\x0cIMPERFECTIVE\x10\x02\x12\x0f\n\x0bPROGRESSIVE\x10\x03\"\xf8\x01\n\x04\x43\x61se\x12\x10\n\x0c\x43\x41SE_UNKNOWN\x10\x00\x12\x0e\n\nACCUSATIVE\x10\x01\x12\r\n\tADVERBIAL\x10\x02\x12\x11\n\rCOMPLEMENTIVE\x10\x03\x12\n\n\x06\x44\x41TIVE\x10\x04\x12\x0c\n\x08GENITIVE\x10\x05\x12\x10\n\x0cINSTRUMENTAL\x10\x06\x12\x0c\n\x08LOCATIVE\x10\x07\x12\x0e\n\nNOMINATIVE\x10\x08\x12\x0b\n\x07OBLIQUE\x10\t\x12\r\n\tPARTITIVE\x10\n\x12\x11\n\rPREPOSITIONAL\x10\x0b\x12\x12\n\x0eREFLEXIVE_CASE\x10\x0c\x12\x11\n\rRELATIVE_CASE\x10\r\x12\x0c\n\x08VOCATIVE\x10\x0e\"\xaf\x01\n\x04\x46orm\x12\x10\n\x0c\x46ORM_UNKNOWN\x10\x00\x12\x0c\n\x08\x41\x44NOMIAL\x10\x01\x12\r\n\tAUXILIARY\x10\x02\x12\x12\n\x0e\x43OMPLEMENTIZER\x10\x03\x12\x10\n\x0c\x46INAL_ENDING\x10\x04\x12\n\n\x06GERUND\x10\x05\x12\n\n\x06REALIS\x10\x06\x12\x0c\n\x08IRREALIS\x10\x07\x12\t\n\x05SHORT\x10\x08\x12\x08\n\x04LONG\x10\t\x12\t\n\x05ORDER\x10\n\x12\x0c\n\x08SPECIFIC\x10\x0b\"E\n\x06Gender\x12\x12\n\x0eGENDER_UNKNOWN\x10\x00\x12\x0c\n\x08\x46\x45MININE\x10\x01\x12\r\n\tMASCULINE\x10\x02\x12\n\n\x06NEUTER\x10\x03\"\x7f\n\x04Mood\x12\x10\n\x0cMOOD_UNKNOWN\x10\x00\x12\x14\n\x10\x43ONDITIONAL_MOOD\x10\x01\x12\x0e\n\nIMPERATIVE\x10\x02\x12\x0e\n\nINDICATIVE\x10\x03\x12\x11\n\rINTERROGATIVE\x10\x04\x12\x0b\n\x07JUSSIVE\x10\x05\x12\x0f\n\x0bSUBJUNCTIVE\x10\x06\"@\n\x06Number\x12\x12\n\x0eNUMBER_UNKNOWN\x10\x00\x12\x0c\n\x08SINGULAR\x10\x01\x12\n\n\x06PLURAL\x10\x02\x12\x08\n\x04\x44UAL\x10\x03\"T\n\x06Person\x12\x12\n\x0ePERSON_UNKNOWN\x10\x00\x12\t\n\x05\x46IRST\x10\x01\x12\n\n\x06SECOND\x10\x02\x12\t\n\x05THIRD\x10\x03\x12\x14\n\x10REFLEXIVE_PERSON\x10\x04\"8\n\x06Proper\x12\x12\n\x0ePROPER_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\x0e\n\nNOT_PROPER\x10\x02\"J\n\x0bReciprocity\x12\x17\n\x13RECIPROCITY_UNKNOWN\x10\x00\x12\x0e\n\nRECIPROCAL\x10\x01\x12\x12\n\x0eNON_RECIPROCAL\x10\x02\"s\n\x05Tense\x12\x11\n\rTENSE_UNKNOWN\x10\x00\x12\x15\n\x11\x43ONDITIONAL_TENSE\x10\x01\x12\n\n\x06\x46UTURE\x10\x02\x12\x08\n\x04PAST\x10\x03\x12\x0b\n\x07PRESENT\x10\x04\x12\r\n\tIMPERFECT\x10\x05\x12\x0e\n\nPLUPERFECT\x10\x06\"B\n\x05Voice\x12\x11\n\rVOICE_UNKNOWN\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\r\n\tCAUSATIVE\x10\x02\x12\x0b\n\x07PASSIVE\x10\x03\"\xdd\x07\n\x0e\x44\x65pendencyEdge\x12\x18\n\x10head_token_index\x18\x01 \x01(\x05\x12\x42\n\x05label\x18\x02 
\x01(\x0e\x32\x33.google.cloud.language.v1beta2.DependencyEdge.Label\"\xec\x06\n\x05Label\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06\x41\x42\x42REV\x10\x01\x12\t\n\x05\x41\x43OMP\x10\x02\x12\t\n\x05\x41\x44VCL\x10\x03\x12\n\n\x06\x41\x44VMOD\x10\x04\x12\x08\n\x04\x41MOD\x10\x05\x12\t\n\x05\x41PPOS\x10\x06\x12\x08\n\x04\x41TTR\x10\x07\x12\x07\n\x03\x41UX\x10\x08\x12\x0b\n\x07\x41UXPASS\x10\t\x12\x06\n\x02\x43\x43\x10\n\x12\t\n\x05\x43\x43OMP\x10\x0b\x12\x08\n\x04\x43ONJ\x10\x0c\x12\t\n\x05\x43SUBJ\x10\r\x12\r\n\tCSUBJPASS\x10\x0e\x12\x07\n\x03\x44\x45P\x10\x0f\x12\x07\n\x03\x44\x45T\x10\x10\x12\r\n\tDISCOURSE\x10\x11\x12\x08\n\x04\x44OBJ\x10\x12\x12\x08\n\x04\x45XPL\x10\x13\x12\x0c\n\x08GOESWITH\x10\x14\x12\x08\n\x04IOBJ\x10\x15\x12\x08\n\x04MARK\x10\x16\x12\x07\n\x03MWE\x10\x17\x12\x07\n\x03MWV\x10\x18\x12\x07\n\x03NEG\x10\x19\x12\x06\n\x02NN\x10\x1a\x12\x0c\n\x08NPADVMOD\x10\x1b\x12\t\n\x05NSUBJ\x10\x1c\x12\r\n\tNSUBJPASS\x10\x1d\x12\x07\n\x03NUM\x10\x1e\x12\n\n\x06NUMBER\x10\x1f\x12\x05\n\x01P\x10 \x12\r\n\tPARATAXIS\x10!\x12\x0b\n\x07PARTMOD\x10\"\x12\t\n\x05PCOMP\x10#\x12\x08\n\x04POBJ\x10$\x12\x08\n\x04POSS\x10%\x12\x0b\n\x07POSTNEG\x10&\x12\x0b\n\x07PRECOMP\x10\'\x12\x0b\n\x07PRECONJ\x10(\x12\n\n\x06PREDET\x10)\x12\x08\n\x04PREF\x10*\x12\x08\n\x04PREP\x10+\x12\t\n\x05PRONL\x10,\x12\x07\n\x03PRT\x10-\x12\x06\n\x02PS\x10.\x12\x0c\n\x08QUANTMOD\x10/\x12\t\n\x05RCMOD\x10\x30\x12\x0c\n\x08RCMODREL\x10\x31\x12\t\n\x05RDROP\x10\x32\x12\x07\n\x03REF\x10\x33\x12\x0b\n\x07REMNANT\x10\x34\x12\x0e\n\nREPARANDUM\x10\x35\x12\x08\n\x04ROOT\x10\x36\x12\x08\n\x04SNUM\x10\x37\x12\x08\n\x04SUFF\x10\x38\x12\x08\n\x04TMOD\x10\x39\x12\t\n\x05TOPIC\x10:\x12\x08\n\x04VMOD\x10;\x12\x0c\n\x08VOCATIVE\x10<\x12\t\n\x05XCOMP\x10=\x12\n\n\x06SUFFIX\x10>\x12\t\n\x05TITLE\x10?\x12\x0c\n\x08\x41\x44VPHMOD\x10@\x12\x0b\n\x07\x41UXCAUS\x10\x41\x12\t\n\x05\x41UXVV\x10\x42\x12\t\n\x05\x44TMOD\x10\x43\x12\x0b\n\x07\x46OREIGN\x10\x44\x12\x06\n\x02KW\x10\x45\x12\x08\n\x04LIST\x10\x46\x12\x08\n\x04NOMC\x10G\x12\x0c\n\x08NOMCSUBJ\x10H\x12\x10\n\x0cNOMCSUBJPASS\x10I\x12\x08\n\x04NUMC\x10J\x12\x07\n\x03\x43OP\x10K\x12\x0e\n\nDISLOCATED\x10L\"\xf6\x01\n\rEntityMention\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12?\n\x04type\x18\x02 \x01(\x0e\x32\x31.google.cloud.language.v1beta2.EntityMention.Type\x12;\n\tsentiment\x18\x03 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\"0\n\x04Type\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\n\n\x06\x43OMMON\x10\x02\"1\n\x08TextSpan\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12\x14\n\x0c\x62\x65gin_offset\x18\x02 \x01(\x05\"\x98\x01\n\x17\x41nalyzeSentimentRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType\"\xae\x01\n\x18\x41nalyzeSentimentResponse\x12\x44\n\x12\x64ocument_sentiment\x18\x01 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x12\x10\n\x08language\x18\x02 \x01(\t\x12:\n\tsentences\x18\x03 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence\"\x9e\x01\n\x1d\x41nalyzeEntitySentimentRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType\"k\n\x1e\x41nalyzeEntitySentimentResponse\x12\x37\n\x08\x65ntities\x18\x01 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x10\n\x08language\x18\x02 
\x01(\t\"\x97\x01\n\x16\x41nalyzeEntitiesRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType\"d\n\x17\x41nalyzeEntitiesResponse\x12\x37\n\x08\x65ntities\x18\x01 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x10\n\x08language\x18\x02 \x01(\t\"\x95\x01\n\x14\x41nalyzeSyntaxRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType\"\x9b\x01\n\x15\x41nalyzeSyntaxResponse\x12:\n\tsentences\x18\x01 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence\x12\x34\n\x06tokens\x18\x02 \x03(\x0b\x32$.google.cloud.language.v1beta2.Token\x12\x10\n\x08language\x18\x03 \x01(\t\"\xe8\x02\n\x13\x41nnotateTextRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12M\n\x08\x66\x65\x61tures\x18\x02 \x01(\x0b\x32;.google.cloud.language.v1beta2.AnnotateTextRequest.Features\x12\x42\n\rencoding_type\x18\x03 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType\x1a\x82\x01\n\x08\x46\x65\x61tures\x12\x16\n\x0e\x65xtract_syntax\x18\x01 \x01(\x08\x12\x18\n\x10\x65xtract_entities\x18\x02 \x01(\x08\x12\"\n\x1a\x65xtract_document_sentiment\x18\x03 \x01(\x08\x12 \n\x18\x65xtract_entity_sentiment\x18\x04 \x01(\x08\"\x99\x02\n\x14\x41nnotateTextResponse\x12:\n\tsentences\x18\x01 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence\x12\x34\n\x06tokens\x18\x02 \x03(\x0b\x32$.google.cloud.language.v1beta2.Token\x12\x37\n\x08\x65ntities\x18\x03 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x44\n\x12\x64ocument_sentiment\x18\x04 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x12\x10\n\x08language\x18\x05 \x01(\t*8\n\x0c\x45ncodingType\x12\x08\n\x04NONE\x10\x00\x12\x08\n\x04UTF8\x10\x01\x12\t\n\x05UTF16\x10\x02\x12\t\n\x05UTF32\x10\x03\x32\x97\x07\n\x0fLanguageService\x12\xb3\x01\n\x10\x41nalyzeSentiment\x12\x36.google.cloud.language.v1beta2.AnalyzeSentimentRequest\x1a\x37.google.cloud.language.v1beta2.AnalyzeSentimentResponse\".\x82\xd3\xe4\x93\x02(\"#/v1beta2/documents:analyzeSentiment:\x01*\x12\xaf\x01\n\x0f\x41nalyzeEntities\x12\x35.google.cloud.language.v1beta2.AnalyzeEntitiesRequest\x1a\x36.google.cloud.language.v1beta2.AnalyzeEntitiesResponse\"-\x82\xd3\xe4\x93\x02\'\"\"/v1beta2/documents:analyzeEntities:\x01*\x12\xcb\x01\n\x16\x41nalyzeEntitySentiment\x12<.google.cloud.language.v1beta2.AnalyzeEntitySentimentRequest\x1a=.google.cloud.language.v1beta2.AnalyzeEntitySentimentResponse\"4\x82\xd3\xe4\x93\x02.\")/v1beta2/documents:analyzeEntitySentiment:\x01*\x12\xa7\x01\n\rAnalyzeSyntax\x12\x33.google.cloud.language.v1beta2.AnalyzeSyntaxRequest\x1a\x34.google.cloud.language.v1beta2.AnalyzeSyntaxResponse\"+\x82\xd3\xe4\x93\x02%\" /v1beta2/documents:analyzeSyntax:\x01*\x12\xa3\x01\n\x0c\x41nnotateText\x12\x32.google.cloud.language.v1beta2.AnnotateTextRequest\x1a\x33.google.cloud.language.v1beta2.AnnotateTextResponse\"*\x82\xd3\xe4\x93\x02$\"\x1f/v1beta2/documents:annotateText:\x01*B\x82\x01\n!com.google.cloud.language.v1beta2B\x14LanguageServiceProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/language/v1beta2;languageb\x06proto3') + 
serialized_pb=_b('\n:google/cloud/language_v1beta2/proto/language_service.proto\x12\x1dgoogle.cloud.language.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a#google/longrunning/operations.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\"\xc8\x01\n\x08\x44ocument\x12:\n\x04type\x18\x01 \x01(\x0e\x32,.google.cloud.language.v1beta2.Document.Type\x12\x11\n\x07\x63ontent\x18\x02 \x01(\tH\x00\x12\x19\n\x0fgcs_content_uri\x18\x03 \x01(\tH\x00\x12\x10\n\x08language\x18\x04 \x01(\t\"6\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nPLAIN_TEXT\x10\x01\x12\x08\n\x04HTML\x10\x02\x42\x08\n\x06source\"~\n\x08Sentence\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12;\n\tsentiment\x18\x02 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\"\xd2\x03\n\x06\x45ntity\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x04type\x18\x02 \x01(\x0e\x32*.google.cloud.language.v1beta2.Entity.Type\x12\x45\n\x08metadata\x18\x03 \x03(\x0b\x32\x33.google.cloud.language.v1beta2.Entity.MetadataEntry\x12\x10\n\x08salience\x18\x04 \x01(\x02\x12>\n\x08mentions\x18\x05 \x03(\x0b\x32,.google.cloud.language.v1beta2.EntityMention\x12;\n\tsentiment\x18\x06 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"y\n\x04Type\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06PERSON\x10\x01\x12\x0c\n\x08LOCATION\x10\x02\x12\x10\n\x0cORGANIZATION\x10\x03\x12\t\n\x05\x45VENT\x10\x04\x12\x0f\n\x0bWORK_OF_ART\x10\x05\x12\x11\n\rCONSUMER_GOOD\x10\x06\x12\t\n\x05OTHER\x10\x07\"\xda\x01\n\x05Token\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12\x43\n\x0epart_of_speech\x18\x02 \x01(\x0b\x32+.google.cloud.language.v1beta2.PartOfSpeech\x12\x46\n\x0f\x64\x65pendency_edge\x18\x03 \x01(\x0b\x32-.google.cloud.language.v1beta2.DependencyEdge\x12\r\n\x05lemma\x18\x04 \x01(\t\"-\n\tSentiment\x12\x11\n\tmagnitude\x18\x02 \x01(\x02\x12\r\n\x05score\x18\x03 \x01(\x02\"\xdf\x10\n\x0cPartOfSpeech\x12<\n\x03tag\x18\x01 \x01(\x0e\x32/.google.cloud.language.v1beta2.PartOfSpeech.Tag\x12\x42\n\x06\x61spect\x18\x02 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Aspect\x12>\n\x04\x63\x61se\x18\x03 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Case\x12>\n\x04\x66orm\x18\x04 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Form\x12\x42\n\x06gender\x18\x05 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Gender\x12>\n\x04mood\x18\x06 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Mood\x12\x42\n\x06number\x18\x07 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Number\x12\x42\n\x06person\x18\x08 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Person\x12\x42\n\x06proper\x18\t \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Proper\x12L\n\x0breciprocity\x18\n \x01(\x0e\x32\x37.google.cloud.language.v1beta2.PartOfSpeech.Reciprocity\x12@\n\x05tense\x18\x0b \x01(\x0e\x32\x31.google.cloud.language.v1beta2.PartOfSpeech.Tense\x12@\n\x05voice\x18\x0c 
\x01(\x0e\x32\x31.google.cloud.language.v1beta2.PartOfSpeech.Voice\"\x8d\x01\n\x03Tag\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03\x41\x44J\x10\x01\x12\x07\n\x03\x41\x44P\x10\x02\x12\x07\n\x03\x41\x44V\x10\x03\x12\x08\n\x04\x43ONJ\x10\x04\x12\x07\n\x03\x44\x45T\x10\x05\x12\x08\n\x04NOUN\x10\x06\x12\x07\n\x03NUM\x10\x07\x12\x08\n\x04PRON\x10\x08\x12\x07\n\x03PRT\x10\t\x12\t\n\x05PUNCT\x10\n\x12\x08\n\x04VERB\x10\x0b\x12\x05\n\x01X\x10\x0c\x12\t\n\x05\x41\x46\x46IX\x10\r\"O\n\x06\x41spect\x12\x12\n\x0e\x41SPECT_UNKNOWN\x10\x00\x12\x0e\n\nPERFECTIVE\x10\x01\x12\x10\n\x0cIMPERFECTIVE\x10\x02\x12\x0f\n\x0bPROGRESSIVE\x10\x03\"\xf8\x01\n\x04\x43\x61se\x12\x10\n\x0c\x43\x41SE_UNKNOWN\x10\x00\x12\x0e\n\nACCUSATIVE\x10\x01\x12\r\n\tADVERBIAL\x10\x02\x12\x11\n\rCOMPLEMENTIVE\x10\x03\x12\n\n\x06\x44\x41TIVE\x10\x04\x12\x0c\n\x08GENITIVE\x10\x05\x12\x10\n\x0cINSTRUMENTAL\x10\x06\x12\x0c\n\x08LOCATIVE\x10\x07\x12\x0e\n\nNOMINATIVE\x10\x08\x12\x0b\n\x07OBLIQUE\x10\t\x12\r\n\tPARTITIVE\x10\n\x12\x11\n\rPREPOSITIONAL\x10\x0b\x12\x12\n\x0eREFLEXIVE_CASE\x10\x0c\x12\x11\n\rRELATIVE_CASE\x10\r\x12\x0c\n\x08VOCATIVE\x10\x0e\"\xaf\x01\n\x04\x46orm\x12\x10\n\x0c\x46ORM_UNKNOWN\x10\x00\x12\x0c\n\x08\x41\x44NOMIAL\x10\x01\x12\r\n\tAUXILIARY\x10\x02\x12\x12\n\x0e\x43OMPLEMENTIZER\x10\x03\x12\x10\n\x0c\x46INAL_ENDING\x10\x04\x12\n\n\x06GERUND\x10\x05\x12\n\n\x06REALIS\x10\x06\x12\x0c\n\x08IRREALIS\x10\x07\x12\t\n\x05SHORT\x10\x08\x12\x08\n\x04LONG\x10\t\x12\t\n\x05ORDER\x10\n\x12\x0c\n\x08SPECIFIC\x10\x0b\"E\n\x06Gender\x12\x12\n\x0eGENDER_UNKNOWN\x10\x00\x12\x0c\n\x08\x46\x45MININE\x10\x01\x12\r\n\tMASCULINE\x10\x02\x12\n\n\x06NEUTER\x10\x03\"\x7f\n\x04Mood\x12\x10\n\x0cMOOD_UNKNOWN\x10\x00\x12\x14\n\x10\x43ONDITIONAL_MOOD\x10\x01\x12\x0e\n\nIMPERATIVE\x10\x02\x12\x0e\n\nINDICATIVE\x10\x03\x12\x11\n\rINTERROGATIVE\x10\x04\x12\x0b\n\x07JUSSIVE\x10\x05\x12\x0f\n\x0bSUBJUNCTIVE\x10\x06\"@\n\x06Number\x12\x12\n\x0eNUMBER_UNKNOWN\x10\x00\x12\x0c\n\x08SINGULAR\x10\x01\x12\n\n\x06PLURAL\x10\x02\x12\x08\n\x04\x44UAL\x10\x03\"T\n\x06Person\x12\x12\n\x0ePERSON_UNKNOWN\x10\x00\x12\t\n\x05\x46IRST\x10\x01\x12\n\n\x06SECOND\x10\x02\x12\t\n\x05THIRD\x10\x03\x12\x14\n\x10REFLEXIVE_PERSON\x10\x04\"8\n\x06Proper\x12\x12\n\x0ePROPER_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\x0e\n\nNOT_PROPER\x10\x02\"J\n\x0bReciprocity\x12\x17\n\x13RECIPROCITY_UNKNOWN\x10\x00\x12\x0e\n\nRECIPROCAL\x10\x01\x12\x12\n\x0eNON_RECIPROCAL\x10\x02\"s\n\x05Tense\x12\x11\n\rTENSE_UNKNOWN\x10\x00\x12\x15\n\x11\x43ONDITIONAL_TENSE\x10\x01\x12\n\n\x06\x46UTURE\x10\x02\x12\x08\n\x04PAST\x10\x03\x12\x0b\n\x07PRESENT\x10\x04\x12\r\n\tIMPERFECT\x10\x05\x12\x0e\n\nPLUPERFECT\x10\x06\"B\n\x05Voice\x12\x11\n\rVOICE_UNKNOWN\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\r\n\tCAUSATIVE\x10\x02\x12\x0b\n\x07PASSIVE\x10\x03\"\x9a\x08\n\x0e\x44\x65pendencyEdge\x12\x18\n\x10head_token_index\x18\x01 \x01(\x05\x12\x42\n\x05label\x18\x02 
\x01(\x0e\x32\x33.google.cloud.language.v1beta2.DependencyEdge.Label\"\xa9\x07\n\x05Label\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06\x41\x42\x42REV\x10\x01\x12\t\n\x05\x41\x43OMP\x10\x02\x12\t\n\x05\x41\x44VCL\x10\x03\x12\n\n\x06\x41\x44VMOD\x10\x04\x12\x08\n\x04\x41MOD\x10\x05\x12\t\n\x05\x41PPOS\x10\x06\x12\x08\n\x04\x41TTR\x10\x07\x12\x07\n\x03\x41UX\x10\x08\x12\x0b\n\x07\x41UXPASS\x10\t\x12\x06\n\x02\x43\x43\x10\n\x12\t\n\x05\x43\x43OMP\x10\x0b\x12\x08\n\x04\x43ONJ\x10\x0c\x12\t\n\x05\x43SUBJ\x10\r\x12\r\n\tCSUBJPASS\x10\x0e\x12\x07\n\x03\x44\x45P\x10\x0f\x12\x07\n\x03\x44\x45T\x10\x10\x12\r\n\tDISCOURSE\x10\x11\x12\x08\n\x04\x44OBJ\x10\x12\x12\x08\n\x04\x45XPL\x10\x13\x12\x0c\n\x08GOESWITH\x10\x14\x12\x08\n\x04IOBJ\x10\x15\x12\x08\n\x04MARK\x10\x16\x12\x07\n\x03MWE\x10\x17\x12\x07\n\x03MWV\x10\x18\x12\x07\n\x03NEG\x10\x19\x12\x06\n\x02NN\x10\x1a\x12\x0c\n\x08NPADVMOD\x10\x1b\x12\t\n\x05NSUBJ\x10\x1c\x12\r\n\tNSUBJPASS\x10\x1d\x12\x07\n\x03NUM\x10\x1e\x12\n\n\x06NUMBER\x10\x1f\x12\x05\n\x01P\x10 \x12\r\n\tPARATAXIS\x10!\x12\x0b\n\x07PARTMOD\x10\"\x12\t\n\x05PCOMP\x10#\x12\x08\n\x04POBJ\x10$\x12\x08\n\x04POSS\x10%\x12\x0b\n\x07POSTNEG\x10&\x12\x0b\n\x07PRECOMP\x10\'\x12\x0b\n\x07PRECONJ\x10(\x12\n\n\x06PREDET\x10)\x12\x08\n\x04PREF\x10*\x12\x08\n\x04PREP\x10+\x12\t\n\x05PRONL\x10,\x12\x07\n\x03PRT\x10-\x12\x06\n\x02PS\x10.\x12\x0c\n\x08QUANTMOD\x10/\x12\t\n\x05RCMOD\x10\x30\x12\x0c\n\x08RCMODREL\x10\x31\x12\t\n\x05RDROP\x10\x32\x12\x07\n\x03REF\x10\x33\x12\x0b\n\x07REMNANT\x10\x34\x12\x0e\n\nREPARANDUM\x10\x35\x12\x08\n\x04ROOT\x10\x36\x12\x08\n\x04SNUM\x10\x37\x12\x08\n\x04SUFF\x10\x38\x12\x08\n\x04TMOD\x10\x39\x12\t\n\x05TOPIC\x10:\x12\x08\n\x04VMOD\x10;\x12\x0c\n\x08VOCATIVE\x10<\x12\t\n\x05XCOMP\x10=\x12\n\n\x06SUFFIX\x10>\x12\t\n\x05TITLE\x10?\x12\x0c\n\x08\x41\x44VPHMOD\x10@\x12\x0b\n\x07\x41UXCAUS\x10\x41\x12\t\n\x05\x41UXVV\x10\x42\x12\t\n\x05\x44TMOD\x10\x43\x12\x0b\n\x07\x46OREIGN\x10\x44\x12\x06\n\x02KW\x10\x45\x12\x08\n\x04LIST\x10\x46\x12\x08\n\x04NOMC\x10G\x12\x0c\n\x08NOMCSUBJ\x10H\x12\x10\n\x0cNOMCSUBJPASS\x10I\x12\x08\n\x04NUMC\x10J\x12\x07\n\x03\x43OP\x10K\x12\x0e\n\nDISLOCATED\x10L\x12\x07\n\x03\x41SP\x10M\x12\x08\n\x04GMOD\x10N\x12\x08\n\x04GOBJ\x10O\x12\n\n\x06INFMOD\x10P\x12\x07\n\x03MES\x10Q\x12\t\n\x05NCOMP\x10R\"\xf6\x01\n\rEntityMention\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12?\n\x04type\x18\x02 \x01(\x0e\x32\x31.google.cloud.language.v1beta2.EntityMention.Type\x12;\n\tsentiment\x18\x03 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\"0\n\x04Type\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\n\n\x06\x43OMMON\x10\x02\"1\n\x08TextSpan\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12\x14\n\x0c\x62\x65gin_offset\x18\x02 \x01(\x05\":\n\x16\x43lassificationCategory\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02\"\x98\x01\n\x17\x41nalyzeSentimentRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType\"\xae\x01\n\x18\x41nalyzeSentimentResponse\x12\x44\n\x12\x64ocument_sentiment\x18\x01 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x12\x10\n\x08language\x18\x02 \x01(\t\x12:\n\tsentences\x18\x03 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence\"\x9e\x01\n\x1d\x41nalyzeEntitySentimentRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12\x42\n\rencoding_type\x18\x02 
\x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType\"k\n\x1e\x41nalyzeEntitySentimentResponse\x12\x37\n\x08\x65ntities\x18\x01 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x10\n\x08language\x18\x02 \x01(\t\"\x97\x01\n\x16\x41nalyzeEntitiesRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType\"d\n\x17\x41nalyzeEntitiesResponse\x12\x37\n\x08\x65ntities\x18\x01 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x10\n\x08language\x18\x02 \x01(\t\"\x95\x01\n\x14\x41nalyzeSyntaxRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType\"\x9b\x01\n\x15\x41nalyzeSyntaxResponse\x12:\n\tsentences\x18\x01 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence\x12\x34\n\x06tokens\x18\x02 \x03(\x0b\x32$.google.cloud.language.v1beta2.Token\x12\x10\n\x08language\x18\x03 \x01(\t\"P\n\x13\x43lassifyTextRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\"a\n\x14\x43lassifyTextResponse\x12I\n\ncategories\x18\x01 \x03(\x0b\x32\x35.google.cloud.language.v1beta2.ClassificationCategory\"\xff\x02\n\x13\x41nnotateTextRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12M\n\x08\x66\x65\x61tures\x18\x02 \x01(\x0b\x32;.google.cloud.language.v1beta2.AnnotateTextRequest.Features\x12\x42\n\rencoding_type\x18\x03 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType\x1a\x99\x01\n\x08\x46\x65\x61tures\x12\x16\n\x0e\x65xtract_syntax\x18\x01 \x01(\x08\x12\x18\n\x10\x65xtract_entities\x18\x02 \x01(\x08\x12\"\n\x1a\x65xtract_document_sentiment\x18\x03 \x01(\x08\x12 \n\x18\x65xtract_entity_sentiment\x18\x04 \x01(\x08\x12\x15\n\rclassify_text\x18\x06 \x01(\x08\"\xe4\x02\n\x14\x41nnotateTextResponse\x12:\n\tsentences\x18\x01 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence\x12\x34\n\x06tokens\x18\x02 \x03(\x0b\x32$.google.cloud.language.v1beta2.Token\x12\x37\n\x08\x65ntities\x18\x03 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x44\n\x12\x64ocument_sentiment\x18\x04 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x12\x10\n\x08language\x18\x05 \x01(\t\x12I\n\ncategories\x18\x06 \x03(\x0b\x32\x35.google.cloud.language.v1beta2.ClassificationCategory*8\n\x0c\x45ncodingType\x12\x08\n\x04NONE\x10\x00\x12\x08\n\x04UTF8\x10\x01\x12\t\n\x05UTF16\x10\x02\x12\t\n\x05UTF32\x10\x03\x32\xbd\x08\n\x0fLanguageService\x12\xb3\x01\n\x10\x41nalyzeSentiment\x12\x36.google.cloud.language.v1beta2.AnalyzeSentimentRequest\x1a\x37.google.cloud.language.v1beta2.AnalyzeSentimentResponse\".\x82\xd3\xe4\x93\x02(\"#/v1beta2/documents:analyzeSentiment:\x01*\x12\xaf\x01\n\x0f\x41nalyzeEntities\x12\x35.google.cloud.language.v1beta2.AnalyzeEntitiesRequest\x1a\x36.google.cloud.language.v1beta2.AnalyzeEntitiesResponse\"-\x82\xd3\xe4\x93\x02\'\"\"/v1beta2/documents:analyzeEntities:\x01*\x12\xcb\x01\n\x16\x41nalyzeEntitySentiment\x12<.google.cloud.language.v1beta2.AnalyzeEntitySentimentRequest\x1a=.google.cloud.language.v1beta2.AnalyzeEntitySentimentResponse\"4\x82\xd3\xe4\x93\x02.\")/v1beta2/documents:analyzeEntitySentiment:\x01*\x12\xa7\x01\n\rAnalyzeSyntax\x12\x33.google.cloud.language.v1beta2.AnalyzeSyntaxRequest\x1a\x34.google.cloud.language.v1beta2.AnalyzeSyntaxResponse\"+\x82\xd3\xe4\x93\x02%\" 
/v1beta2/documents:analyzeSyntax:\x01*\x12\xa3\x01\n\x0c\x43lassifyText\x12\x32.google.cloud.language.v1beta2.ClassifyTextRequest\x1a\x33.google.cloud.language.v1beta2.ClassifyTextResponse\"*\x82\xd3\xe4\x93\x02$\"\x1f/v1beta2/documents:classifyText:\x01*\x12\xa3\x01\n\x0c\x41nnotateText\x12\x32.google.cloud.language.v1beta2.AnnotateTextRequest\x1a\x33.google.cloud.language.v1beta2.AnnotateTextResponse\"*\x82\xd3\xe4\x93\x02$\"\x1f/v1beta2/documents:annotateText:\x01*B\x82\x01\n!com.google.cloud.language.v1beta2B\x14LanguageServiceProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/language/v1beta2;languageb\x06proto3') , dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_longrunning_dot_operations__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_rpc_dot_status__pb2.DESCRIPTOR,]) _sym_db.RegisterFileDescriptor(DESCRIPTOR) @@ -54,8 +54,8 @@ ], containing_type=None, options=None, - serialized_start=6539, - serialized_end=6595, + serialized_start=6939, + serialized_end=6995, ) _sym_db.RegisterEnumDescriptor(_ENCODINGTYPE) @@ -948,11 +948,35 @@ name='DISLOCATED', index=76, number=76, options=None, type=None), + _descriptor.EnumValueDescriptor( + name='ASP', index=77, number=77, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='GMOD', index=78, number=78, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='GOBJ', index=79, number=79, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='INFMOD', index=80, number=80, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MES', index=81, number=81, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NCOMP', index=82, number=82, + options=None, + type=None), ], containing_type=None, options=None, serialized_start=3546, - serialized_end=4422, + serialized_end=4483, ) _sym_db.RegisterEnumDescriptor(_DEPENDENCYEDGE_LABEL) @@ -977,8 +1001,8 @@ ], containing_type=None, options=None, - serialized_start=4623, - serialized_end=4671, + serialized_start=4684, + serialized_end=4732, ) _sym_db.RegisterEnumDescriptor(_ENTITYMENTION_TYPE) @@ -1426,7 +1450,7 @@ oneofs=[ ], serialized_start=3433, - serialized_end=4422, + serialized_end=4483, ) @@ -1471,8 +1495,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=4425, - serialized_end=4671, + serialized_start=4486, + serialized_end=4732, ) @@ -1509,8 +1533,46 @@ extension_ranges=[], oneofs=[ ], - serialized_start=4673, - serialized_end=4722, + serialized_start=4734, + serialized_end=4783, +) + + +_CLASSIFICATIONCATEGORY = _descriptor.Descriptor( + name='ClassificationCategory', + full_name='google.cloud.language.v1beta2.ClassificationCategory', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.language.v1beta2.ClassificationCategory.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='confidence', full_name='google.cloud.language.v1beta2.ClassificationCategory.confidence', index=1, + number=2, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + 
enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4785, + serialized_end=4843, ) @@ -1547,8 +1609,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=4725, - serialized_end=4877, + serialized_start=4846, + serialized_end=4998, ) @@ -1592,8 +1654,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=4880, - serialized_end=5054, + serialized_start=5001, + serialized_end=5175, ) @@ -1630,8 +1692,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=5057, - serialized_end=5215, + serialized_start=5178, + serialized_end=5336, ) @@ -1668,8 +1730,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=5217, - serialized_end=5324, + serialized_start=5338, + serialized_end=5445, ) @@ -1706,8 +1768,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=5327, - serialized_end=5478, + serialized_start=5448, + serialized_end=5599, ) @@ -1744,8 +1806,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=5480, - serialized_end=5580, + serialized_start=5601, + serialized_end=5701, ) @@ -1782,8 +1844,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=5583, - serialized_end=5732, + serialized_start=5704, + serialized_end=5853, ) @@ -1827,8 +1889,70 @@ extension_ranges=[], oneofs=[ ], - serialized_start=5735, - serialized_end=5890, + serialized_start=5856, + serialized_end=6011, +) + + +_CLASSIFYTEXTREQUEST = _descriptor.Descriptor( + name='ClassifyTextRequest', + full_name='google.cloud.language.v1beta2.ClassifyTextRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='document', full_name='google.cloud.language.v1beta2.ClassifyTextRequest.document', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=6013, + serialized_end=6093, +) + + +_CLASSIFYTEXTRESPONSE = _descriptor.Descriptor( + name='ClassifyTextResponse', + full_name='google.cloud.language.v1beta2.ClassifyTextResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='categories', full_name='google.cloud.language.v1beta2.ClassifyTextResponse.categories', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=6095, + serialized_end=6192, ) @@ -1867,6 +1991,13 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), + _descriptor.FieldDescriptor( + name='classify_text', full_name='google.cloud.language.v1beta2.AnnotateTextRequest.Features.classify_text', index=4, + number=6, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), ], extensions=[ ], @@ -1879,8 +2010,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=6123, - 
serialized_end=6253, + serialized_start=6425, + serialized_end=6578, ) _ANNOTATETEXTREQUEST = _descriptor.Descriptor( @@ -1923,8 +2054,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=5893, - serialized_end=6253, + serialized_start=6195, + serialized_end=6578, ) @@ -1970,6 +2101,13 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), + _descriptor.FieldDescriptor( + name='categories', full_name='google.cloud.language.v1beta2.AnnotateTextResponse.categories', index=5, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), ], extensions=[ ], @@ -1982,8 +2120,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=6256, - serialized_end=6537, + serialized_start=6581, + serialized_end=6937, ) _DOCUMENT.fields_by_name['type'].enum_type = _DOCUMENT_TYPE @@ -2049,6 +2187,8 @@ _ANALYZESYNTAXREQUEST.fields_by_name['encoding_type'].enum_type = _ENCODINGTYPE _ANALYZESYNTAXRESPONSE.fields_by_name['sentences'].message_type = _SENTENCE _ANALYZESYNTAXRESPONSE.fields_by_name['tokens'].message_type = _TOKEN +_CLASSIFYTEXTREQUEST.fields_by_name['document'].message_type = _DOCUMENT +_CLASSIFYTEXTRESPONSE.fields_by_name['categories'].message_type = _CLASSIFICATIONCATEGORY _ANNOTATETEXTREQUEST_FEATURES.containing_type = _ANNOTATETEXTREQUEST _ANNOTATETEXTREQUEST.fields_by_name['document'].message_type = _DOCUMENT _ANNOTATETEXTREQUEST.fields_by_name['features'].message_type = _ANNOTATETEXTREQUEST_FEATURES @@ -2057,6 +2197,7 @@ _ANNOTATETEXTRESPONSE.fields_by_name['tokens'].message_type = _TOKEN _ANNOTATETEXTRESPONSE.fields_by_name['entities'].message_type = _ENTITY _ANNOTATETEXTRESPONSE.fields_by_name['document_sentiment'].message_type = _SENTIMENT +_ANNOTATETEXTRESPONSE.fields_by_name['categories'].message_type = _CLASSIFICATIONCATEGORY DESCRIPTOR.message_types_by_name['Document'] = _DOCUMENT DESCRIPTOR.message_types_by_name['Sentence'] = _SENTENCE DESCRIPTOR.message_types_by_name['Entity'] = _ENTITY @@ -2066,6 +2207,7 @@ DESCRIPTOR.message_types_by_name['DependencyEdge'] = _DEPENDENCYEDGE DESCRIPTOR.message_types_by_name['EntityMention'] = _ENTITYMENTION DESCRIPTOR.message_types_by_name['TextSpan'] = _TEXTSPAN +DESCRIPTOR.message_types_by_name['ClassificationCategory'] = _CLASSIFICATIONCATEGORY DESCRIPTOR.message_types_by_name['AnalyzeSentimentRequest'] = _ANALYZESENTIMENTREQUEST DESCRIPTOR.message_types_by_name['AnalyzeSentimentResponse'] = _ANALYZESENTIMENTRESPONSE DESCRIPTOR.message_types_by_name['AnalyzeEntitySentimentRequest'] = _ANALYZEENTITYSENTIMENTREQUEST @@ -2074,13 +2216,15 @@ DESCRIPTOR.message_types_by_name['AnalyzeEntitiesResponse'] = _ANALYZEENTITIESRESPONSE DESCRIPTOR.message_types_by_name['AnalyzeSyntaxRequest'] = _ANALYZESYNTAXREQUEST DESCRIPTOR.message_types_by_name['AnalyzeSyntaxResponse'] = _ANALYZESYNTAXRESPONSE +DESCRIPTOR.message_types_by_name['ClassifyTextRequest'] = _CLASSIFYTEXTREQUEST +DESCRIPTOR.message_types_by_name['ClassifyTextResponse'] = _CLASSIFYTEXTRESPONSE DESCRIPTOR.message_types_by_name['AnnotateTextRequest'] = _ANNOTATETEXTREQUEST DESCRIPTOR.message_types_by_name['AnnotateTextResponse'] = _ANNOTATETEXTRESPONSE DESCRIPTOR.enum_types_by_name['EncodingType'] = _ENCODINGTYPE Document = _reflection.GeneratedProtocolMessageType('Document', (_message.Message,), dict( DESCRIPTOR = _DOCUMENT, - __module__ = 
'google.cloud.proto.language.v1beta2.language_service_pb2' + __module__ = 'google.cloud.language_v1beta2.proto.language_service_pb2' , __doc__ = """Represents the input to API methods. @@ -2103,12 +2247,12 @@ language: The language of the document (if not specified, the language is automatically detected). Both ISO and BCP-47 language codes - are accepted. `Language Support - `__ - lists currently supported languages for each API method. If - the language (either specified by the caller or automatically - detected) is not supported by the called API method, an - ``INVALID_ARGUMENT`` error is returned. + are accepted. `Language Support `__ lists currently supported + languages for each API method. If the language (either + specified by the caller or automatically detected) is not + supported by the called API method, an ``INVALID_ARGUMENT`` + error is returned. """, # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.Document) )) @@ -2116,7 +2260,7 @@ Sentence = _reflection.GeneratedProtocolMessageType('Sentence', (_message.Message,), dict( DESCRIPTOR = _SENTENCE, - __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + __module__ = 'google.cloud.language_v1beta2.proto.language_service_pb2' , __doc__ = """Represents a sentence in the input document. @@ -2139,12 +2283,12 @@ MetadataEntry = _reflection.GeneratedProtocolMessageType('MetadataEntry', (_message.Message,), dict( DESCRIPTOR = _ENTITY_METADATAENTRY, - __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + __module__ = 'google.cloud.language_v1beta2.proto.language_service_pb2' # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.Entity.MetadataEntry) )) , DESCRIPTOR = _ENTITY, - __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + __module__ = 'google.cloud.language_v1beta2.proto.language_service_pb2' , __doc__ = """Represents a phrase in the text that is a known entity, such as a person, an organization, or location. The API associates information, @@ -2183,7 +2327,7 @@ Token = _reflection.GeneratedProtocolMessageType('Token', (_message.Message,), dict( DESCRIPTOR = _TOKEN, - __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + __module__ = 'google.cloud.language_v1beta2.proto.language_service_pb2' , __doc__ = """Represents the smallest syntactic building block of the text. @@ -2206,7 +2350,7 @@ Sentiment = _reflection.GeneratedProtocolMessageType('Sentiment', (_message.Message,), dict( DESCRIPTOR = _SENTIMENT, - __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + __module__ = 'google.cloud.language_v1beta2.proto.language_service_pb2' , __doc__ = """Represents the feeling associated with the entire text or entities in the text. @@ -2227,7 +2371,7 @@ PartOfSpeech = _reflection.GeneratedProtocolMessageType('PartOfSpeech', (_message.Message,), dict( DESCRIPTOR = _PARTOFSPEECH, - __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + __module__ = 'google.cloud.language_v1beta2.proto.language_service_pb2' , __doc__ = """Represents part of speech information for a token. @@ -2264,7 +2408,7 @@ DependencyEdge = _reflection.GeneratedProtocolMessageType('DependencyEdge', (_message.Message,), dict( DESCRIPTOR = _DEPENDENCYEDGE, - __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + __module__ = 'google.cloud.language_v1beta2.proto.language_service_pb2' , __doc__ = """Represents dependency parse tree information for a token. 
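``DependencyEdge`` carries a ``head_token_index`` and a ``Label``; the index points into the ``tokens`` list returned by the same call, with a root token pointing at itself. A small sketch that walks such a list and prints each token's head and label name, assuming ``tokens`` came from an earlier ``client.analyze_syntax(...)`` response:

    from google.cloud.language_v1beta2.proto import language_service_pb2

    def print_dependency_heads(tokens):
        # Print "token -> head (LABEL)" lines from an AnalyzeSyntaxResponse.tokens list.
        label_enum = language_service_pb2.DependencyEdge.Label
        for token in tokens:
            head = tokens[token.dependency_edge.head_token_index]
            print('%s -> %s (%s)' % (token.text.content,
                                     head.text.content,
                                     label_enum.Name(token.dependency_edge.label)))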
@@ -2285,7 +2429,7 @@ EntityMention = _reflection.GeneratedProtocolMessageType('EntityMention', (_message.Message,), dict( DESCRIPTOR = _ENTITYMENTION, - __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + __module__ = 'google.cloud.language_v1beta2.proto.language_service_pb2' , __doc__ = """Represents a mention for an entity in the text. Currently, proper noun mentions are supported. @@ -2310,7 +2454,7 @@ TextSpan = _reflection.GeneratedProtocolMessageType('TextSpan', (_message.Message,), dict( DESCRIPTOR = _TEXTSPAN, - __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + __module__ = 'google.cloud.language_v1beta2.proto.language_service_pb2' , __doc__ = """Represents an output piece of text. @@ -2328,9 +2472,28 @@ )) _sym_db.RegisterMessage(TextSpan) +ClassificationCategory = _reflection.GeneratedProtocolMessageType('ClassificationCategory', (_message.Message,), dict( + DESCRIPTOR = _CLASSIFICATIONCATEGORY, + __module__ = 'google.cloud.language_v1beta2.proto.language_service_pb2' + , + __doc__ = """Represents a category returned from the text classifier. + + + Attributes: + name: + The name of the category representing the document. + confidence: + The classifier's confidence of the category. Number represents + how certain the classifier is that this category represents + the given text. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.ClassificationCategory) + )) +_sym_db.RegisterMessage(ClassificationCategory) + AnalyzeSentimentRequest = _reflection.GeneratedProtocolMessageType('AnalyzeSentimentRequest', (_message.Message,), dict( DESCRIPTOR = _ANALYZESENTIMENTREQUEST, - __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + __module__ = 'google.cloud.language_v1beta2.proto.language_service_pb2' , __doc__ = """The sentiment analysis request message. @@ -2348,7 +2511,7 @@ AnalyzeSentimentResponse = _reflection.GeneratedProtocolMessageType('AnalyzeSentimentResponse', (_message.Message,), dict( DESCRIPTOR = _ANALYZESENTIMENTRESPONSE, - __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + __module__ = 'google.cloud.language_v1beta2.proto.language_service_pb2' , __doc__ = """The sentiment analysis response message. @@ -2371,7 +2534,7 @@ AnalyzeEntitySentimentRequest = _reflection.GeneratedProtocolMessageType('AnalyzeEntitySentimentRequest', (_message.Message,), dict( DESCRIPTOR = _ANALYZEENTITYSENTIMENTREQUEST, - __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + __module__ = 'google.cloud.language_v1beta2.proto.language_service_pb2' , __doc__ = """The entity-level sentiment analysis request message. @@ -2388,7 +2551,7 @@ AnalyzeEntitySentimentResponse = _reflection.GeneratedProtocolMessageType('AnalyzeEntitySentimentResponse', (_message.Message,), dict( DESCRIPTOR = _ANALYZEENTITYSENTIMENTRESPONSE, - __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + __module__ = 'google.cloud.language_v1beta2.proto.language_service_pb2' , __doc__ = """The entity-level sentiment analysis response message. @@ -2410,7 +2573,7 @@ AnalyzeEntitiesRequest = _reflection.GeneratedProtocolMessageType('AnalyzeEntitiesRequest', (_message.Message,), dict( DESCRIPTOR = _ANALYZEENTITIESREQUEST, - __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + __module__ = 'google.cloud.language_v1beta2.proto.language_service_pb2' , __doc__ = """The entity analysis request message. 
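The request and response classes registered here are plain generated protobuf messages; the GAPIC client earlier in this diff builds them directly (for example ``language_service_pb2.AnalyzeEntitiesRequest(document=document, encoding_type=encoding_type)``) before handing them to the gRPC stub. A round-trip sketch using only the generated module, with an arbitrary sample sentence:

    from google.cloud.language_v1beta2.proto import language_service_pb2

    doc = language_service_pb2.Document(
        type=language_service_pb2.Document.PLAIN_TEXT,   # enum value 1 in the descriptor above
        content='The Eiffel Tower is in Paris.')
    request = language_service_pb2.AnalyzeEntitiesRequest(document=doc)

    wire_bytes = request.SerializeToString()             # what the stub serializers below send
    parsed = language_service_pb2.AnalyzeEntitiesRequest.FromString(wire_bytes)
    assert parsed.document.content == doc.content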
@@ -2427,7 +2590,7 @@ AnalyzeEntitiesResponse = _reflection.GeneratedProtocolMessageType('AnalyzeEntitiesResponse', (_message.Message,), dict( DESCRIPTOR = _ANALYZEENTITIESRESPONSE, - __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + __module__ = 'google.cloud.language_v1beta2.proto.language_service_pb2' , __doc__ = """The entity analysis response message. @@ -2448,7 +2611,7 @@ AnalyzeSyntaxRequest = _reflection.GeneratedProtocolMessageType('AnalyzeSyntaxRequest', (_message.Message,), dict( DESCRIPTOR = _ANALYZESYNTAXREQUEST, - __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + __module__ = 'google.cloud.language_v1beta2.proto.language_service_pb2' , __doc__ = """The syntax analysis request message. @@ -2465,7 +2628,7 @@ AnalyzeSyntaxResponse = _reflection.GeneratedProtocolMessageType('AnalyzeSyntaxResponse', (_message.Message,), dict( DESCRIPTOR = _ANALYZESYNTAXRESPONSE, - __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + __module__ = 'google.cloud.language_v1beta2.proto.language_service_pb2' , __doc__ = """The syntax analysis response message. @@ -2487,11 +2650,41 @@ )) _sym_db.RegisterMessage(AnalyzeSyntaxResponse) +ClassifyTextRequest = _reflection.GeneratedProtocolMessageType('ClassifyTextRequest', (_message.Message,), dict( + DESCRIPTOR = _CLASSIFYTEXTREQUEST, + __module__ = 'google.cloud.language_v1beta2.proto.language_service_pb2' + , + __doc__ = """The document classification request message. + + + Attributes: + document: + Input document. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.ClassifyTextRequest) + )) +_sym_db.RegisterMessage(ClassifyTextRequest) + +ClassifyTextResponse = _reflection.GeneratedProtocolMessageType('ClassifyTextResponse', (_message.Message,), dict( + DESCRIPTOR = _CLASSIFYTEXTRESPONSE, + __module__ = 'google.cloud.language_v1beta2.proto.language_service_pb2' + , + __doc__ = """The document classification response message. + + + Attributes: + categories: + Categories representing the input document. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.ClassifyTextResponse) + )) +_sym_db.RegisterMessage(ClassifyTextResponse) + AnnotateTextRequest = _reflection.GeneratedProtocolMessageType('AnnotateTextRequest', (_message.Message,), dict( Features = _reflection.GeneratedProtocolMessageType('Features', (_message.Message,), dict( DESCRIPTOR = _ANNOTATETEXTREQUEST_FEATURES, - __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + __module__ = 'google.cloud.language_v1beta2.proto.language_service_pb2' , __doc__ = """All available features for sentiment, syntax, and semantic analysis. Setting each one to true will enable that specific analysis for the @@ -2501,7 +2694,7 @@ )) , DESCRIPTOR = _ANNOTATETEXTREQUEST, - __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + __module__ = 'google.cloud.language_v1beta2.proto.language_service_pb2' , __doc__ = """The request message for the text annotation API, which can perform multiple analysis types (sentiment, entities, and syntax) in one call. @@ -2516,6 +2709,8 @@ Extract document-level sentiment. extract_entity_sentiment: Extract entities and their associated sentiment. + classify_text: + Classify the full document into categories. document: Input document. 
features: @@ -2530,7 +2725,7 @@ AnnotateTextResponse = _reflection.GeneratedProtocolMessageType('AnnotateTextResponse', (_message.Message,), dict( DESCRIPTOR = _ANNOTATETEXTRESPONSE, - __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + __module__ = 'google.cloud.language_v1beta2.proto.language_service_pb2' , __doc__ = """The text annotations response message. @@ -2561,6 +2756,8 @@ automatically-detected language. See [Document.language][googl e.cloud.language.v1beta2.Document.language] field for more details. + categories: + Categories identified in the input document. """, # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnnotateTextResponse) )) @@ -2612,6 +2809,11 @@ def __init__(self, channel): request_serializer=AnalyzeSyntaxRequest.SerializeToString, response_deserializer=AnalyzeSyntaxResponse.FromString, ) + self.ClassifyText = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/ClassifyText', + request_serializer=ClassifyTextRequest.SerializeToString, + response_deserializer=ClassifyTextResponse.FromString, + ) self.AnnotateText = channel.unary_unary( '/google.cloud.language.v1beta2.LanguageService/AnnotateText', request_serializer=AnnotateTextRequest.SerializeToString, @@ -2657,9 +2859,16 @@ def AnalyzeSyntax(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def ClassifyText(self, request, context): + """Classifies a document into categories. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def AnnotateText(self, request, context): - """A convenience method that provides all syntax, sentiment, and entity - features in one call. + """A convenience method that provides all syntax, sentiment, entity, and + classification features in one call. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') @@ -2688,6 +2897,11 @@ def add_LanguageServiceServicer_to_server(servicer, server): request_deserializer=AnalyzeSyntaxRequest.FromString, response_serializer=AnalyzeSyntaxResponse.SerializeToString, ), + 'ClassifyText': grpc.unary_unary_rpc_method_handler( + servicer.ClassifyText, + request_deserializer=ClassifyTextRequest.FromString, + response_serializer=ClassifyTextResponse.SerializeToString, + ), 'AnnotateText': grpc.unary_unary_rpc_method_handler( servicer.AnnotateText, request_deserializer=AnnotateTextRequest.FromString, @@ -2729,9 +2943,13 @@ def AnalyzeSyntax(self, request, context): properties. """ context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ClassifyText(self, request, context): + """Classifies a document into categories. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) def AnnotateText(self, request, context): - """A convenience method that provides all syntax, sentiment, and entity - features in one call. + """A convenience method that provides all syntax, sentiment, entity, and + classification features in one call. """ context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) @@ -2770,9 +2988,14 @@ def AnalyzeSyntax(self, request, timeout, metadata=None, with_call=False, protoc """ raise NotImplementedError() AnalyzeSyntax.future = None + def ClassifyText(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Classifies a document into categories. 
+ """ + raise NotImplementedError() + ClassifyText.future = None def AnnotateText(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """A convenience method that provides all syntax, sentiment, and entity - features in one call. + """A convenience method that provides all syntax, sentiment, entity, and + classification features in one call. """ raise NotImplementedError() AnnotateText.future = None @@ -2790,6 +3013,7 @@ def beta_create_LanguageService_server(servicer, pool=None, pool_size=None, defa ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSentiment'): AnalyzeSentimentRequest.FromString, ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSyntax'): AnalyzeSyntaxRequest.FromString, ('google.cloud.language.v1beta2.LanguageService', 'AnnotateText'): AnnotateTextRequest.FromString, + ('google.cloud.language.v1beta2.LanguageService', 'ClassifyText'): ClassifyTextRequest.FromString, } response_serializers = { ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeEntities'): AnalyzeEntitiesResponse.SerializeToString, @@ -2797,6 +3021,7 @@ def beta_create_LanguageService_server(servicer, pool=None, pool_size=None, defa ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSentiment'): AnalyzeSentimentResponse.SerializeToString, ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSyntax'): AnalyzeSyntaxResponse.SerializeToString, ('google.cloud.language.v1beta2.LanguageService', 'AnnotateText'): AnnotateTextResponse.SerializeToString, + ('google.cloud.language.v1beta2.LanguageService', 'ClassifyText'): ClassifyTextResponse.SerializeToString, } method_implementations = { ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeEntities'): face_utilities.unary_unary_inline(servicer.AnalyzeEntities), @@ -2804,6 +3029,7 @@ def beta_create_LanguageService_server(servicer, pool=None, pool_size=None, defa ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSentiment'): face_utilities.unary_unary_inline(servicer.AnalyzeSentiment), ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSyntax'): face_utilities.unary_unary_inline(servicer.AnalyzeSyntax), ('google.cloud.language.v1beta2.LanguageService', 'AnnotateText'): face_utilities.unary_unary_inline(servicer.AnnotateText), + ('google.cloud.language.v1beta2.LanguageService', 'ClassifyText'): face_utilities.unary_unary_inline(servicer.ClassifyText), } server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) return beta_implementations.server(method_implementations, options=server_options) @@ -2821,6 +3047,7 @@ def beta_create_LanguageService_stub(channel, host=None, metadata_transformer=No ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSentiment'): AnalyzeSentimentRequest.SerializeToString, ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSyntax'): AnalyzeSyntaxRequest.SerializeToString, ('google.cloud.language.v1beta2.LanguageService', 'AnnotateText'): AnnotateTextRequest.SerializeToString, + ('google.cloud.language.v1beta2.LanguageService', 'ClassifyText'): ClassifyTextRequest.SerializeToString, } response_deserializers = { ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeEntities'): AnalyzeEntitiesResponse.FromString, @@ -2828,6 +3055,7 @@ def beta_create_LanguageService_stub(channel, host=None, metadata_transformer=No 
('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSentiment'): AnalyzeSentimentResponse.FromString, ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSyntax'): AnalyzeSyntaxResponse.FromString, ('google.cloud.language.v1beta2.LanguageService', 'AnnotateText'): AnnotateTextResponse.FromString, + ('google.cloud.language.v1beta2.LanguageService', 'ClassifyText'): ClassifyTextResponse.FromString, } cardinalities = { 'AnalyzeEntities': cardinality.Cardinality.UNARY_UNARY, @@ -2835,6 +3063,7 @@ def beta_create_LanguageService_stub(channel, host=None, metadata_transformer=No 'AnalyzeSentiment': cardinality.Cardinality.UNARY_UNARY, 'AnalyzeSyntax': cardinality.Cardinality.UNARY_UNARY, 'AnnotateText': cardinality.Cardinality.UNARY_UNARY, + 'ClassifyText': cardinality.Cardinality.UNARY_UNARY, } stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) return beta_implementations.dynamic_stub(channel, 'google.cloud.language.v1beta2.LanguageService', cardinalities, options=stub_options) diff --git a/language/google/cloud/language_v1beta2/proto/language_service_pb2_grpc.py b/language/google/cloud/language_v1beta2/proto/language_service_pb2_grpc.py new file mode 100644 index 000000000000..37fffd5bfba9 --- /dev/null +++ b/language/google/cloud/language_v1beta2/proto/language_service_pb2_grpc.py @@ -0,0 +1,139 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +import google.cloud.language_v1beta2.proto.language_service_pb2 as google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2 + + +class LanguageServiceStub(object): + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.AnalyzeSentiment = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeSentiment', + request_serializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeSentimentRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeSentimentResponse.FromString, + ) + self.AnalyzeEntities = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntities', + request_serializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeEntitiesRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeEntitiesResponse.FromString, + ) + self.AnalyzeEntitySentiment = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntitySentiment', + request_serializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeEntitySentimentRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeEntitySentimentResponse.FromString, + ) + self.AnalyzeSyntax = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeSyntax', + request_serializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeSyntaxRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeSyntaxResponse.FromString, + ) + self.ClassifyText = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/ClassifyText', + request_serializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.ClassifyTextRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.ClassifyTextResponse.FromString, + ) + self.AnnotateText = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnnotateText', + request_serializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnnotateTextRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnnotateTextResponse.FromString, + ) + + +class LanguageServiceServicer(object): + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + + def AnalyzeSentiment(self, request, context): + """Analyzes the sentiment of the provided text. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnalyzeEntities(self, request, context): + """Finds named entities (currently proper names and common nouns) in the text + along with entity types, salience, mentions for each entity, and + other properties. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnalyzeEntitySentiment(self, request, context): + """Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text and analyzes + sentiment associated with each entity and its mentions. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnalyzeSyntax(self, request, context): + """Analyzes the syntax of the text and provides sentence boundaries and + tokenization along with part of speech tags, dependency trees, and other + properties. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ClassifyText(self, request, context): + """Classifies a document into categories. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnnotateText(self, request, context): + """A convenience method that provides all syntax, sentiment, entity, and + classification features in one call. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_LanguageServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'AnalyzeSentiment': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeSentiment, + request_deserializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeSentimentRequest.FromString, + response_serializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeSentimentResponse.SerializeToString, + ), + 'AnalyzeEntities': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeEntities, + request_deserializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeEntitiesRequest.FromString, + response_serializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeEntitiesResponse.SerializeToString, + ), + 'AnalyzeEntitySentiment': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeEntitySentiment, + request_deserializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeEntitySentimentRequest.FromString, + response_serializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeEntitySentimentResponse.SerializeToString, + ), + 'AnalyzeSyntax': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeSyntax, + request_deserializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeSyntaxRequest.FromString, + response_serializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeSyntaxResponse.SerializeToString, + ), + 'ClassifyText': grpc.unary_unary_rpc_method_handler( + servicer.ClassifyText, + request_deserializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.ClassifyTextRequest.FromString, + response_serializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.ClassifyTextResponse.SerializeToString, + ), + 'AnnotateText': grpc.unary_unary_rpc_method_handler( + servicer.AnnotateText, + request_deserializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnnotateTextRequest.FromString, + response_serializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnnotateTextResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.cloud.language.v1beta2.LanguageService', rpc_method_handlers) + 
server.add_generic_rpc_handlers((generic_handler,)) diff --git a/language/google/cloud/language_v1beta2/types.py b/language/google/cloud/language_v1beta2/types.py index 557d05aeb001..9bfa8fb63d8b 100644 --- a/language/google/cloud/language_v1beta2/types.py +++ b/language/google/cloud/language_v1beta2/types.py @@ -15,16 +15,30 @@ from __future__ import absolute_import import sys -from google.cloud.proto.language.v1beta2 import language_service_pb2 - from google.gax.utils.messages import get_messages +from google.api import http_pb2 +from google.cloud.language_v1beta2.proto import language_service_pb2 +from google.longrunning import operations_pb2 +from google.protobuf import any_pb2 +from google.protobuf import descriptor_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import timestamp_pb2 +from google.rpc import status_pb2 names = [] -for name, message in get_messages(language_service_pb2).items(): - message.__module__ = 'google.cloud.language_v1beta2.types' - setattr(sys.modules[__name__], name, message) - names.append(name) - +for module in ( + http_pb2, + language_service_pb2, + operations_pb2, + any_pb2, + descriptor_pb2, + empty_pb2, + timestamp_pb2, + status_pb2, ): + for name, message in get_messages(module).items(): + message.__module__ = 'google.cloud.language_v1beta2.types' + setattr(sys.modules[__name__], name, message) + names.append(name) __all__ = tuple(sorted(names)) diff --git a/language/google/cloud/proto/__init__.py b/language/google/cloud/proto/__init__.py deleted file mode 100644 index de40ea7ca058..000000000000 --- a/language/google/cloud/proto/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__import__('pkg_resources').declare_namespace(__name__) diff --git a/language/google/cloud/proto/language/__init__.py b/language/google/cloud/proto/language/__init__.py deleted file mode 100644 index de40ea7ca058..000000000000 --- a/language/google/cloud/proto/language/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__import__('pkg_resources').declare_namespace(__name__) diff --git a/language/google/cloud/proto/language/v1/__init__.py b/language/google/cloud/proto/language/v1/__init__.py deleted file mode 100644 index 8b137891791f..000000000000 --- a/language/google/cloud/proto/language/v1/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/language/google/cloud/proto/language/v1/language_service_pb2_grpc.py b/language/google/cloud/proto/language/v1/language_service_pb2_grpc.py deleted file mode 100644 index 19ab43fae3f0..000000000000 --- a/language/google/cloud/proto/language/v1/language_service_pb2_grpc.py +++ /dev/null @@ -1,104 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -import google.cloud.proto.language.v1.language_service_pb2 as google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2 - - -class LanguageServiceStub(object): - """Provides text analysis operations such as sentiment analysis and entity - recognition. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.AnalyzeSentiment = channel.unary_unary( - '/google.cloud.language.v1.LanguageService/AnalyzeSentiment', - request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeSentimentRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeSentimentResponse.FromString, - ) - self.AnalyzeEntities = channel.unary_unary( - '/google.cloud.language.v1.LanguageService/AnalyzeEntities', - request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeEntitiesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeEntitiesResponse.FromString, - ) - self.AnalyzeSyntax = channel.unary_unary( - '/google.cloud.language.v1.LanguageService/AnalyzeSyntax', - request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeSyntaxRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeSyntaxResponse.FromString, - ) - self.AnnotateText = channel.unary_unary( - '/google.cloud.language.v1.LanguageService/AnnotateText', - request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnnotateTextRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnnotateTextResponse.FromString, - ) - - -class LanguageServiceServicer(object): - """Provides text analysis operations such as sentiment analysis and entity - recognition. - """ - - def AnalyzeSentiment(self, request, context): - """Analyzes the sentiment of the provided text. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def AnalyzeEntities(self, request, context): - """Finds named entities (currently proper names and common nouns) in the text - along with entity types, salience, mentions for each entity, and - other properties. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def AnalyzeSyntax(self, request, context): - """Analyzes the syntax of the text and provides sentence boundaries and - tokenization along with part of speech tags, dependency trees, and other - properties. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def AnnotateText(self, request, context): - """A convenience method that provides all the features that analyzeSentiment, - analyzeEntities, and analyzeSyntax provide in one call. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - -def add_LanguageServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - 'AnalyzeSentiment': grpc.unary_unary_rpc_method_handler( - servicer.AnalyzeSentiment, - request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeSentimentRequest.FromString, - response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeSentimentResponse.SerializeToString, - ), - 'AnalyzeEntities': grpc.unary_unary_rpc_method_handler( - servicer.AnalyzeEntities, - request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeEntitiesRequest.FromString, - response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeEntitiesResponse.SerializeToString, - ), - 'AnalyzeSyntax': grpc.unary_unary_rpc_method_handler( - servicer.AnalyzeSyntax, - request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeSyntaxRequest.FromString, - response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeSyntaxResponse.SerializeToString, - ), - 'AnnotateText': grpc.unary_unary_rpc_method_handler( - servicer.AnnotateText, - request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnnotateTextRequest.FromString, - response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnnotateTextResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'google.cloud.language.v1.LanguageService', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/language/google/cloud/proto/language/v1beta2/__init__.py b/language/google/cloud/proto/language/v1beta2/__init__.py deleted file mode 100644 index 8b137891791f..000000000000 --- a/language/google/cloud/proto/language/v1beta2/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/language/google/cloud/proto/language/v1beta2/language_service_pb2_grpc.py b/language/google/cloud/proto/language/v1beta2/language_service_pb2_grpc.py deleted file mode 100644 index 264d6d43f468..000000000000 --- a/language/google/cloud/proto/language/v1beta2/language_service_pb2_grpc.py +++ /dev/null @@ -1,122 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -import google.cloud.proto.language.v1beta2.language_service_pb2 as google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2 - - -class LanguageServiceStub(object): - """Provides text analysis operations such as sentiment analysis and entity - recognition. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.AnalyzeSentiment = channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/AnalyzeSentiment', - request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeSentimentRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeSentimentResponse.FromString, - ) - self.AnalyzeEntities = channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntities', - request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeEntitiesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeEntitiesResponse.FromString, - ) - self.AnalyzeEntitySentiment = channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntitySentiment', - request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeEntitySentimentRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeEntitySentimentResponse.FromString, - ) - self.AnalyzeSyntax = channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/AnalyzeSyntax', - request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeSyntaxRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeSyntaxResponse.FromString, - ) - self.AnnotateText = channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/AnnotateText', - request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnnotateTextRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnnotateTextResponse.FromString, - ) - - -class LanguageServiceServicer(object): - """Provides text analysis operations such as sentiment analysis and entity - recognition. - """ - - def AnalyzeSentiment(self, request, context): - """Analyzes the sentiment of the provided text. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def AnalyzeEntities(self, request, context): - """Finds named entities (currently proper names and common nouns) in the text - along with entity types, salience, mentions for each entity, and - other properties. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def AnalyzeEntitySentiment(self, request, context): - """Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text and analyzes - sentiment associated with each entity and its mentions. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def AnalyzeSyntax(self, request, context): - """Analyzes the syntax of the text and provides sentence boundaries and - tokenization along with part of speech tags, dependency trees, and other - properties. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def AnnotateText(self, request, context): - """A convenience method that provides all syntax, sentiment, and entity - features in one call. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - -def add_LanguageServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - 'AnalyzeSentiment': grpc.unary_unary_rpc_method_handler( - servicer.AnalyzeSentiment, - request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeSentimentRequest.FromString, - response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeSentimentResponse.SerializeToString, - ), - 'AnalyzeEntities': grpc.unary_unary_rpc_method_handler( - servicer.AnalyzeEntities, - request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeEntitiesRequest.FromString, - response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeEntitiesResponse.SerializeToString, - ), - 'AnalyzeEntitySentiment': grpc.unary_unary_rpc_method_handler( - servicer.AnalyzeEntitySentiment, - request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeEntitySentimentRequest.FromString, - response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeEntitySentimentResponse.SerializeToString, - ), - 'AnalyzeSyntax': grpc.unary_unary_rpc_method_handler( - servicer.AnalyzeSyntax, - request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeSyntaxRequest.FromString, - response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeSyntaxResponse.SerializeToString, - ), - 'AnnotateText': grpc.unary_unary_rpc_method_handler( - servicer.AnnotateText, - request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnnotateTextRequest.FromString, - response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnnotateTextResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'google.cloud.language.v1beta2.LanguageService', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/language/setup.py b/language/setup.py index b60edbf3f40f..139b9e936076 100644 --- a/language/setup.py +++ b/language/setup.py @@ -67,10 +67,6 @@ namespace_packages=[ 'google', 'google.cloud', - 'google.cloud.gapic', - 'google.cloud.gapic.language', - 'google.cloud.proto', - 'google.cloud.proto.language', ], packages=find_packages(exclude=('tests*',)), install_requires=REQUIREMENTS, diff --git a/language/tests/gapic/test_language_service_client_v1.py b/language/tests/unit/gapic/v1/test_language_service_client_v1.py similarity index 83% rename from language/tests/gapic/test_language_service_client_v1.py rename to language/tests/unit/gapic/v1/test_language_service_client_v1.py index 648ad98e50a3..6034f252cfb2 100644 --- a/language/tests/gapic/test_language_service_client_v1.py +++ b/language/tests/unit/gapic/v1/test_language_service_client_v1.py @@ -18,8 +18,8 @@ from google.gax import errors -from 
google.cloud.gapic.language.v1 import language_service_client -from google.cloud.proto.language.v1 import language_service_pb2 +from google.cloud import language_v1 +from google.cloud.language_v1.proto import language_service_pb2 class CustomException(Exception): @@ -33,15 +33,16 @@ def test_analyze_sentiment(self, mock_create_stub): grpc_stub = mock.Mock() mock_create_stub.return_value = grpc_stub - client = language_service_client.LanguageServiceClient() + client = language_v1.LanguageServiceClient() # Mock request - document = language_service_pb2.Document() + document = {} # Mock response language = 'language-1613589672' + expected_response = {'language': language} expected_response = language_service_pb2.AnalyzeSentimentResponse( - language=language) + **expected_response) grpc_stub.AnalyzeSentiment.return_value = expected_response response = client.analyze_sentiment(document) @@ -65,10 +66,10 @@ def test_analyze_sentiment_exception(self, mock_create_stub): grpc_stub = mock.Mock() mock_create_stub.return_value = grpc_stub - client = language_service_client.LanguageServiceClient() + client = language_v1.LanguageServiceClient() # Mock request - document = language_service_pb2.Document() + document = {} # Mock exception response grpc_stub.AnalyzeSentiment.side_effect = CustomException() @@ -81,15 +82,16 @@ def test_analyze_entities(self, mock_create_stub): grpc_stub = mock.Mock() mock_create_stub.return_value = grpc_stub - client = language_service_client.LanguageServiceClient() + client = language_v1.LanguageServiceClient() # Mock request - document = language_service_pb2.Document() + document = {} # Mock response language = 'language-1613589672' + expected_response = {'language': language} expected_response = language_service_pb2.AnalyzeEntitiesResponse( - language=language) + **expected_response) grpc_stub.AnalyzeEntities.return_value = expected_response response = client.analyze_entities(document) @@ -113,10 +115,10 @@ def test_analyze_entities_exception(self, mock_create_stub): grpc_stub = mock.Mock() mock_create_stub.return_value = grpc_stub - client = language_service_client.LanguageServiceClient() + client = language_v1.LanguageServiceClient() # Mock request - document = language_service_pb2.Document() + document = {} # Mock exception response grpc_stub.AnalyzeEntities.side_effect = CustomException() @@ -129,15 +131,16 @@ def test_analyze_syntax(self, mock_create_stub): grpc_stub = mock.Mock() mock_create_stub.return_value = grpc_stub - client = language_service_client.LanguageServiceClient() + client = language_v1.LanguageServiceClient() # Mock request - document = language_service_pb2.Document() + document = {} # Mock response language = 'language-1613589672' + expected_response = {'language': language} expected_response = language_service_pb2.AnalyzeSyntaxResponse( - language=language) + **expected_response) grpc_stub.AnalyzeSyntax.return_value = expected_response response = client.analyze_syntax(document) @@ -161,10 +164,10 @@ def test_analyze_syntax_exception(self, mock_create_stub): grpc_stub = mock.Mock() mock_create_stub.return_value = grpc_stub - client = language_service_client.LanguageServiceClient() + client = language_v1.LanguageServiceClient() # Mock request - document = language_service_pb2.Document() + document = {} # Mock exception response grpc_stub.AnalyzeSyntax.side_effect = CustomException() @@ -177,16 +180,17 @@ def test_annotate_text(self, mock_create_stub): grpc_stub = mock.Mock() mock_create_stub.return_value = grpc_stub - client = 
language_service_client.LanguageServiceClient() + client = language_v1.LanguageServiceClient() # Mock request - document = language_service_pb2.Document() - features = language_service_pb2.AnnotateTextRequest.Features() + document = {} + features = {} # Mock response language = 'language-1613589672' + expected_response = {'language': language} expected_response = language_service_pb2.AnnotateTextResponse( - language=language) + **expected_response) grpc_stub.AnnotateText.return_value = expected_response response = client.annotate_text(document, features) @@ -210,11 +214,11 @@ def test_annotate_text_exception(self, mock_create_stub): grpc_stub = mock.Mock() mock_create_stub.return_value = grpc_stub - client = language_service_client.LanguageServiceClient() + client = language_v1.LanguageServiceClient() # Mock request - document = language_service_pb2.Document() - features = language_service_pb2.AnnotateTextRequest.Features() + document = {} + features = {} # Mock exception response grpc_stub.AnnotateText.side_effect = CustomException() diff --git a/language/tests/gapic/test_language_service_client_v1beta2.py b/language/tests/unit/gapic/v1beta2/test_language_service_client_v1beta2.py similarity index 73% rename from language/tests/gapic/test_language_service_client_v1beta2.py rename to language/tests/unit/gapic/v1beta2/test_language_service_client_v1beta2.py index db8df687456c..b09bfa82a8a9 100644 --- a/language/tests/gapic/test_language_service_client_v1beta2.py +++ b/language/tests/unit/gapic/v1beta2/test_language_service_client_v1beta2.py @@ -18,8 +18,8 @@ from google.gax import errors -from google.cloud.gapic.language.v1beta2 import language_service_client -from google.cloud.proto.language.v1beta2 import language_service_pb2 +from google.cloud import language_v1beta2 +from google.cloud.language_v1beta2.proto import language_service_pb2 class CustomException(Exception): @@ -33,15 +33,16 @@ def test_analyze_sentiment(self, mock_create_stub): grpc_stub = mock.Mock() mock_create_stub.return_value = grpc_stub - client = language_service_client.LanguageServiceClient() + client = language_v1beta2.LanguageServiceClient() # Mock request - document = language_service_pb2.Document() + document = {} # Mock response language = 'language-1613589672' + expected_response = {'language': language} expected_response = language_service_pb2.AnalyzeSentimentResponse( - language=language) + **expected_response) grpc_stub.AnalyzeSentiment.return_value = expected_response response = client.analyze_sentiment(document) @@ -65,10 +66,10 @@ def test_analyze_sentiment_exception(self, mock_create_stub): grpc_stub = mock.Mock() mock_create_stub.return_value = grpc_stub - client = language_service_client.LanguageServiceClient() + client = language_v1beta2.LanguageServiceClient() # Mock request - document = language_service_pb2.Document() + document = {} # Mock exception response grpc_stub.AnalyzeSentiment.side_effect = CustomException() @@ -81,15 +82,16 @@ def test_analyze_entities(self, mock_create_stub): grpc_stub = mock.Mock() mock_create_stub.return_value = grpc_stub - client = language_service_client.LanguageServiceClient() + client = language_v1beta2.LanguageServiceClient() # Mock request - document = language_service_pb2.Document() + document = {} # Mock response language = 'language-1613589672' + expected_response = {'language': language} expected_response = language_service_pb2.AnalyzeEntitiesResponse( - language=language) + **expected_response) grpc_stub.AnalyzeEntities.return_value = expected_response 
response = client.analyze_entities(document) @@ -113,10 +115,10 @@ def test_analyze_entities_exception(self, mock_create_stub): grpc_stub = mock.Mock() mock_create_stub.return_value = grpc_stub - client = language_service_client.LanguageServiceClient() + client = language_v1beta2.LanguageServiceClient() # Mock request - document = language_service_pb2.Document() + document = {} # Mock exception response grpc_stub.AnalyzeEntities.side_effect = CustomException() @@ -129,15 +131,16 @@ def test_analyze_entity_sentiment(self, mock_create_stub): grpc_stub = mock.Mock() mock_create_stub.return_value = grpc_stub - client = language_service_client.LanguageServiceClient() + client = language_v1beta2.LanguageServiceClient() # Mock request - document = language_service_pb2.Document() + document = {} # Mock response language = 'language-1613589672' + expected_response = {'language': language} expected_response = language_service_pb2.AnalyzeEntitySentimentResponse( - language=language) + **expected_response) grpc_stub.AnalyzeEntitySentiment.return_value = expected_response response = client.analyze_entity_sentiment(document) @@ -161,10 +164,10 @@ def test_analyze_entity_sentiment_exception(self, mock_create_stub): grpc_stub = mock.Mock() mock_create_stub.return_value = grpc_stub - client = language_service_client.LanguageServiceClient() + client = language_v1beta2.LanguageServiceClient() # Mock request - document = language_service_pb2.Document() + document = {} # Mock exception response grpc_stub.AnalyzeEntitySentiment.side_effect = CustomException() @@ -178,15 +181,16 @@ def test_analyze_syntax(self, mock_create_stub): grpc_stub = mock.Mock() mock_create_stub.return_value = grpc_stub - client = language_service_client.LanguageServiceClient() + client = language_v1beta2.LanguageServiceClient() # Mock request - document = language_service_pb2.Document() + document = {} # Mock response language = 'language-1613589672' + expected_response = {'language': language} expected_response = language_service_pb2.AnalyzeSyntaxResponse( - language=language) + **expected_response) grpc_stub.AnalyzeSyntax.return_value = expected_response response = client.analyze_syntax(document) @@ -210,32 +214,81 @@ def test_analyze_syntax_exception(self, mock_create_stub): grpc_stub = mock.Mock() mock_create_stub.return_value = grpc_stub - client = language_service_client.LanguageServiceClient() + client = language_v1beta2.LanguageServiceClient() # Mock request - document = language_service_pb2.Document() + document = {} # Mock exception response grpc_stub.AnalyzeSyntax.side_effect = CustomException() self.assertRaises(errors.GaxError, client.analyze_syntax, document) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_classify_text(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_v1beta2.LanguageServiceClient() + + # Mock request + document = {} + + # Mock response + expected_response = {} + expected_response = language_service_pb2.ClassifyTextResponse( + **expected_response) + grpc_stub.ClassifyText.return_value = expected_response + + response = client.classify_text(document) + self.assertEqual(expected_response, response) + + grpc_stub.ClassifyText.assert_called_once() + args, kwargs = grpc_stub.ClassifyText.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_request = args[0] + + expected_request = language_service_pb2.ClassifyTextRequest( + 
document=document) + self.assertEqual(expected_request, actual_request) + + @mock.patch('google.gax.config.API_ERRORS', (CustomException, )) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_classify_text_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_v1beta2.LanguageServiceClient() + + # Mock request + document = {} + + # Mock exception response + grpc_stub.ClassifyText.side_effect = CustomException() + + self.assertRaises(errors.GaxError, client.classify_text, document) + @mock.patch('google.gax.config.create_stub', spec=True) def test_annotate_text(self, mock_create_stub): # Mock gRPC layer grpc_stub = mock.Mock() mock_create_stub.return_value = grpc_stub - client = language_service_client.LanguageServiceClient() + client = language_v1beta2.LanguageServiceClient() # Mock request - document = language_service_pb2.Document() - features = language_service_pb2.AnnotateTextRequest.Features() + document = {} + features = {} # Mock response language = 'language-1613589672' + expected_response = {'language': language} expected_response = language_service_pb2.AnnotateTextResponse( - language=language) + **expected_response) grpc_stub.AnnotateText.return_value = expected_response response = client.annotate_text(document, features) @@ -259,11 +312,11 @@ def test_annotate_text_exception(self, mock_create_stub): grpc_stub = mock.Mock() mock_create_stub.return_value = grpc_stub - client = language_service_client.LanguageServiceClient() + client = language_v1beta2.LanguageServiceClient() # Mock request - document = language_service_pb2.Document() - features = language_service_pb2.AnnotateTextRequest.Features() + document = {} + features = {} # Mock exception response grpc_stub.AnnotateText.side_effect = CustomException()
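
Reviewer note (not part of the change set): the hunks above add a ClassifyText RPC to the v1beta2 surface — new ClassifyTextRequest/ClassifyTextResponse and ClassificationCategory messages, stub/servicer wiring in language_service_pb2_grpc.py, and a classify_text client method exercised by the new test_classify_text tests. The sketch below shows how that method is expected to be driven end to end through the renamed google.cloud.language_v1beta2 package. The client import, the classify_text call, the dict-based document, and the categories/name/confidence fields are all taken from the tests and message docstrings in this diff; the enums module, the content/type keys of the document, and the use of Application Default Credentials are assumptions not verified by this change.

    # Minimal usage sketch for the new ClassifyText RPC (assumptions noted above).
    from google.cloud import language_v1beta2
    # Assumption: the relocated package still exposes a generated enums module.
    from google.cloud.language_v1beta2 import enums

    # Uses Application Default Credentials, as with the other v1beta2 methods.
    client = language_v1beta2.LanguageServiceClient()

    # The unit tests in this diff pass a plain dict for the document; protobuf
    # message constructors accept dicts for message-typed fields, so the GAPIC
    # layer builds ClassifyTextRequest(document=document) from it unchanged.
    # The 'content'/'type' keys below are assumed field names for Document.
    document = {
        'content': 'Android and iOS are mobile operating systems.',
        'type': enums.Document.Type.PLAIN_TEXT,
    }

    # classify_text wraps the unary-unary
    # /google.cloud.language.v1beta2.LanguageService/ClassifyText call
    # registered in language_service_pb2_grpc.py above.
    response = client.classify_text(document)

    # ClassifyTextResponse.categories holds ClassificationCategory messages,
    # each with a category 'name' and a 'confidence' score.
    for category in response.categories:
        print(category.name, category.confidence)

The same dict-for-message convention explains why the rewritten tests replace language_service_pb2.Document() with bare {} throughout: the expected requests are still constructed as real protobuf messages, so equality checks remain exact while the test fixtures get shorter.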