From 2e27f019e834000b556fa8901f2083c047d8c153 Mon Sep 17 00:00:00 2001 From: Srinath Narayanan Date: Mon, 12 Aug 2019 16:54:15 -0700 Subject: [PATCH 01/29] intiial commit for query piepline changes --- sdk/cosmos/azure-cosmos/azure/cosmos/_base.py | 12 +++ .../azure/cosmos/_cosmos_client_connection.py | 65 +++++++++---- .../base_execution_context.py | 92 ------------------- .../execution_dispatcher.py | 84 +---------------- .../query_execution_info.py | 20 +++- .../azure/cosmos/_query_iterable.py | 60 +++++++++--- .../cosmos/_routing/routing_map_provider.py | 2 +- .../azure-cosmos/azure/cosmos/documents.py | 22 +++++ .../azure/cosmos/http_constants.py | 4 + .../azure-cosmos/test/aggregate_tests.py | 6 +- sdk/cosmos/azure-cosmos/test/query_tests.py | 41 ++++++++- 11 files changed, 199 insertions(+), 209 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py index 6e4bf3e10888..4ae6b8e78c10 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py @@ -141,6 +141,18 @@ def GetHeaders(cosmos_client_connection, if options.get('offerThroughput'): headers[http_constants.HttpHeaders.OfferThroughput] = options['offerThroughput'] + if options.get('contentType'): + headers[http_constants.HttpHeaders.ContentType] = options['contentType'] + + if options.get('isQueryPlanRequest'): + headers[http_constants.HttpHeaders.IsQueryPlanRequest] = options['isQueryPlanRequest'] + + if options.get('supportedQueryFeatures'): + headers[http_constants.HttpHeaders.SupportedQueryFeatures] = options['supportedQueryFeatures'] + + if options.get('queryVersion'): + headers[http_constants.HttpHeaders.QueryVersion] = options['queryVersion'] + if 'partitionKey' in options: # if partitionKey value is Undefined, serialize it as [{}] to be consistent with other SDKs. 
if options.get('partitionKey') is partition_key._Undefined: diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py index 6f9fe50a1c86..096353fa1617 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py @@ -294,7 +294,7 @@ def fetch_fn(options): lambda _, b: b, query, options), self.last_response_headers - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return query_iterable.QueryIterable(self, query, options, fetch_fn, 'dbs') def ReadContainers(self, database_link, options=None): """Reads all collections in a database. @@ -341,7 +341,7 @@ def fetch_fn(options): lambda _, body: body, query, options), self.last_response_headers - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return query_iterable.QueryIterable(self, query, options, fetch_fn, 'colls') def CreateContainer(self, database_link, collection, options=None): """Creates a collection in a database. @@ -550,7 +550,7 @@ def fetch_fn(options): lambda _, b: b, query, options), self.last_response_headers - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return query_iterable.QueryIterable(self, query, options, fetch_fn, 'users') def DeleteDatabase(self, database_link, options=None): """Deletes a database. @@ -710,7 +710,7 @@ def fetch_fn(options): lambda _, b: b, query, options), self.last_response_headers - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return query_iterable.QueryIterable(self, query, options, fetch_fn, 'permissions') def ReplaceUser(self, user_link, user, options=None): """Replaces a user and return it. 
@@ -875,7 +875,7 @@ def fetch_fn(options): query, options, response_hook=response_hook), self.last_response_headers - return query_iterable.QueryIterable(self, query, options, fetch_fn, database_or_Container_link) + return query_iterable.QueryIterable(self, query, options, fetch_fn, 'docs', database_or_Container_link) def QueryItemsChangeFeed(self, collection_link, options=None, response_hook=None): """Queries documents change feed in a collection. @@ -944,7 +944,7 @@ def fetch_fn(options): options, partition_key_range_id, response_hook=response_hook), self.last_response_headers - return query_iterable.QueryIterable(self, None, options, fetch_fn, collection_link) + return query_iterable.QueryIterable(self, None, options, fetch_fn, resource_key, collection_link) def _ReadPartitionKeyRanges(self, collection_link, feed_options=None): """Reads Partition Key Ranges. @@ -992,7 +992,7 @@ def fetch_fn(options): lambda _, b: b, query, options), self.last_response_headers - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return query_iterable.QueryIterable(self, query, options, fetch_fn, 'pkranges') def CreateItem(self, database_or_Container_link, document, options=None): """Creates a document in a collection. @@ -1173,7 +1173,7 @@ def fetch_fn(options): lambda _, b: b, query, options), self.last_response_headers - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return query_iterable.QueryIterable(self, query, options, fetch_fn, 'triggers') def CreateTrigger(self, collection_link, trigger, options=None): """Creates a trigger in a collection. @@ -1308,7 +1308,7 @@ def fetch_fn(options): lambda _, b: b, query, options), self.last_response_headers - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return query_iterable.QueryIterable(self, query, options, fetch_fn, 'udfs') def CreateUserDefinedFunction(self, collection_link, udf, options=None): """Creates a user defined function in a collection. 
@@ -1443,7 +1443,7 @@ def fetch_fn(options): lambda _, b: b, query, options), self.last_response_headers - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return query_iterable.QueryIterable(self, query, options, fetch_fn, 'sprocs') def CreateStoredProcedure(self, collection_link, sproc, options=None): """Creates a stored procedure in a collection. @@ -1576,7 +1576,7 @@ def fetch_fn(options): lambda _, b: b, query, options), self.last_response_headers - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return query_iterable.QueryIterable(self, query, options, fetch_fn, 'conflicts') def ReadConflict(self, conflict_link, options=None): """Reads a conflict. @@ -1899,7 +1899,7 @@ def fetch_fn(options): lambda _, b: b, query, options), self.last_response_headers - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return query_iterable.QueryIterable(self, query, options, fetch_fn, 'attachments') def ReadMedia(self, media_link): @@ -2366,7 +2366,7 @@ def fetch_fn(options): lambda _, b: b, query, options), self.last_response_headers - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return query_iterable.QueryIterable(self, query, options, fetch_fn, 'offers') def GetDatabaseAccount(self, url_connection=None): """Gets database account info. @@ -2744,7 +2744,8 @@ def __QueryFeed(self, query, options=None, partition_key_range_id=None, - response_hook=None): + response_hook=None, + is_query_plan=False): """Query for more than one Azure Cosmos resources. :param str path: @@ -2783,7 +2784,7 @@ def __GetBodiesFromQueryResult(result): # Copy to make sure that default_headers won't be changed. 
if query is None: # Query operations will use ReadEndpoint even though it uses GET(for feed requests) - request = _request_object.RequestObject(type, documents._OperationType.ReadFeed) + request = _request_object.RequestObject(type, documents._OperationType.QueryPlan if is_query_plan else documents._OperationType.ReadFeed) headers = base.GetHeaders(self, initial_headers, 'get', @@ -2801,7 +2802,8 @@ def __GetBodiesFromQueryResult(result): else: query = self.__CheckAndUnifyQueryFormat(query) - initial_headers[http_constants.HttpHeaders.IsQuery] = 'true' + if not is_query_plan: + initial_headers[http_constants.HttpHeaders.IsQuery] = 'true' if (self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Default or self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Query): initial_headers[http_constants.HttpHeaders.ContentType] = runtime_constants.MediaTypes.QueryJson @@ -2811,7 +2813,7 @@ def __GetBodiesFromQueryResult(result): raise SystemError('Unexpected query compatibility mode.') # Query operations will use ReadEndpoint even though it uses POST(for regular query operations) - request = _request_object.RequestObject(type, documents._OperationType.SqlQuery) + request = _request_object.RequestObject(type, documents._OperationType.QueryPlan if is_query_plan else documents._OperationType.SqlQuery) headers = base.GetHeaders(self, initial_headers, 'post', @@ -2831,6 +2833,35 @@ def __GetBodiesFromQueryResult(result): return __GetBodiesFromQueryResult(result) + def _GetQueryPlanThroughGateway(self, query, resource_link): + supported_query_features = (documents._QueryFeature.Aggregate + "," + + documents._QueryFeature.CompositeAggregate + "," + + documents._QueryFeature.Distinct + "," + + documents._QueryFeature.MultipleOrderBy + "," + + documents._QueryFeature.OffsetAndLimit + "," + + documents._QueryFeature.OrderBy + "," + + documents._QueryFeature.Top) + + options = { + 'contentType': runtime_constants.MediaTypes.Json, 
+ 'isQueryPlanRequest': True, + 'supportedQueryFeatures': supported_query_features, + 'queryVersion': http_constants.Versions.QueryVersion + } + + resource_link = base.TrimBeginningAndEndingSlashes(resource_link) + path = base.GetPathFromLink(resource_link, 'docs') + resource_id = base.GetResourceIdOrFullNameFromLink(resource_link) + + return self.__QueryFeed(path, + 'docs', + resource_id, + lambda r: r, + None, + query, + options, + is_query_plan=True) + def __CheckAndUnifyQueryFormat(self, query_body): """Checks and unifies the format of the query body. diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/base_execution_context.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/base_execution_context.py index e32fb2e95b28..5bf0c7df29f5 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/base_execution_context.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/base_execution_context.py @@ -167,97 +167,5 @@ def __init__(self, client, options, fetch_function): def _fetch_next_block(self): while super(_DefaultQueryExecutionContext, self)._has_more_pages() and len(self._buffer) == 0: return self._fetch_items_helper_with_retries(self._fetch_function) - -class _MultiCollectionQueryExecutionContext(_QueryExecutionContextBase): - """ - This class is used if it is client side partitioning - """ - def __init__(self, client, options, database_link, query, partition_key): - """ - Constructor - :param CosmosClient client: - :param dict options: - The request options for the request. 
- :param str database_link: database self link or ID based link - :param (str or dict) query: - Partition_key (str): partition key for the query - - """ - super(_MultiCollectionQueryExecutionContext, self).__init__(client, options) - self._current_collection_index = 0 - self._collection_links = [] - self._collection_links_length = 0 - - self._query = query - self._client = client - - partition_resolver = client.GetPartitionResolver(database_link) - - if(partition_resolver is None): - raise ValueError(client.PartitionResolverErrorMessage) - else: - self._collection_links = partition_resolver.ResolveForRead(partition_key) - - self._collection_links_length = len(self._collection_links) - - if self._collection_links is None: - raise ValueError("_collection_links is None.") - - if self._collection_links_length <= 0: - raise ValueError("_collection_links_length is not greater than 0.") - - # Creating the QueryFeed for the first collection - path = _base.GetPathFromLink(self._collection_links[self._current_collection_index], 'docs') - collection_id = _base.GetResourceIdOrFullNameFromLink(self._collection_links[self._current_collection_index]) - - self._current_collection_index += 1 - - def fetch_fn(options): - return client.QueryFeed(path, - collection_id, - query, - options) - - self._fetch_function = fetch_fn - - def _has_more_pages(self): - return not self._has_started or self._continuation or (self._collection_links and self._current_collection_index < self._collection_links_length) - - def _fetch_next_block(self): - """Fetches the next block of query results. - - This iterates fetches the next block of results from the current collection link. - Once the current collection results were exhausted. It moves to the next collection link. - - :return: - List of fetched items. 
- :rtype: list - """ - # Fetch next block of results by executing the query against the current document collection - fetched_items = self._fetch_items_helper_with_retries(self._fetch_function) - - # If there are multiple document collections to query for(in case of partitioning), keep looping through each one of them, - # creating separate feed queries for each collection and fetching the items - while not fetched_items: - if self._collection_links and self._current_collection_index < self._collection_links_length: - path = _base.GetPathFromLink(self._collection_links[self._current_collection_index], 'docs') - collection_id = _base.GetResourceIdOrFullNameFromLink(self._collection_links[self._current_collection_index]) - - self._continuation = None - self._has_started = False - - def fetch_fn(options): - return self._client.QueryFeed(path, - collection_id, - self._query, - options) - - self._fetch_function = fetch_fn - - fetched_items = self._fetch_items_helper_with_retries(self._fetch_function) - self._current_collection_index += 1 - else: - break - return fetched_items diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py index acc7452da9ab..9cd9adc802da 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py @@ -22,91 +22,12 @@ """Internal class for proxy query execution context implementation in the Azure Cosmos database service. 
""" -import json from six.moves import xrange -from azure.cosmos.errors import HTTPFailure from azure.cosmos._execution_context.base_execution_context import _QueryExecutionContextBase -from azure.cosmos._execution_context.base_execution_context import _DefaultQueryExecutionContext -from azure.cosmos._execution_context.query_execution_info import _PartitionedQueryExecutionInfo from azure.cosmos._execution_context import endpoint_component -from azure.cosmos._execution_context import multi_execution_aggregator -from azure.cosmos.http_constants import StatusCodes, SubStatusCodes - -class _ProxyQueryExecutionContext(_QueryExecutionContextBase): - ''' - This class represents a proxy execution context wrapper: - - By default uses _DefaultQueryExecutionContext - - if backend responds a 400 error code with a Query Execution Info - it switches to _MultiExecutionContextAggregator - ''' - - def __init__(self, client, resource_link, query, options, fetch_function): - ''' - Constructor - ''' - super(_ProxyQueryExecutionContext, self).__init__(client, options) - - self._execution_context = _DefaultQueryExecutionContext(client, options, fetch_function) - self._resource_link = resource_link - self._query = query - self._fetch_function = fetch_function - - def next(self): - """Returns the next query result. - - :return: - The next query result. - :rtype: dict - :raises StopIteration: If no more result is left. - - """ - try: - return next(self._execution_context) - except HTTPFailure as e: - if self._is_partitioned_execution_info(e): - query_execution_info = self._get_partitioned_execution_info(e) - self._execution_context = self._create_pipelined_execution_context(query_execution_info) - else: - raise e - - return next(self._execution_context) - - def fetch_next_block(self): - """Returns a block of results. - - This method only exists for backward compatibility reasons. (Because QueryIterable - has exposed fetch_next_block api). - - :return: - List of results. 
- :rtype: list - """ - try: - return self._execution_context.fetch_next_block() - except HTTPFailure as e: - if self._is_partitioned_execution_info(e): - query_execution_info = self._get_partitioned_execution_info(e) - self._execution_context = self._create_pipelined_execution_context(query_execution_info) - else: - raise e - - return self._execution_context.fetch_next_block() - - def _is_partitioned_execution_info(self, e): - return e.status_code == StatusCodes.BAD_REQUEST and e.sub_status == SubStatusCodes.CROSS_PARTITION_QUERY_NOT_SERVABLE - - def _get_partitioned_execution_info(self, e): - error_msg = json.loads(e._http_error_message) - return _PartitionedQueryExecutionInfo(json.loads(error_msg['additionalErrorInfo'])) - - def _create_pipelined_execution_context(self, query_execution_info): - - assert self._resource_link, "code bug, resource_link has is required." - execution_context_aggregator = multi_execution_aggregator._MultiExecutionContextAggregator(self._client, self._resource_link, self._query, self._options, query_execution_info) - return _PipelineExecutionContext(self._client, self._options, execution_context_aggregator, query_execution_info) -class _PipelineExecutionContext(_QueryExecutionContextBase): +class _PipelineExecutionContext(_QueryExecutionContextBase): DEFAULT_PAGE_SIZE = 1000 @@ -136,8 +57,7 @@ def __init__(self, client, options, execution_context, query_execution_info): aggregates = query_execution_info.get_aggregates() if aggregates: self._endpoint = endpoint_component._QueryExecutionAggregateEndpointComponent(self._endpoint, aggregates) - - + def next(self): """Returns the next query result. 
diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/query_execution_info.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/query_execution_info.py index 342468e299c0..518824d34a10 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/query_execution_info.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/query_execution_info.py @@ -29,8 +29,9 @@ class _PartitionedQueryExecutionInfo(object): Represents a wrapper helper for partitioned query execution info dictionary returned by the backend. ''' - + QueryInfoPath = 'queryInfo' + HasSelectValue = [QueryInfoPath, 'hasSelectValue'] TopPath = [QueryInfoPath, 'top'] OrderByPath = [QueryInfoPath, 'orderBy'] AggregatesPath = [QueryInfoPath, 'aggregates'] @@ -73,6 +74,23 @@ def get_rewritten_query(self): rewrittenQuery = rewrittenQuery.replace('{documentdb-formattableorderbyquery-filter}', 'true') return rewrittenQuery + def has_select_value(self): + return self._extract(self.HasSelectValue) + + def has_top(self): + return self.get_top() is not None + + def has_order_by(self): + order_by = self.get_order_by() + return order_by is not None and len(order_by) > 0 + + def has_aggregates(self): + aggregates = self.get_aggregates() + return aggregates is not None and len(aggregates) > 0 + + def has_rewritten_query(self): + return self.get_rewritten_query() is not None + def _extract(self, path): item = self._query_execution_info diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py index 8abd73b4f8c1..8d40f2605a51 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py @@ -21,15 +21,19 @@ """Iterable query results in the Azure Cosmos database service. 
""" -from azure.cosmos._execution_context import execution_dispatcher -from azure.cosmos._execution_context import base_execution_context +from azure.cosmos._execution_context.base_execution_context import _DefaultQueryExecutionContext +from azure.cosmos._execution_context.query_execution_info import _PartitionedQueryExecutionInfo +from azure.cosmos._execution_context import multi_execution_aggregator +from . import http_constants +from . import errors +from ._execution_context.execution_dispatcher import _PipelineExecutionContext class QueryIterable(object): """Represents an iterable object of the query results. QueryIterable is a wrapper for query execution context. """ - def __init__(self, client, query, options, fetch_function, collection_link = None): + def __init__(self, client, query, options, fetch_function, resource_type, resource_link=None): """ Instantiates a QueryIterable for non-client side partitioning queries. _ProxyQueryExecutionContext will be used as the internal query execution context @@ -54,8 +58,9 @@ def __init__(self, client, query, options, fetch_function, collection_link = Non self._query = query self._options = options self._fetch_function = fetch_function - self._collection_link = collection_link + self._resource_link = resource_link self._ex_context = None + self._resource_type = resource_type @classmethod def PartitioningQueryIterable(cls, client, query, options, database_link, partition_key): @@ -63,8 +68,7 @@ def PartitioningQueryIterable(cls, client, query, options, database_link, partit Represents a client side partitioning query iterable. This constructor instantiates a QueryIterable for - client side partitioning queries, and sets _MultiCollectionQueryExecutionContext - as the internal execution context. 
+ client side partitioning queries :param CosmosClient client: Instance of document client @@ -87,12 +91,44 @@ def PartitioningQueryIterable(cls, client, query, options, database_link, partit def _create_execution_context(self): """instantiates the internal query execution context based. """ - if hasattr(self, '_database_link'): - # client side partitioning query - return base_execution_context._MultiCollectionQueryExecutionContext(self._client, self._options, self._database_link, self._query, self._partition_key) - else: - # - return execution_dispatcher._ProxyQueryExecutionContext(self._client, self._collection_link, self._query, self._options, self._fetch_function) + query_execution_context = _DefaultQueryExecutionContext(self._client, self._options, self._fetch_function) + if self._resource_type != http_constants.ResourceType.Document or self._use_default_query_execution_context(): + return query_execution_context + + query_execution_info = _PartitionedQueryExecutionInfo(self._client._GetQueryPlanThroughGateway(self._query, self._resource_link)) + + # Non value aggregates must go through DefaultDocumentQueryExecutionContext + # Single partition query can serve queries like SELECT AVG(c.age) FROM c + # SELECT MIN(c.age) + 5 FROM c + # SELECT MIN(c.age), MAX(c.age) FROM c + # while pipelined queries can only serve + # SELECT VALUE . So we send the query down the old pipeline to avoid a breaking change. 
+ + if query_execution_info.has_aggregates() and not query_execution_info.has_select_value(): + if self._options and ('enableCrossPartitionQuery' in self._options and self._options['enableCrossPartitionQuery']): + raise errors.HTTPFailure(http_constants.StatusCodes.BAD_REQUEST, "Cross partition query only supports 'VALUE ' for aggregates") + return query_execution_context + + return self._create_pipelined_execution_context(query_execution_info) + + def _use_default_query_execution_context(self): + options = ['partitionKeyRangeId', 'changeFeed', 'partitionKey'] + if self._options: + for option in options: + if option in self._options: + return True + return False + + def _create_pipelined_execution_context(self, query_execution_info): + + assert self._resource_link, "code bug, resource_link has is required." + execution_context_aggregator = multi_execution_aggregator._MultiExecutionContextAggregator(self._client, + self._resource_link, + self._query, + self._options, + query_execution_info) + return _PipelineExecutionContext(self._client, self._options, execution_context_aggregator, + query_execution_info) def __iter__(self): """Makes this class iterable. 
diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/routing_map_provider.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/routing_map_provider.py index 7b371e97e2d3..773067c1ddf8 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/routing_map_provider.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/routing_map_provider.py @@ -94,7 +94,7 @@ def _second_range_is_after_first_range(self, range1, range2): ##r.min < #previous_r.max return False else: - if (range2.min == range2.min and range1.isMaxInclusive and range2.isMinInclusive): + if (range2.min == range1.max and range1.isMaxInclusive and range2.isMinInclusive): # the inclusive ending endpoint of previous_r is the same as the inclusive beginning endpoint of r return False diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py b/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py index 33be86d9004f..8ddbbc697746 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py @@ -386,6 +386,7 @@ class _OperationType(object): Recreate = 'Recreate' Replace = 'Replace' SqlQuery = 'SqlQuery' + QueryPlan = 'QueryPlan' Update = 'Update' Upsert = 'Upsert' @@ -408,3 +409,24 @@ def IsReadOnlyOperation(operationType): operationType == _OperationType.Query or operationType == _OperationType.SqlQuery) + @staticmethod + def IsFeedOperation(operationType): + return (operationType == _OperationType.Create or + operationType == _OperationType.Upsert or + operationType == _OperationType.ReadFeed or + operationType == _OperationType.Query or + operationType == _OperationType.SqlQuery or + operationType == _OperationType.QueryPlan or + operationType == _OperationType.HeadFeed) + +class _QueryFeature(object): + NoneQuery = "NoneQuery" + Aggregate = "Aggregate" + CompositeAggregate = "CompositeAggregate" + Distinct = "Distinct" + GroupBy = "GroupBy" + MultipleAggregates = "MultipleAggregates" + MultipleOrderBy = "MultipleOrderBy" + OffsetAndLimit = 
"OffsetAndLimit" + OrderBy = "OrderBy" + Top = "Top" diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/http_constants.py b/sdk/cosmos/azure-cosmos/azure/cosmos/http_constants.py index 11c099f097e2..b88b612b8b21 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/http_constants.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/http_constants.py @@ -87,6 +87,9 @@ class HttpHeaders: # Query Query = 'x-ms-documentdb-query' IsQuery = 'x-ms-documentdb-isquery' + IsQueryPlanRequest = "x-ms-cosmos-is-query-plan-request" + SupportedQueryFeatures = "x-ms-cosmos-supported-query-features" + QueryVersion = "x-ms-cosmos-query-version" # Our custom DocDB headers Continuation = 'x-ms-continuation' @@ -258,6 +261,7 @@ class Versions: CurrentVersion = '2018-12-31' SDKName = 'azure-cosmos' SDKVersion = '4.0.0a1' + QueryVersion = "1.0" class Delimiters: diff --git a/sdk/cosmos/azure-cosmos/test/aggregate_tests.py b/sdk/cosmos/azure-cosmos/test/aggregate_tests.py index c8dbdf131109..f2fa248f1fc7 100644 --- a/sdk/cosmos/azure-cosmos/test/aggregate_tests.py +++ b/sdk/cosmos/azure-cosmos/test/aggregate_tests.py @@ -187,6 +187,10 @@ def _insert_doc(collection, document_definitions): _all_tests = [] + _setup() + _generate_test_configs() + _run_all() + return type.__new__(mcs, name, bases, dict) @@ -197,8 +201,6 @@ def _execute_query_and_validate_results(self, collection, query, expected): print('Running test with query: ' + query) # executes the query and validates the results against the expected results - options = {'enableCrossPartitionQuery': 'true'} - result_iterable = collection.query_items( query=query, enable_cross_partition_query=True diff --git a/sdk/cosmos/azure-cosmos/test/query_tests.py b/sdk/cosmos/azure-cosmos/test/query_tests.py index ca6c8a377fc0..af3c7107973d 100644 --- a/sdk/cosmos/azure-cosmos/test/query_tests.py +++ b/sdk/cosmos/azure-cosmos/test/query_tests.py @@ -2,6 +2,8 @@ import uuid import azure.cosmos.cosmos_client as cosmos_client import azure.cosmos._retry_utility as 
retry_utility +from azure.cosmos._execution_context.query_execution_info import _PartitionedQueryExecutionInfo +import azure.cosmos.errors as errors import pytest import test_config @@ -188,7 +190,7 @@ def test_max_item_count_honored_in_order_by_query(self): enable_cross_partition_query=True ) # 1 call to get query plans, 1 call to get pkr, 10 calls to one partion with the documents, 1 call each to other 4 partitions - self.validate_query_requests_count(query_iterable, 16 * 2) + self.validate_query_requests_count(query_iterable, 29) query_iterable = created_collection.query_items( query=query, @@ -197,7 +199,7 @@ def test_max_item_count_honored_in_order_by_query(self): ) # 1 call to get query plan 1 calls to one partition with the documents, 1 call each to other 4 partitions - self.validate_query_requests_count(query_iterable, 6 * 2) + self.validate_query_requests_count(query_iterable, 11) def validate_query_requests_count(self, query_iterable, expected_count): self.count = 0 @@ -214,6 +216,41 @@ def _MockExecuteFunction(self, function, *args, **kwargs): self.count += 1 return self.OriginalExecuteFunction(function, *args, **kwargs) + def test_get_query_plan_through_gateway(self): + created_collection = self.config.create_multi_partition_collection_with_custom_pk_if_not_exist(self.client) + query_plan_dict = self.client.client_connection._GetQueryPlanThroughGateway("Select top 10 value count(c.id) from c", created_collection.container_link) + query_execution_info = _PartitionedQueryExecutionInfo(query_plan_dict) + self._validate_query_plan(query_execution_info, 10, [], ['Count'], True) + + query_plan_dict = self.client.client_connection._GetQueryPlanThroughGateway("Select * from c order by c._ts", created_collection.container_link) + query_execution_info = _PartitionedQueryExecutionInfo(query_plan_dict) + self._validate_query_plan(query_execution_info, None, ['Ascending'], [], False) + + def _validate_query_plan(self, query_execution_info, top, order_by, aggregate, 
select_value): + self.assertTrue(query_execution_info.has_rewritten_query()) + self.assertEquals(query_execution_info.has_top(), top is not None) + self.assertEquals(query_execution_info.get_top(), top) + self.assertEquals(query_execution_info.has_order_by(), len(order_by) > 0) + self.assertListEqual(query_execution_info.get_order_by(), order_by) + self.assertEquals(query_execution_info.has_aggregates(), len(aggregate)> 0) + self.assertListEqual(query_execution_info.get_aggregates(), aggregate) + self.assertEquals(query_execution_info.has_select_value(), select_value) + + def test_unsupported_queries(self): + created_collection = self.config.create_multi_partition_collection_with_custom_pk_if_not_exist(self.client) + queries = ['SELECT COUNT(1) FROM c', 'SELECT COUNT(1) + 5 FROM c', 'SELECT COUNT(1) + SUM(c) FROM c'] + for query in queries: + query_iterable = created_collection.query_items(query=query, enable_cross_partition_query=True) + try: + list(query_iterable) + self.fail() + except errors.HTTPFailure as e: + self.assertEqual(e.status_code, 400) + + def test_query_with_non_overlapping_pk_ranges(self): + created_collection = self.config.create_multi_partition_collection_with_custom_pk_if_not_exist(self.client) + query_iterable = created_collection.query_items("select * from c where c.pk='1' or c.pk='2'") + self.assertListEqual(list(query_iterable), []) if __name__ == "__main__": unittest.main() \ No newline at end of file From 3d90acba96472f3ce24c839fdcc9d2cd43d578d2 Mon Sep 17 00:00:00 2001 From: Srinath Narayanan Date: Mon, 12 Aug 2019 20:49:16 -0700 Subject: [PATCH 02/29] initial commit for offset and limit --- .../_execution_context/endpoint_component.py | 20 +++++++++++ .../execution_dispatcher.py | 16 ++++++--- .../query_execution_info.py | 20 ++++++++++- sdk/cosmos/azure-cosmos/test/query_tests.py | 33 ++++++++++++++++--- 4 files changed, 79 insertions(+), 10 deletions(-) diff --git 
a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py index 77dda8512b00..10560abc19f2 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py @@ -68,6 +68,26 @@ def next(self): return res raise StopIteration + +class _QueryExecutionOffsetEndpointComponent(_QueryExecutionEndpointComponent): + """Represents an endpoint in handling offset query. + + It returns results offset by as many results as offset arg specified. + """ + def __init__(self, execution_context, offset_count): + super(_QueryExecutionOffsetEndpointComponent, self).__init__(execution_context) + self._offset_count = offset_count + + def next(self): + while self._offset_count > 0: + res = next(self._execution_context) + if res is not None: + self._offset_count -= 1 + else: + raise StopIteration + return next(self._execution_context) + + class _QueryExecutionAggregateEndpointComponent(_QueryExecutionEndpointComponent): """Represents an endpoint in handling aggregate query. 
diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py index 9cd9adc802da..af4bc5283a21 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py @@ -49,15 +49,23 @@ def __init__(self, client, options, execution_context, query_execution_info): order_by = query_execution_info.get_order_by() if (order_by): self._endpoint = endpoint_component._QueryExecutionOrderByEndpointComponent(self._endpoint) - - top = query_execution_info.get_top() - if not (top is None): - self._endpoint = endpoint_component._QueryExecutionTopEndpointComponent(self._endpoint, top) aggregates = query_execution_info.get_aggregates() if aggregates: self._endpoint = endpoint_component._QueryExecutionAggregateEndpointComponent(self._endpoint, aggregates) + offset = query_execution_info.get_offset() + if not (offset is None): + self._endpoint = endpoint_component._QueryExecutionOffsetEndpointComponent(self._endpoint, offset) + + top = query_execution_info.get_top() + if not (top is None): + self._endpoint = endpoint_component._QueryExecutionTopEndpointComponent(self._endpoint, top) + + limit = query_execution_info.get_limit() + if not (limit is None): + self._endpoint = endpoint_component._QueryExecutionTopEndpointComponent(self._endpoint, limit) + def next(self): """Returns the next query result. 
diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/query_execution_info.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/query_execution_info.py index 518824d34a10..6d2ff45afcd1 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/query_execution_info.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/query_execution_info.py @@ -33,6 +33,8 @@ class _PartitionedQueryExecutionInfo(object): QueryInfoPath = 'queryInfo' HasSelectValue = [QueryInfoPath, 'hasSelectValue'] TopPath = [QueryInfoPath, 'top'] + OffsetPath = [QueryInfoPath, 'offset'] + LimitPath = [QueryInfoPath, 'limit'] OrderByPath = [QueryInfoPath, 'orderBy'] AggregatesPath = [QueryInfoPath, 'aggregates'] QueryRangesPath = 'queryRanges' @@ -49,7 +51,17 @@ def get_top(self): """Returns the top count (if any) or None """ return self._extract(_PartitionedQueryExecutionInfo.TopPath) - + + def get_limit(self): + """Returns the limit count (if any) or None + """ + return self._extract(_PartitionedQueryExecutionInfo.LimitPath) + + def get_offset(self): + """Returns the offset count (if any) or None + """ + return self._extract(_PartitionedQueryExecutionInfo.OffsetPath) + def get_order_by(self): """Returns order by items (if any) or None """ @@ -80,6 +92,12 @@ def has_select_value(self): def has_top(self): return self.get_top() is not None + def has_limit(self): + return self.get_limit() is not None + + def has_offset(self): + return self.get_offset() is not None + def has_order_by(self): order_by = self.get_order_by() return order_by is not None and len(order_by) > 0 diff --git a/sdk/cosmos/azure-cosmos/test/query_tests.py b/sdk/cosmos/azure-cosmos/test/query_tests.py index af3c7107973d..f06c3df0f471 100644 --- a/sdk/cosmos/azure-cosmos/test/query_tests.py +++ b/sdk/cosmos/azure-cosmos/test/query_tests.py @@ -220,21 +220,25 @@ def test_get_query_plan_through_gateway(self): created_collection = 
self.config.create_multi_partition_collection_with_custom_pk_if_not_exist(self.client) query_plan_dict = self.client.client_connection._GetQueryPlanThroughGateway("Select top 10 value count(c.id) from c", created_collection.container_link) query_execution_info = _PartitionedQueryExecutionInfo(query_plan_dict) - self._validate_query_plan(query_execution_info, 10, [], ['Count'], True) + self._validate_query_plan(query_execution_info, 10, [], ['Count'], True, None, None) - query_plan_dict = self.client.client_connection._GetQueryPlanThroughGateway("Select * from c order by c._ts", created_collection.container_link) + query_plan_dict = self.client.client_connection._GetQueryPlanThroughGateway("Select * from c order by c._ts offset 5 limit 10", created_collection.container_link) query_execution_info = _PartitionedQueryExecutionInfo(query_plan_dict) - self._validate_query_plan(query_execution_info, None, ['Ascending'], [], False) + self._validate_query_plan(query_execution_info, None, ['Ascending'], [], False, 5, 10) - def _validate_query_plan(self, query_execution_info, top, order_by, aggregate, select_value): + def _validate_query_plan(self, query_execution_info, top, order_by, aggregate, select_value, offset, limit): self.assertTrue(query_execution_info.has_rewritten_query()) self.assertEquals(query_execution_info.has_top(), top is not None) self.assertEquals(query_execution_info.get_top(), top) self.assertEquals(query_execution_info.has_order_by(), len(order_by) > 0) self.assertListEqual(query_execution_info.get_order_by(), order_by) - self.assertEquals(query_execution_info.has_aggregates(), len(aggregate)> 0) + self.assertEquals(query_execution_info.has_aggregates(), len(aggregate) > 0) self.assertListEqual(query_execution_info.get_aggregates(), aggregate) self.assertEquals(query_execution_info.has_select_value(), select_value) + self.assertEquals(query_execution_info.has_offset(), offset is not None) + self.assertEquals(query_execution_info.get_offset(), offset) + 
self.assertEquals(query_execution_info.has_limit(), limit is not None) + self.assertEquals(query_execution_info.get_limit(), limit) def test_unsupported_queries(self): created_collection = self.config.create_multi_partition_collection_with_custom_pk_if_not_exist(self.client) @@ -252,5 +256,24 @@ def test_query_with_non_overlapping_pk_ranges(self): query_iterable = created_collection.query_items("select * from c where c.pk='1' or c.pk='2'") self.assertListEqual(list(query_iterable), []) + def test_offset_limit(self): + created_collection = self.config.create_multi_partition_collection_with_custom_pk_if_not_exist(self.client) + values = [] + for i in range(10): + document_definition = {'pk': i, 'id': 'myId' + str(uuid.uuid4())} + values.append(created_collection.create_item(body=document_definition)['pk']) + + self._validate_skip_take(created_collection, 'SELECT * from c ORDER BY c.pk OFFSET 0 LIMIT 5', values[:5]) + self._validate_skip_take(created_collection, 'SELECT * from c ORDER BY c.pk OFFSET 5 LIMIT 10', values[5:]) + self._validate_skip_take(created_collection, 'SELECT * from c ORDER BY c.pk OFFSET 10 LIMIT 5', []) + self._validate_skip_take(created_collection, 'SELECT * from c ORDER BY c.pk OFFSET 100 LIMIT 1', []) + + def _validate_skip_take(self, created_collection, query, results): + query_iterable = created_collection.query_items( + query=query, + enable_cross_partition_query=True + ) + self.assertListEqual(list(map(lambda doc: doc['pk'], list(query_iterable))), results) + if __name__ == "__main__": unittest.main() \ No newline at end of file From bab7e2c5e57c2657d6d791bc66ba24feca50d697 Mon Sep 17 00:00:00 2001 From: Srinath Narayanan Date: Mon, 12 Aug 2019 20:52:16 -0700 Subject: [PATCH 03/29] modified aggregate tests to check for top as well after bugfix --- sdk/cosmos/azure-cosmos/test/aggregate_tests.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/test/aggregate_tests.py 
b/sdk/cosmos/azure-cosmos/test/aggregate_tests.py index f2fa248f1fc7..8f825a152230 100644 --- a/sdk/cosmos/azure-cosmos/test/aggregate_tests.py +++ b/sdk/cosmos/azure-cosmos/test/aggregate_tests.py @@ -96,8 +96,8 @@ def _setup(): _insert_doc(mcs.created_collection, document_definitions) def _generate_test_configs(): - aggregate_query_format = 'SELECT VALUE {}(r.{}) FROM r WHERE {}' - aggregate_orderby_query_format = 'SELECT VALUE {}(r.{}) FROM r WHERE {} ORDER BY r.{}' + aggregate_query_format = 'SELECT TOP 1 VALUE {}(r.{}) FROM r WHERE {}' + aggregate_orderby_query_format = 'SELECT TOP 1 VALUE {}(r.{}) FROM r WHERE {} ORDER BY r.{}' aggregate_configs = [ ['AVG', _config.sum / _config.docs_with_numeric_id, 'IS_NUMBER(r.{})'.format(_config.PARTITION_KEY)], @@ -119,8 +119,8 @@ def _generate_test_configs(): _config.PARTITION_KEY), expected]) - aggregate_single_partition_format = 'SELECT VALUE {}(r.{}) FROM r WHERE r.{} = \'{}\'' - aggregate_orderby_single_partition_format = 'SELECT {}(r.{}) FROM r WHERE r.{} = \'{}\'' + aggregate_single_partition_format = 'SELECT TOP 1 VALUE {}(r.{}) FROM r WHERE r.{} = \'{}\'' + aggregate_orderby_single_partition_format = 'SELECT TOP 1 {}(r.{}) FROM r WHERE r.{} = \'{}\'' same_partiton_sum = _config.DOCS_WITH_SAME_PARTITION_KEY * (_config.DOCS_WITH_SAME_PARTITION_KEY + 1) / 2.0 aggregate_single_partition_configs = [ ['AVG', same_partiton_sum / _config.DOCS_WITH_SAME_PARTITION_KEY], From 1bc1213d28bb65fbda65bdef86f3c48a14e677af Mon Sep 17 00:00:00 2001 From: Srinath Narayanan Date: Wed, 14 Aug 2019 18:14:13 -0700 Subject: [PATCH 04/29] Added support for distinct --- .../_execution_context/endpoint_component.py | 34 +++++ .../execution_dispatcher.py | 7 + .../query_execution_info.py | 9 ++ sdk/cosmos/azure-cosmos/test/query_tests.py | 139 ++++++++++++++++-- 4 files changed, 177 insertions(+), 12 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py 
b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py index 10560abc19f2..e6099f8e511d 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py @@ -69,6 +69,40 @@ def next(self): raise StopIteration +class _QueryExecutionDistinctOrderedEndpointComponent(_QueryExecutionEndpointComponent): + """Represents an endpoint in handling distinct query. + + It returns only those values not already returned. + """ + def __init__(self, execution_context): + super(_QueryExecutionDistinctOrderedEndpointComponent, self).__init__(execution_context) + self.last_result = None + + def next(self): + res = next(self._execution_context) + while self.last_result == res: + res = next(self._execution_context) + self.last_result = res + return res + + +class _QueryExecutionDistinctUnorderedEndpointComponent(_QueryExecutionEndpointComponent): + """Represents an endpoint in handling distinct query. + + It returns only those values not already returned. + """ + def __init__(self, execution_context): + super(_QueryExecutionDistinctUnorderedEndpointComponent, self).__init__(execution_context) + self.last_result = set() + + def next(self): + res = next(self._execution_context) + while str(res) in self.last_result: + res = next(self._execution_context) + self.last_result.add(str(res)) + return res + + class _QueryExecutionOffsetEndpointComponent(_QueryExecutionEndpointComponent): """Represents an endpoint in handling offset query. 
diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py index af4bc5283a21..c3cd517b7156 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py @@ -66,6 +66,13 @@ def __init__(self, client, options, execution_context, query_execution_info): if not (limit is None): self._endpoint = endpoint_component._QueryExecutionTopEndpointComponent(self._endpoint, limit) + distinct_type = query_execution_info.get_distinct_type() + if distinct_type is not None: + if distinct_type == "Ordered": + self._endpoint = endpoint_component._QueryExecutionDistinctOrderedEndpointComponent(self._endpoint) + else: + self._endpoint = endpoint_component._QueryExecutionDistinctUnorderedEndpointComponent(self._endpoint) + def next(self): """Returns the next query result. diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/query_execution_info.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/query_execution_info.py index 6d2ff45afcd1..888f5a557131 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/query_execution_info.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/query_execution_info.py @@ -35,6 +35,7 @@ class _PartitionedQueryExecutionInfo(object): TopPath = [QueryInfoPath, 'top'] OffsetPath = [QueryInfoPath, 'offset'] LimitPath = [QueryInfoPath, 'limit'] + DistinctTypePath = [QueryInfoPath, 'distinctType'] OrderByPath = [QueryInfoPath, 'orderBy'] AggregatesPath = [QueryInfoPath, 'aggregates'] QueryRangesPath = 'queryRanges' @@ -62,6 +63,11 @@ def get_offset(self): """ return self._extract(_PartitionedQueryExecutionInfo.OffsetPath) + def get_distinct_type(self): + """Returns the distinct type (if any) or None + """ + return self._extract(_PartitionedQueryExecutionInfo.DistinctTypePath) + def 
get_order_by(self): """Returns order by items (if any) or None """ @@ -98,6 +104,9 @@ def has_limit(self): def has_offset(self): return self.get_offset() is not None + def has_distinct_type(self): + return self.get_distinct_type() != "None" + def has_order_by(self): order_by = self.get_order_by() return order_by is not None and len(order_by) > 0 diff --git a/sdk/cosmos/azure-cosmos/test/query_tests.py b/sdk/cosmos/azure-cosmos/test/query_tests.py index f06c3df0f471..998e3c627d5d 100644 --- a/sdk/cosmos/azure-cosmos/test/query_tests.py +++ b/sdk/cosmos/azure-cosmos/test/query_tests.py @@ -4,7 +4,9 @@ import azure.cosmos._retry_utility as retry_utility from azure.cosmos._execution_context.query_execution_info import _PartitionedQueryExecutionInfo import azure.cosmos.errors as errors +from azure.cosmos.partition_key import PartitionKey import pytest +import collections import test_config pytestmark = pytest.mark.cosmosEmulator @@ -218,16 +220,16 @@ def _MockExecuteFunction(self, function, *args, **kwargs): def test_get_query_plan_through_gateway(self): created_collection = self.config.create_multi_partition_collection_with_custom_pk_if_not_exist(self.client) - query_plan_dict = self.client.client_connection._GetQueryPlanThroughGateway("Select top 10 value count(c.id) from c", created_collection.container_link) - query_execution_info = _PartitionedQueryExecutionInfo(query_plan_dict) - self._validate_query_plan(query_execution_info, 10, [], ['Count'], True, None, None) + self._validate_query_plan("Select top 10 value count(c.id) from c", created_collection.container_link, 10, [], ['Count'], True, None, None, "None") + self._validate_query_plan("Select * from c order by c._ts offset 5 limit 10", created_collection.container_link, None, ['Ascending'], [], False, 5, 10, "None") + self._validate_query_plan("Select distinct value c.id from c order by c.id", created_collection.container_link, None, ['Ascending'], [], True, None, None, "Ordered") - query_plan_dict = 
self.client.client_connection._GetQueryPlanThroughGateway("Select * from c order by c._ts offset 5 limit 10", created_collection.container_link) + def _validate_query_plan(self, query, container_link, top, order_by, aggregate, select_value, offset, limit, distinct): + query_plan_dict = self.client.client_connection._GetQueryPlanThroughGateway(query, container_link) query_execution_info = _PartitionedQueryExecutionInfo(query_plan_dict) - self._validate_query_plan(query_execution_info, None, ['Ascending'], [], False, 5, 10) - - def _validate_query_plan(self, query_execution_info, top, order_by, aggregate, select_value, offset, limit): self.assertTrue(query_execution_info.has_rewritten_query()) + self.assertEquals(query_execution_info.has_distinct_type(), distinct != "None") + self.assertEquals(query_execution_info.get_distinct_type(), distinct) self.assertEquals(query_execution_info.has_top(), top is not None) self.assertEquals(query_execution_info.get_top(), top) self.assertEquals(query_execution_info.has_order_by(), len(order_by) > 0) @@ -263,17 +265,130 @@ def test_offset_limit(self): document_definition = {'pk': i, 'id': 'myId' + str(uuid.uuid4())} values.append(created_collection.create_item(body=document_definition)['pk']) - self._validate_skip_take(created_collection, 'SELECT * from c ORDER BY c.pk OFFSET 0 LIMIT 5', values[:5]) - self._validate_skip_take(created_collection, 'SELECT * from c ORDER BY c.pk OFFSET 5 LIMIT 10', values[5:]) - self._validate_skip_take(created_collection, 'SELECT * from c ORDER BY c.pk OFFSET 10 LIMIT 5', []) - self._validate_skip_take(created_collection, 'SELECT * from c ORDER BY c.pk OFFSET 100 LIMIT 1', []) + self._validate_offset_limit(created_collection, 'SELECT * from c ORDER BY c.pk OFFSET 0 LIMIT 5', values[:5]) + self._validate_offset_limit(created_collection, 'SELECT * from c ORDER BY c.pk OFFSET 5 LIMIT 10', values[5:]) + self._validate_offset_limit(created_collection, 'SELECT * from c ORDER BY c.pk OFFSET 10 LIMIT 5', 
[]) + self._validate_offset_limit(created_collection, 'SELECT * from c ORDER BY c.pk OFFSET 100 LIMIT 1', []) - def _validate_skip_take(self, created_collection, query, results): + def _validate_offset_limit(self, created_collection, query, results): query_iterable = created_collection.query_items( query=query, enable_cross_partition_query=True ) self.assertListEqual(list(map(lambda doc: doc['pk'], list(query_iterable))), results) + def test_distinct(self): + created_database = self.config.create_database_if_not_exist(self.client) + distinct_field = 'distinct_field' + pk_field = "pk" + different_field = "different_field" + + created_collection = created_database.create_container( + id='collection with composite index ' + str(uuid.uuid4()), + partition_key=PartitionKey(path="/pk", kind="Hash"), + indexing_policy={ + "compositeIndexes": [ + [{"path": "/" + pk_field, "order": "ascending"}, {"path": "/" + distinct_field, "order": "ascending"}], + [{"path": "/" + distinct_field, "order": "ascending"}, {"path": "/" + pk_field, "order": "ascending"}] + ] + } + ) + documents = [] + for i in range(5): + j = i + while j > i - 5: + document_definition = {pk_field: i, 'id': str(uuid.uuid4()), distinct_field: j} + documents.append(created_collection.create_item(body=document_definition)) + document_definition = {pk_field: i, 'id': str(uuid.uuid4()), distinct_field: j} + documents.append(created_collection.create_item(body=document_definition)) + document_definition = {pk_field: i, 'id': str(uuid.uuid4())} + documents.append(created_collection.create_item(body=document_definition)) + j -= 1 + + padded_docs = self._pad_with_none(documents, distinct_field) + + self._validate_distinct(created_collection, 'SELECT distinct c.%s from c ORDER BY c.%s' % (distinct_field, distinct_field), + self._get_distinct_docs(self._get_order_by_docs(padded_docs, distinct_field, None), distinct_field, None, True), + False, [distinct_field]) + + self._validate_distinct(created_collection, 'SELECT 
distinct c.%s, c.%s from c ORDER BY c.%s, c.%s' % (distinct_field, pk_field, pk_field, distinct_field), + self._get_distinct_docs(self._get_order_by_docs(padded_docs, pk_field, distinct_field), distinct_field, pk_field, True), + False, [distinct_field, pk_field]) + + self._validate_distinct(created_collection, 'SELECT distinct c.%s, c.%s from c ORDER BY c.%s, c.%s' % (distinct_field, pk_field, distinct_field, pk_field), + self._get_distinct_docs(self._get_order_by_docs(padded_docs, distinct_field, pk_field), distinct_field, pk_field, True), + False, [distinct_field, pk_field]) + + self._validate_distinct(created_collection, 'SELECT distinct value c.%s from c ORDER BY c.%s' % (distinct_field, distinct_field), + self._get_distinct_docs(self._get_order_by_docs(padded_docs, distinct_field, None), distinct_field, None, True), + False, [distinct_field]) + + self._validate_distinct(created_collection, 'SELECT distinct c.%s from c' % (distinct_field), + self._get_distinct_docs(padded_docs, distinct_field, None, False), + True, [distinct_field]) + + self._validate_distinct(created_collection, 'SELECT distinct c.%s, c.%s from c' % (distinct_field, pk_field), + self._get_distinct_docs(padded_docs, distinct_field, pk_field, False), + True, [distinct_field, pk_field]) + + self._validate_distinct(created_collection, 'SELECT distinct value c.%s from c' % (distinct_field), + self._get_distinct_docs(padded_docs, distinct_field, None, True), + True, [distinct_field]) + + self._validate_distinct(created_collection, 'SELECT distinct c.%s from c ORDER BY c.%s' % (different_field, different_field), [], + True, [different_field]) + + self._validate_distinct(created_collection, 'SELECT distinct c.%s from c' % (different_field), ['None'], + True, different_field) + + created_database.delete_container(created_collection.id) + + def _get_order_by_docs(self, documents, field1, field2): + if field2 is None: + return sorted(documents, key=lambda d: (d[field1] is not None, d[field1])) + else: + 
return sorted(documents, key=lambda d: (d[field1] is not None, d[field1], d[field2] is not None, d[field2])) + + def _get_distinct_docs(self, documents, field1, field2, is_order_by_or_value): + if field2 is None: + res = collections.OrderedDict.fromkeys(doc[field1] for doc in documents) + if is_order_by_or_value: + res = filter(lambda x: False if x is None else True, res) + else: + res = collections.OrderedDict.fromkeys(str(doc[field1]) + "," + str(doc[field2]) for doc in documents) + return list(res) + + def _pad_with_none(self, documents, field): + for doc in documents: + if field not in doc: + doc[field] = None + return documents + + def _validate_distinct(self, created_collection, query, results, is_select, fields): + query_iterable = created_collection.query_items( + query=query, + enable_cross_partition_query=True + ) + query_results = list(query_iterable) + self.assertEquals(len(results), len(query_results)) + query_results_strings = [] + result_strings = [] + for i in range(len(results)): + query_results_strings.append(self._get_query_result_string(query_results[i], fields)) + result_strings.append(str(results[i])) + if is_select: + query_results_strings = sorted(query_results_strings) + result_strings = sorted(result_strings) + self.assertListEqual(result_strings, query_results_strings) + + def _get_query_result_string(self, query_result, fields): + if type(query_result) is not dict: + return str(query_result) + res = str(query_result[fields[0]] if fields[0] in query_result else None) + if len(fields) == 2: + res = res + "," + str(query_result[fields[1]] if fields[1] in query_result else None) + + return res + if __name__ == "__main__": unittest.main() \ No newline at end of file From 34729b7bfa517005946570a16d1ea065617678f6 Mon Sep 17 00:00:00 2001 From: Srinath Narayanan Date: Thu, 15 Aug 2019 15:04:10 -0700 Subject: [PATCH 05/29] modified aggregate tests to run in mono repo --- .../azure-cosmos/test/aggregate_tests.py | 292 +++++++++--------- 1 file 
changed, 146 insertions(+), 146 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/test/aggregate_tests.py b/sdk/cosmos/azure-cosmos/test/aggregate_tests.py index 8f825a152230..94fdf5ff82ad 100644 --- a/sdk/cosmos/azure-cosmos/test/aggregate_tests.py +++ b/sdk/cosmos/azure-cosmos/test/aggregate_tests.py @@ -49,156 +49,155 @@ class _config: sum = 0 -class AggregateQueryTestSequenceMeta(type): - def __new__(mcs, name, bases, dict): - def _run_one(query, expected_result): - def test(self): - self._execute_query_and_validate_results(mcs.created_collection, query, expected_result) - - return test - - def _setup(): - if (not _config.master_key or not _config.host): - raise Exception( - "You must specify your Azure Cosmos account values for " - "'masterKey' and 'host' at the top of this class to run the " - "tests.") - - mcs.client = cosmos_client.CosmosClient(_config.host, - {'masterKey': _config.master_key}, "Session", _config.connection_policy) - created_db = test_config._test_config.create_database_if_not_exist(mcs.client) - mcs.created_collection = _create_collection(created_db) - - # test documents - document_definitions = [] - - values = [None, False, True, "abc", "cdfg", "opqrs", "ttttttt", "xyz", "oo", "ppp"] - for value in values: - d = {_config.PARTITION_KEY: value, 'id': str(uuid.uuid4())} - document_definitions.append(d) - - for i in xrange(_config.DOCS_WITH_SAME_PARTITION_KEY): - d = {_config.PARTITION_KEY: _config.UNIQUE_PARTITION_KEY, - 'resourceId': i, - _config.FIELD: i + 1, - 'id': str(uuid.uuid4())} - document_definitions.append(d) - - _config.docs_with_numeric_id = \ - _config.DOCUMENTS_COUNT - len(values) - _config.DOCS_WITH_SAME_PARTITION_KEY - for i in xrange(_config.docs_with_numeric_id): - d = {_config.PARTITION_KEY: i + 1, 'id': str(uuid.uuid4())} - document_definitions.append(d) - - _config.sum = _config.docs_with_numeric_id \ - * (_config.docs_with_numeric_id + 1) / 2.0 - - _insert_doc(mcs.created_collection, document_definitions) - - def 
_generate_test_configs(): - aggregate_query_format = 'SELECT TOP 1 VALUE {}(r.{}) FROM r WHERE {}' - aggregate_orderby_query_format = 'SELECT TOP 1 VALUE {}(r.{}) FROM r WHERE {} ORDER BY r.{}' - aggregate_configs = [ - ['AVG', _config.sum / _config.docs_with_numeric_id, - 'IS_NUMBER(r.{})'.format(_config.PARTITION_KEY)], - ['AVG', None, 'true'], - ['COUNT', _config.DOCUMENTS_COUNT, 'true'], - ['MAX', 'xyz', 'true'], - ['MIN', None, 'true'], - ['SUM', _config.sum, 'IS_NUMBER(r.{})'.format(_config.PARTITION_KEY)], - ['SUM', None, 'true'] - ] - for operator, expected, condition in aggregate_configs: - _all_tests.append([ - '{} {}'.format(operator, condition), - aggregate_query_format.format(operator, _config.PARTITION_KEY, condition), - expected]) - _all_tests.append([ - '{} {} OrderBy'.format(operator, condition), - aggregate_orderby_query_format.format(operator, _config.PARTITION_KEY, condition, - _config.PARTITION_KEY), - expected]) - - aggregate_single_partition_format = 'SELECT TOP 1 VALUE {}(r.{}) FROM r WHERE r.{} = \'{}\'' - aggregate_orderby_single_partition_format = 'SELECT TOP 1 {}(r.{}) FROM r WHERE r.{} = \'{}\'' - same_partiton_sum = _config.DOCS_WITH_SAME_PARTITION_KEY * (_config.DOCS_WITH_SAME_PARTITION_KEY + 1) / 2.0 - aggregate_single_partition_configs = [ - ['AVG', same_partiton_sum / _config.DOCS_WITH_SAME_PARTITION_KEY], - ['COUNT', _config.DOCS_WITH_SAME_PARTITION_KEY], - ['MAX', _config.DOCS_WITH_SAME_PARTITION_KEY], - ['MIN', 1], - ['SUM', same_partiton_sum] - ] - for operator, expected in aggregate_single_partition_configs: - _all_tests.append([ - '{} SinglePartition {}'.format(operator, 'SELECT VALUE'), - aggregate_single_partition_format.format( - operator, _config.FIELD, _config.PARTITION_KEY, _config.UNIQUE_PARTITION_KEY), expected]) - _all_tests.append([ - '{} SinglePartition {}'.format(operator, 'SELECT'), - aggregate_orderby_single_partition_format.format( - operator, _config.FIELD, _config.PARTITION_KEY, _config.UNIQUE_PARTITION_KEY), 
- Exception()]) - - def _run_all(): - for test_name, query, expected_result in _all_tests: - test_name = "test_%s" % test_name - dict[test_name] = _run_one(query, expected_result) - - def _create_collection(created_db): - # type: (Database) -> Container - created_collection = created_db.create_container( - id='aggregate tests collection ' + str(uuid.uuid4()), - indexing_policy={ - 'includedPaths': [ - { - 'path': '/', - 'indexes': [ - { - 'kind': 'Range', - 'dataType': 'Number' - }, - { - 'kind': 'Range', - 'dataType': 'String' - } - ] - } - ] - }, - partition_key=PartitionKey( - path='/{}'.format(_config.PARTITION_KEY), - kind=documents.PartitionKind.Hash, - ), - offer_throughput=10100 - ) - - return created_collection - - def _insert_doc(collection, document_definitions): - # type: (Container, Dict[str, Any]) -> [Dict[str, Any]] - created_docs = [] - for d in document_definitions: - print(d) - created_doc = collection.create_item(body=d) - created_docs.append(created_doc) - - return created_docs - - _all_tests = [] - - _setup() - _generate_test_configs() - _run_all() - - return type.__new__(mcs, name, bases, dict) +@pytest.mark.usefixtures("teardown") +class AggregationQueryTest(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls._all_tests = [] + cls._setup() + cls._generate_test_configs() + + @classmethod + def _setup(cls): + if (not _config.master_key or not _config.host): + raise Exception( + "You must specify your Azure Cosmos account values for " + "'masterKey' and 'host' at the top of this class to run the " + "tests.") + + cls.client = cosmos_client.CosmosClient(_config.host, + {'masterKey': _config.master_key}, "Session", _config.connection_policy) + created_db = test_config._test_config.create_database_if_not_exist(cls.client) + cls.created_collection = cls._create_collection(created_db) + + # test documents + document_definitions = [] + + values = [None, False, True, "abc", "cdfg", "opqrs", "ttttttt", "xyz", "oo", "ppp"] + for value in 
values: + d = {_config.PARTITION_KEY: value, 'id': str(uuid.uuid4())} + document_definitions.append(d) + + for i in xrange(_config.DOCS_WITH_SAME_PARTITION_KEY): + d = {_config.PARTITION_KEY: _config.UNIQUE_PARTITION_KEY, + 'resourceId': i, + _config.FIELD: i + 1, + 'id': str(uuid.uuid4())} + document_definitions.append(d) + + _config.docs_with_numeric_id = \ + _config.DOCUMENTS_COUNT - len(values) - _config.DOCS_WITH_SAME_PARTITION_KEY + for i in xrange(_config.docs_with_numeric_id): + d = {_config.PARTITION_KEY: i + 1, 'id': str(uuid.uuid4())} + document_definitions.append(d) + + _config.sum = _config.docs_with_numeric_id \ + * (_config.docs_with_numeric_id + 1) / 2.0 + + cls._insert_doc(cls.created_collection, document_definitions) + + @classmethod + def _generate_test_configs(cls): + aggregate_query_format = 'SELECT VALUE {}(r.{}) FROM r WHERE {}' + aggregate_orderby_query_format = 'SELECT VALUE {}(r.{}) FROM r WHERE {} ORDER BY r.{}' + aggregate_configs = [ + ['AVG', _config.sum / _config.docs_with_numeric_id, + 'IS_NUMBER(r.{})'.format(_config.PARTITION_KEY)], + ['AVG', None, 'true'], + ['COUNT', _config.DOCUMENTS_COUNT, 'true'], + ['MAX', 'xyz', 'true'], + ['MIN', None, 'true'], + ['SUM', _config.sum, 'IS_NUMBER(r.{})'.format(_config.PARTITION_KEY)], + ['SUM', None, 'true'] + ] + for operator, expected, condition in aggregate_configs: + cls._all_tests.append([ + '{} {}'.format(operator, condition), + aggregate_query_format.format(operator, _config.PARTITION_KEY, condition), + expected]) + cls._all_tests.append([ + '{} {} OrderBy'.format(operator, condition), + aggregate_orderby_query_format.format(operator, _config.PARTITION_KEY, condition, + _config.PARTITION_KEY), + expected]) + + aggregate_single_partition_format = 'SELECT VALUE {}(r.{}) FROM r WHERE r.{} = \'{}\'' + aggregate_orderby_single_partition_format = 'SELECT {}(r.{}) FROM r WHERE r.{} = \'{}\'' + same_partiton_sum = _config.DOCS_WITH_SAME_PARTITION_KEY * (_config.DOCS_WITH_SAME_PARTITION_KEY + 
1) / 2.0 + aggregate_single_partition_configs = [ + ['AVG', same_partiton_sum / _config.DOCS_WITH_SAME_PARTITION_KEY], + ['COUNT', _config.DOCS_WITH_SAME_PARTITION_KEY], + ['MAX', _config.DOCS_WITH_SAME_PARTITION_KEY], + ['MIN', 1], + ['SUM', same_partiton_sum] + ] + for operator, expected in aggregate_single_partition_configs: + cls._all_tests.append([ + '{} SinglePartition {}'.format(operator, 'SELECT VALUE'), + aggregate_single_partition_format.format( + operator, _config.FIELD, _config.PARTITION_KEY, _config.UNIQUE_PARTITION_KEY), expected]) + cls._all_tests.append([ + '{} SinglePartition {}'.format(operator, 'SELECT'), + aggregate_orderby_single_partition_format.format( + operator, _config.FIELD, _config.PARTITION_KEY, _config.UNIQUE_PARTITION_KEY), + Exception()]) + + def test_run_all(self): + for test_name, query, expected_result in self._all_tests: + test_name = "test_%s" % test_name + try: + self._run_one(query, expected_result) + print(test_name + ': ' + query + " PASSED") + except Exception as e: + print(test_name + ': ' + query + " FAILED") + raise e + + def _run_one(self, query, expected_result): + self._execute_query_and_validate_results(self.created_collection, query, expected_result) + + @classmethod + def _create_collection(cls, created_db): + # type: (Database) -> Container + created_collection = created_db.create_container( + id='aggregate tests collection ' + str(uuid.uuid4()), + indexing_policy={ + 'includedPaths': [ + { + 'path': '/', + 'indexes': [ + { + 'kind': 'Range', + 'dataType': 'Number' + }, + { + 'kind': 'Range', + 'dataType': 'String' + } + ] + } + ] + }, + partition_key=PartitionKey( + path='/{}'.format(_config.PARTITION_KEY), + kind=documents.PartitionKind.Hash, + ), + offer_throughput=10100 + ) + return created_collection + + @classmethod + def _insert_doc(cls, collection, document_definitions): + # type: (Container, Dict[str, Any]) -> [Dict[str, Any]] + created_docs = [] + for d in document_definitions: + created_doc = 
collection.create_item(body=d) + created_docs.append(created_doc) + + return created_docs -@pytest.mark.usefixtures("teardown") -class AggregationQueryTest(with_metaclass(AggregateQueryTestSequenceMeta, unittest.TestCase)): def _execute_query_and_validate_results(self, collection, query, expected): # type: (Container, str, [Dict[str, Any]]) -> None - print('Running test with query: ' + query) # executes the query and validates the results against the expected results result_iterable = collection.query_items( @@ -239,5 +238,6 @@ def invokeNext(): else: _verify_result() + if __name__ == "__main__": unittest.main() From 2a25ad80f4b176abcba81c11c8e189e5da8bc6b4 Mon Sep 17 00:00:00 2001 From: Srinath Narayanan Date: Thu, 15 Aug 2019 17:41:13 -0700 Subject: [PATCH 06/29] fixed failing tests and bugs --- .../azure/cosmos/_default_retry_policy.py | 3 +- .../execution_dispatcher.py | 2 +- .../azure/cosmos/_query_iterable.py | 3 +- .../azure-cosmos/azure/cosmos/container.py | 72 +++++++++---------- .../azure/cosmos/cosmos_client.py | 20 +++--- .../azure-cosmos/azure/cosmos/database.py | 40 +++++------ sdk/cosmos/azure-cosmos/test/query_tests.py | 2 - .../azure-cosmos/test/retry_policy_tests.py | 2 +- 8 files changed, 72 insertions(+), 72 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py index 8cb094c1552a..409a54398625 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py @@ -54,7 +54,8 @@ def __init__(self, *args): def needsRetry(self, error_code): if error_code in DefaultRetryPolicy.CONNECTION_ERROR_CODES: if (len(self.args) > 0): - if (self.args[4]['method'] == 'GET') or (http_constants.HttpHeaders.IsQuery in self.args[4]['headers']): + if ((self.args[4]['method'] == 'GET') or (http_constants.HttpHeaders.IsQuery in self.args[4]['headers'])\ + or 
(http_constants.HttpHeaders.IsQueryPlanRequest in self.args[4]['headers'])): return True return False return True diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py index c3cd517b7156..cd9e4bb61aea 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py @@ -67,7 +67,7 @@ def __init__(self, client, options, execution_context, query_execution_info): self._endpoint = endpoint_component._QueryExecutionTopEndpointComponent(self._endpoint, limit) distinct_type = query_execution_info.get_distinct_type() - if distinct_type is not None: + if distinct_type != 'None': if distinct_type == "Ordered": self._endpoint = endpoint_component._QueryExecutionDistinctOrderedEndpointComponent(self._endpoint) else: diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py index 8d40f2605a51..0493a9eb41df 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py @@ -95,7 +95,8 @@ def _create_execution_context(self): if self._resource_type != http_constants.ResourceType.Document or self._use_default_query_execution_context(): return query_execution_context - query_execution_info = _PartitionedQueryExecutionInfo(self._client._GetQueryPlanThroughGateway(self._query, self._resource_link)) + query_to_use = self._query if self._query is not None else "Select * from root r" + query_execution_info = _PartitionedQueryExecutionInfo(self._client._GetQueryPlanThroughGateway(query_to_use, self._resource_link)) # Non value aggregates must go through DefaultDocumentQueryExecutionContext # Single partition query can serve queries like SELECT AVG(c.age) FROM c diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py 
b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py index 7d2b9fd35795..ddac0b98e774 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py @@ -124,11 +124,11 @@ def read( :returns: :class:`Container` instance representing the retrieved container. """ - if not request_options: + if not request_options is not None: request_options = {} # type: Dict[str, Any] - if session_token: + if session_token is not None: request_options["sessionToken"] = session_token - if initial_headers: + if initial_headers is not None: request_options["initialHeaders"] = initial_headers if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics @@ -186,15 +186,15 @@ def read_item( if not request_options: request_options = {} # type: Dict[str, Any] - if partition_key: + if partition_key is not None: request_options["partitionKey"] = self._set_partition_key(partition_key) - if session_token: + if session_token is not None: request_options["sessionToken"] = session_token - if initial_headers: + if initial_headers is not None: request_options["initialHeaders"] = initial_headers if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics - if post_trigger_include: + if post_trigger_include is not None: request_options["postTriggerInclude"] = post_trigger_include result = self.client_connection.ReadItem( @@ -228,9 +228,9 @@ def read_all_items( feed_options = {} # type: Dict[str, Any] if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - if session_token: + if session_token is not None: feed_options["sessionToken"] = session_token - if initial_headers: + if initial_headers is not None: feed_options["initialHeaders"] = initial_headers if populate_query_metrics is not None: feed_options["populateQueryMetrics"] = populate_query_metrics @@ -346,9 +346,9 @@ def query_items( feed_options["enableCrossPartitionQuery"] = 
enable_cross_partition_query if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - if session_token: + if session_token is not None: feed_options["sessionToken"] = session_token - if initial_headers: + if initial_headers is not None: feed_options["initialHeaders"] = initial_headers if populate_query_metrics is not None: feed_options["populateQueryMetrics"] = populate_query_metrics @@ -407,17 +407,17 @@ def replace_item( if not request_options: request_options = {} # type: Dict[str, Any] request_options["disableIdGeneration"] = True - if session_token: + if session_token is not None: request_options["sessionToken"] = session_token - if initial_headers: + if initial_headers is not None: request_options["initialHeaders"] = initial_headers - if access_condition: + if access_condition is not None: request_options["accessCondition"] = access_condition if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics - if pre_trigger_include: + if pre_trigger_include is not None: request_options["preTriggerInclude"] = pre_trigger_include - if post_trigger_include: + if post_trigger_include is not None: request_options["postTriggerInclude"] = post_trigger_include result = self.client_connection.ReplaceItem( @@ -462,17 +462,17 @@ def upsert_item( if not request_options: request_options = {} # type: Dict[str, Any] request_options["disableIdGeneration"] = True - if session_token: + if session_token is not None: request_options["sessionToken"] = session_token - if initial_headers: + if initial_headers is not None: request_options["initialHeaders"] = initial_headers - if access_condition: + if access_condition is not None: request_options["accessCondition"] = access_condition if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics - if pre_trigger_include: + if pre_trigger_include is not None: request_options["preTriggerInclude"] = pre_trigger_include - if 
post_trigger_include: + if post_trigger_include is not None: request_options["postTriggerInclude"] = post_trigger_include result = self.client_connection.UpsertItem( @@ -519,19 +519,19 @@ def create_item( request_options = {} # type: Dict[str, Any] request_options["disableAutomaticIdGeneration"] = True - if session_token: + if session_token is not None: request_options["sessionToken"] = session_token - if initial_headers: + if initial_headers is not None: request_options["initialHeaders"] = initial_headers - if access_condition: + if access_condition is not None: request_options["accessCondition"] = access_condition - if populate_query_metrics: + if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics - if pre_trigger_include: + if pre_trigger_include is not None: request_options["preTriggerInclude"] = pre_trigger_include - if post_trigger_include: + if post_trigger_include is not None: request_options["postTriggerInclude"] = post_trigger_include - if indexing_directive: + if indexing_directive is not None: request_options["indexingDirective"] = indexing_directive result = self.client_connection.CreateItem( @@ -574,19 +574,19 @@ def delete_item( """ if not request_options: request_options = {} # type: Dict[str, Any] - if partition_key: + if partition_key is not None: request_options["partitionKey"] = self._set_partition_key(partition_key) - if session_token: + if session_token is not None: request_options["sessionToken"] = session_token - if initial_headers: + if initial_headers is not None: request_options["initialHeaders"] = initial_headers - if access_condition: + if access_condition is not None: request_options["accessCondition"] = access_condition if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics - if pre_trigger_include: + if pre_trigger_include is not None: request_options["preTriggerInclude"] = pre_trigger_include - if post_trigger_include: + if 
post_trigger_include is not None: request_options["postTriggerInclude"] = post_trigger_include document_link = self._get_document_link(item) @@ -755,7 +755,7 @@ def get_conflict( """ if not request_options: request_options = {} # type: Dict[str, Any] - if partition_key: + if partition_key is not None: request_options["partitionKey"] = self._set_partition_key(partition_key) result = self.client_connection.ReadConflict( @@ -785,7 +785,7 @@ def delete_conflict( """ if not request_options: request_options = {} # type: Dict[str, Any] - if partition_key: + if partition_key is not None: request_options["partitionKey"] = self._set_partition_key(partition_key) result = self.client_connection.DeleteConflict( diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py index a4d5ff828734..500260378466 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py @@ -125,11 +125,11 @@ def create_database( if not request_options: request_options = {} # type: Dict[str, Any] - if session_token: + if session_token is not None: request_options["sessionToken"] = session_token - if initial_headers: + if initial_headers is not None: request_options["initialHeaders"] = initial_headers - if access_condition: + if access_condition is not None: request_options["accessCondition"] = access_condition if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics @@ -191,9 +191,9 @@ def read_all_databases( feed_options = {} # type: Dict[str, Any] if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - if session_token: + if session_token is not None: feed_options["sessionToken"] = session_token - if initial_headers: + if initial_headers is not None: feed_options["initialHeaders"] = initial_headers if populate_query_metrics is not None: feed_options["populateQueryMetrics"] = populate_query_metrics @@ 
-240,9 +240,9 @@ def query_databases( feed_options["enableCrossPartitionQuery"] = enable_cross_partition_query if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - if session_token: + if session_token is not None: feed_options["sessionToken"] = session_token - if initial_headers: + if initial_headers is not None: feed_options["initialHeaders"] = initial_headers if populate_query_metrics is not None: feed_options["populateQueryMetrics"] = populate_query_metrics @@ -291,11 +291,11 @@ def delete_database( """ if not request_options: request_options = {} # type: Dict[str, Any] - if session_token: + if session_token is not None: request_options["sessionToken"] = session_token - if initial_headers: + if initial_headers is not None: request_options["initialHeaders"] = initial_headers - if access_condition: + if access_condition is not None: request_options["accessCondition"] = access_condition if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/database.py b/sdk/cosmos/azure-cosmos/azure/cosmos/database.py index 21df096ca050..8a6dd153e846 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/database.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/database.py @@ -136,9 +136,9 @@ def read( database_link = CosmosClient._get_database_link(self) if not request_options: request_options = {} # type: Dict[str, Any] - if session_token: + if session_token is not None: request_options["sessionToken"] = session_token - if initial_headers: + if initial_headers is not None: request_options["initialHeaders"] = initial_headers if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics @@ -209,24 +209,24 @@ def create_container( """ definition = dict(id=id) # type: Dict[str, Any] - if partition_key: + if partition_key is not None: definition["partitionKey"] = partition_key - if indexing_policy: + if indexing_policy is not 
None: definition["indexingPolicy"] = indexing_policy - if default_ttl: + if default_ttl is not None: definition["defaultTtl"] = default_ttl - if unique_key_policy: + if unique_key_policy is not None: definition["uniqueKeyPolicy"] = unique_key_policy - if conflict_resolution_policy: + if conflict_resolution_policy is not None: definition["conflictResolutionPolicy"] = conflict_resolution_policy if not request_options: request_options = {} # type: Dict[str, Any] - if session_token: + if session_token is not None: request_options["sessionToken"] = session_token - if initial_headers: + if initial_headers is not None: request_options["initialHeaders"] = initial_headers - if access_condition: + if access_condition is not None: request_options["accessCondition"] = access_condition if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics @@ -269,11 +269,11 @@ def delete_container( """ if not request_options: request_options = {} # type: Dict[str, Any] - if session_token: + if session_token is not None: request_options["sessionToken"] = session_token - if initial_headers: + if initial_headers is not None: request_options["initialHeaders"] = initial_headers - if access_condition: + if access_condition is not None: request_options["accessCondition"] = access_condition if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics @@ -347,9 +347,9 @@ def read_all_containers( feed_options = {} # type: Dict[str, Any] if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - if session_token: + if session_token is not None: feed_options["sessionToken"] = session_token - if initial_headers: + if initial_headers is not None: feed_options["initialHeaders"] = initial_headers if populate_query_metrics is not None: feed_options["populateQueryMetrics"] = populate_query_metrics @@ -392,9 +392,9 @@ def query_containers( feed_options = {} # type: Dict[str, Any] if max_item_count is 
not None: feed_options["maxItemCount"] = max_item_count - if session_token: + if session_token is not None: feed_options["sessionToken"] = session_token - if initial_headers: + if initial_headers is not None: feed_options["initialHeaders"] = initial_headers if populate_query_metrics is not None: feed_options["populateQueryMetrics"] = populate_query_metrics @@ -454,11 +454,11 @@ def replace_container( """ if not request_options: request_options = {} # type: Dict[str, Any] - if session_token: + if session_token is not None: request_options["sessionToken"] = session_token - if initial_headers: + if initial_headers is not None: request_options["initialHeaders"] = initial_headers - if access_condition: + if access_condition is not None: request_options["accessCondition"] = access_condition if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics diff --git a/sdk/cosmos/azure-cosmos/test/query_tests.py b/sdk/cosmos/azure-cosmos/test/query_tests.py index 998e3c627d5d..1657354cdd3e 100644 --- a/sdk/cosmos/azure-cosmos/test/query_tests.py +++ b/sdk/cosmos/azure-cosmos/test/query_tests.py @@ -191,7 +191,6 @@ def test_max_item_count_honored_in_order_by_query(self): max_item_count=1, enable_cross_partition_query=True ) - # 1 call to get query plans, 1 call to get pkr, 10 calls to one partion with the documents, 1 call each to other 4 partitions self.validate_query_requests_count(query_iterable, 29) query_iterable = created_collection.query_items( @@ -200,7 +199,6 @@ def test_max_item_count_honored_in_order_by_query(self): enable_cross_partition_query=True ) - # 1 call to get query plan 1 calls to one partition with the documents, 1 call each to other 4 partitions self.validate_query_requests_count(query_iterable, 11) def validate_query_requests_count(self, query_iterable, expected_count): diff --git a/sdk/cosmos/azure-cosmos/test/retry_policy_tests.py b/sdk/cosmos/azure-cosmos/test/retry_policy_tests.py index 
f25dfce77e78..c0061ea9267a 100644 --- a/sdk/cosmos/azure-cosmos/test/retry_policy_tests.py +++ b/sdk/cosmos/azure-cosmos/test/retry_policy_tests.py @@ -192,7 +192,7 @@ def test_default_retry_policy_for_query(self): result_docs = list(docs) self.assertEqual(result_docs[0]['id'], 'doc1') self.assertEqual(result_docs[1]['id'], 'doc2') - self.assertEqual(mf.counter, 12) + self.assertEqual(mf.counter, 15) finally: _retry_utility.ExecuteFunction = original_execute_function From a88ff226af051cac6299a916cb816877098004b4 Mon Sep 17 00:00:00 2001 From: Srinath Narayanan Date: Fri, 16 Aug 2019 11:34:30 -0700 Subject: [PATCH 07/29] updated tests --- sdk/cosmos/azure-cosmos/test/query_tests.py | 101 ++++++++++++-------- 1 file changed, 62 insertions(+), 39 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/test/query_tests.py b/sdk/cosmos/azure-cosmos/test/query_tests.py index 1657354cdd3e..dbf724e6cb6b 100644 --- a/sdk/cosmos/azure-cosmos/test/query_tests.py +++ b/sdk/cosmos/azure-cosmos/test/query_tests.py @@ -258,20 +258,23 @@ def test_query_with_non_overlapping_pk_ranges(self): def test_offset_limit(self): created_collection = self.config.create_multi_partition_collection_with_custom_pk_if_not_exist(self.client) + max_item_counts = [0, 2, 5, 10] values = [] for i in range(10): document_definition = {'pk': i, 'id': 'myId' + str(uuid.uuid4())} values.append(created_collection.create_item(body=document_definition)['pk']) - self._validate_offset_limit(created_collection, 'SELECT * from c ORDER BY c.pk OFFSET 0 LIMIT 5', values[:5]) - self._validate_offset_limit(created_collection, 'SELECT * from c ORDER BY c.pk OFFSET 5 LIMIT 10', values[5:]) - self._validate_offset_limit(created_collection, 'SELECT * from c ORDER BY c.pk OFFSET 10 LIMIT 5', []) - self._validate_offset_limit(created_collection, 'SELECT * from c ORDER BY c.pk OFFSET 100 LIMIT 1', []) + for max_item_count in max_item_counts: + self._validate_offset_limit(created_collection, 'SELECT * from c ORDER BY c.pk OFFSET 0 
LIMIT 5', max_item_count, values[:5]) + self._validate_offset_limit(created_collection, 'SELECT * from c ORDER BY c.pk OFFSET 5 LIMIT 10', max_item_count, values[5:]) + self._validate_offset_limit(created_collection, 'SELECT * from c ORDER BY c.pk OFFSET 10 LIMIT 5', max_item_count, []) + self._validate_offset_limit(created_collection, 'SELECT * from c ORDER BY c.pk OFFSET 100 LIMIT 1', max_item_count, []) - def _validate_offset_limit(self, created_collection, query, results): + def _validate_offset_limit(self, created_collection, query, max_item_count, results): query_iterable = created_collection.query_items( query=query, - enable_cross_partition_query=True + enable_cross_partition_query=True, + max_item_count=max_item_count ) self.assertListEqual(list(map(lambda doc: doc['pk'], list(query_iterable))), results) @@ -305,39 +308,59 @@ def test_distinct(self): padded_docs = self._pad_with_none(documents, distinct_field) - self._validate_distinct(created_collection, 'SELECT distinct c.%s from c ORDER BY c.%s' % (distinct_field, distinct_field), - self._get_distinct_docs(self._get_order_by_docs(padded_docs, distinct_field, None), distinct_field, None, True), - False, [distinct_field]) - - self._validate_distinct(created_collection, 'SELECT distinct c.%s, c.%s from c ORDER BY c.%s, c.%s' % (distinct_field, pk_field, pk_field, distinct_field), - self._get_distinct_docs(self._get_order_by_docs(padded_docs, pk_field, distinct_field), distinct_field, pk_field, True), - False, [distinct_field, pk_field]) - - self._validate_distinct(created_collection, 'SELECT distinct c.%s, c.%s from c ORDER BY c.%s, c.%s' % (distinct_field, pk_field, distinct_field, pk_field), - self._get_distinct_docs(self._get_order_by_docs(padded_docs, distinct_field, pk_field), distinct_field, pk_field, True), - False, [distinct_field, pk_field]) - - self._validate_distinct(created_collection, 'SELECT distinct value c.%s from c ORDER BY c.%s' % (distinct_field, distinct_field), - 
self._get_distinct_docs(self._get_order_by_docs(padded_docs, distinct_field, None), distinct_field, None, True), - False, [distinct_field]) - - self._validate_distinct(created_collection, 'SELECT distinct c.%s from c' % (distinct_field), - self._get_distinct_docs(padded_docs, distinct_field, None, False), - True, [distinct_field]) - - self._validate_distinct(created_collection, 'SELECT distinct c.%s, c.%s from c' % (distinct_field, pk_field), - self._get_distinct_docs(padded_docs, distinct_field, pk_field, False), - True, [distinct_field, pk_field]) - - self._validate_distinct(created_collection, 'SELECT distinct value c.%s from c' % (distinct_field), - self._get_distinct_docs(padded_docs, distinct_field, None, True), - True, [distinct_field]) - - self._validate_distinct(created_collection, 'SELECT distinct c.%s from c ORDER BY c.%s' % (different_field, different_field), [], - True, [different_field]) - - self._validate_distinct(created_collection, 'SELECT distinct c.%s from c' % (different_field), ['None'], - True, different_field) + self._validate_distinct(created_collection=created_collection, + query='SELECT distinct c.%s from c ORDER BY c.%s' % (distinct_field, distinct_field), + results=self._get_distinct_docs(self._get_order_by_docs(padded_docs, distinct_field, None), distinct_field, None, True), + is_select=False, + fields=[distinct_field]) + + self._validate_distinct(created_collection=created_collection, + query='SELECT distinct c.%s, c.%s from c ORDER BY c.%s, c.%s' % (distinct_field, pk_field, pk_field, distinct_field), + results=self._get_distinct_docs(self._get_order_by_docs(padded_docs, pk_field, distinct_field), distinct_field, pk_field, True), + is_select=False, + fields=[distinct_field, pk_field]) + + self._validate_distinct(created_collection=created_collection, + query='SELECT distinct c.%s, c.%s from c ORDER BY c.%s, c.%s' % (distinct_field, pk_field, distinct_field, pk_field), + 
results=self._get_distinct_docs(self._get_order_by_docs(padded_docs, distinct_field, pk_field), distinct_field, pk_field, True), + is_select=False, + fields=[distinct_field, pk_field]) + + self._validate_distinct(created_collection=created_collection, + query='SELECT distinct value c.%s from c ORDER BY c.%s' % (distinct_field, distinct_field), + results=self._get_distinct_docs(self._get_order_by_docs(padded_docs, distinct_field, None), distinct_field, None, True), + is_select=False, + fields=[distinct_field]) + + self._validate_distinct(created_collection=created_collection, + query='SELECT distinct c.%s from c' % (distinct_field), + results=self._get_distinct_docs(padded_docs, distinct_field, None, False), + is_select=True, + fields=[distinct_field]) + + self._validate_distinct(created_collection=created_collection, + query='SELECT distinct c.%s, c.%s from c' % (distinct_field, pk_field), + results=self._get_distinct_docs(padded_docs, distinct_field, pk_field, False), + is_select=True, + fields=[distinct_field, pk_field]) + + self._validate_distinct(created_collection=created_collection, + query='SELECT distinct value c.%s from c' % (distinct_field), + results=self._get_distinct_docs(padded_docs, distinct_field, None, True), + is_select=True, + fields=[distinct_field]) + + self._validate_distinct(created_collection=created_collection, + query='SELECT distinct c.%s from c ORDER BY c.%s' % (different_field, different_field), + results=[], + is_select=True, + fields=[different_field]) + + self._validate_distinct(created_collection=created_collection, + query='SELECT distinct c.%s from c' % (different_field), + results=['None'], + is_select=True, + fields=[different_field]) created_database.delete_container(created_collection.id) From 57c250d7700dfa427f559b96ad841d6fe6647ef5 Mon Sep 17 00:00:00 2001 From: Srinath Narayanan Date: Mon, 26 Aug 2019 14:30:42 -0700 Subject: [PATCH 08/29] fixed hashing problem for distinct --- .../_execution_context/endpoint_component.py | 
17 +++- sdk/cosmos/azure-cosmos/test/query_tests.py | 96 +++++++++++++++++++ 2 files changed, 111 insertions(+), 2 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py index e6099f8e511d..a33bea5c5b2e 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py @@ -22,6 +22,7 @@ """Internal class for query execution endpoint component implementation in the Azure Cosmos database service. """ import numbers +import copy from azure.cosmos._execution_context.aggregators import _AverageAggregator, _CountAggregator, _MaxAggregator, \ _MinAggregator, _SumAggregator @@ -95,11 +96,23 @@ def __init__(self, execution_context): super(_QueryExecutionDistinctUnorderedEndpointComponent, self).__init__(execution_context) self.last_result = set() + def make_hash(self, value): + if isinstance(value, (set, tuple, list)): + return tuple([self.make_hash(v) for v in value]) + elif not isinstance(value, dict): + return hash(value) + new_value = copy.deepcopy(value) + for k, v in new_value.items(): + new_value[k] = self.make_hash(v) + + return hash(tuple(frozenset(sorted(new_value.items())))) + def next(self): res = next(self._execution_context) - while str(res) in self.last_result: + hashed_result = self.make_hash(res) + while hashed_result in self.last_result: res = next(self._execution_context) - self.last_result.add(str(res)) + self.last_result.add(self.make_hash(res)) return res diff --git a/sdk/cosmos/azure-cosmos/test/query_tests.py b/sdk/cosmos/azure-cosmos/test/query_tests.py index dbf724e6cb6b..521e0ebee401 100644 --- a/sdk/cosmos/azure-cosmos/test/query_tests.py +++ b/sdk/cosmos/azure-cosmos/test/query_tests.py @@ -5,6 +5,7 @@ from azure.cosmos._execution_context.query_execution_info import _PartitionedQueryExecutionInfo import 
azure.cosmos.errors as errors from azure.cosmos.partition_key import PartitionKey +from azure.cosmos._execution_context.base_execution_context import _QueryExecutionContextBase import pytest import collections import test_config @@ -411,5 +412,100 @@ def _get_query_result_string(self, query_result, fields): return res + def test_distinct_on_different_types_and_field_orders(self): + created_collection = self.config.create_multi_partition_collection_with_custom_pk_if_not_exist(self.client) + self.payloads = [ + {'f1': 1, 'f2': 'value', 'f3': 100000000000000000, 'f4': [1, 2, '3'], 'f5': {'f6': {'f7': 2}}}, + {'f2': '\'value', 'f4': [1.0, 2, '3'], 'f5': {'f6': {'f7': 2.0}}, 'f1': 1.0, 'f3': 100000000000000000.00}, + {'f3': 100000000000000000.0, 'f5': {'f6': {'f7': 2}}, 'f2': '\'value', 'f1': 1, 'f4': [1, 2.0, '3']} + ] + self.OriginalExecuteFunction = _QueryExecutionContextBase.next + _QueryExecutionContextBase.next = self._MockNextFunction + + self._validate_distinct_on_different_types_and_field_orders( + collection=created_collection, + query="Select distinct value c.f1 from c", + expected_results=[1], + get_mock_result=lambda x, i: (None, x[i]["f1"]) + ) + + self._validate_distinct_on_different_types_and_field_orders( + collection=created_collection, + query="Select distinct value c.f2 from c", + expected_results=['value', '\'value'], + get_mock_result=lambda x, i: (None, x[i]["f2"]) + ) + + self._validate_distinct_on_different_types_and_field_orders( + collection=created_collection, + query="Select distinct value c.f2 from c order by c.f2", + expected_results=['\'value', 'value'], + get_mock_result=lambda x, i: (x[i]["f2"], x[i]["f2"]) + ) + + self._validate_distinct_on_different_types_and_field_orders( + collection=created_collection, + query="Select distinct value c.f3 from c", + expected_results=[100000000000000000], + get_mock_result=lambda x, i: (None, x[i]["f3"]) + ) + + self._validate_distinct_on_different_types_and_field_orders( + 
collection=created_collection, + query="Select distinct value c.f4 from c", + expected_results=[[1, 2, '3']], + get_mock_result=lambda x, i: (None, x[i]["f4"]) + ) + + self._validate_distinct_on_different_types_and_field_orders( + collection=created_collection, + query="Select distinct value c.f5.f6 from c", + expected_results=[{'f7': 2}], + get_mock_result=lambda x, i: (None, x[i]["f5"]["f6"]) + ) + + self._validate_distinct_on_different_types_and_field_orders( + collection=created_collection, + query="Select distinct c.f1, c.f2, c.f3 from c", + expected_results=[self.payloads[0], self.payloads[1]], + get_mock_result=lambda x, i: (None, x[i]) + ) + + self._validate_distinct_on_different_types_and_field_orders( + collection=created_collection, + query="Select distinct c.f1, c.f2, c.f3 from c order by c.f1", + expected_results=[self.payloads[0], self.payloads[1]], + get_mock_result=lambda x, i: (i, x[i]) + ) + + _QueryExecutionContextBase.next = self.OriginalExecuteFunction + + def _validate_distinct_on_different_types_and_field_orders(self, collection, query, expected_results, get_mock_result): + self.count = 0 + self.get_mock_result = get_mock_result + query_iterable = collection.query_items(query, enable_cross_partition_query=True) + results = list(query_iterable) + for i in range(len(expected_results)): + if isinstance(results[i], dict): + self.assertDictEqual(results[i], expected_results[i]) + elif isinstance(results[i], list): + self.assertListEqual(results[i], expected_results[i]) + else: + self.assertEquals(results[i], expected_results[i]) + self.count = 0 + + def _MockNextFunction(self): + if self.count < len(self.payloads): + item, result = self.get_mock_result(self.payloads, self.count) + self.count += 1 + if item is not None: + return {'orderByItems': [{'item': item}], '_rid': 'fake_rid', 'payload': result} + else: + return result + return result + else: + raise StopIteration + + if __name__ == "__main__": unittest.main() \ No newline at end of file From 
2b0d90f3753c610702e9df6185ce7d975399a7d3 Mon Sep 17 00:00:00 2001 From: Srinath Narayanan Date: Tue, 27 Aug 2019 14:37:29 -0700 Subject: [PATCH 09/29] fixed bug in distinct queries --- .../azure/cosmos/_cosmos_client_connection.py | 3 + .../azure/cosmos/_default_retry_policy.py | 4 +- .../_execution_context/endpoint_component.py | 15 ++--- .../execution_dispatcher.py | 11 ++-- .../query_execution_info.py | 3 +- .../azure/cosmos/_query_iterable.py | 6 +- .../cosmos/_routing/routing_map_provider.py | 2 +- .../azure-cosmos/azure/cosmos/container.py | 2 +- .../azure-cosmos/azure/cosmos/documents.py | 5 ++ sdk/cosmos/azure-cosmos/test/query_tests.py | 57 ++++++++++++++++--- 10 files changed, 82 insertions(+), 26 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py index 096353fa1617..74f9dce7015c 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py @@ -2758,6 +2758,9 @@ def __QueryFeed(self, The request options for the request. :param str partition_key_range_id: Specifies partition key range id. 
+ :param function response_hook: + :param bool is_query_plan: + Specififes if the call is to fetch query plan :rtype: list diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py index 409a54398625..68a6d711cf81 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py @@ -54,8 +54,8 @@ def __init__(self, *args): def needsRetry(self, error_code): if error_code in DefaultRetryPolicy.CONNECTION_ERROR_CODES: if (len(self.args) > 0): - if ((self.args[4]['method'] == 'GET') or (http_constants.HttpHeaders.IsQuery in self.args[4]['headers'])\ - or (http_constants.HttpHeaders.IsQueryPlanRequest in self.args[4]['headers'])): + if (self.args[4]['method'] == 'GET') or (http_constants.HttpHeaders.IsQuery in self.args[4]['headers'])\ + or (http_constants.HttpHeaders.IsQueryPlanRequest in self.args[4]['headers']): return True return False return True diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py index a33bea5c5b2e..d99f00a2cdfc 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py @@ -96,23 +96,24 @@ def __init__(self, execution_context): super(_QueryExecutionDistinctUnorderedEndpointComponent, self).__init__(execution_context) self.last_result = set() - def make_hash(self, value): + def make_hashable(self, value): if isinstance(value, (set, tuple, list)): - return tuple([self.make_hash(v) for v in value]) + return tuple([self.make_hashable(v) for v in value]) elif not isinstance(value, dict): - return hash(value) + return value new_value = copy.deepcopy(value) for k, v in new_value.items(): - new_value[k] = self.make_hash(v) + new_value[k] = self.make_hashable(v) - return 
hash(tuple(frozenset(sorted(new_value.items())))) + return tuple(frozenset(sorted(new_value.items()))) def next(self): res = next(self._execution_context) - hashed_result = self.make_hash(res) + hashed_result = self.make_hashable(res) while hashed_result in self.last_result: res = next(self._execution_context) - self.last_result.add(self.make_hash(res)) + hashed_result = self.make_hashable(res) + self.last_result.add(hashed_result) return res diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py index cd9e4bb61aea..9962e0e00850 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py @@ -25,6 +25,7 @@ from six.moves import xrange from azure.cosmos._execution_context.base_execution_context import _QueryExecutionContextBase from azure.cosmos._execution_context import endpoint_component +from azure.cosmos.documents import _DistinctType class _PipelineExecutionContext(_QueryExecutionContextBase): @@ -55,20 +56,20 @@ def __init__(self, client, options, execution_context, query_execution_info): self._endpoint = endpoint_component._QueryExecutionAggregateEndpointComponent(self._endpoint, aggregates) offset = query_execution_info.get_offset() - if not (offset is None): + if offset is not None: self._endpoint = endpoint_component._QueryExecutionOffsetEndpointComponent(self._endpoint, offset) top = query_execution_info.get_top() - if not (top is None): + if top is not None: self._endpoint = endpoint_component._QueryExecutionTopEndpointComponent(self._endpoint, top) limit = query_execution_info.get_limit() - if not (limit is None): + if limit is not None: self._endpoint = endpoint_component._QueryExecutionTopEndpointComponent(self._endpoint, limit) distinct_type = query_execution_info.get_distinct_type() - if distinct_type != 'None': - 
if distinct_type == "Ordered": + if distinct_type != _DistinctType.NoneType: + if distinct_type == _DistinctType.Ordered: self._endpoint = endpoint_component._QueryExecutionDistinctOrderedEndpointComponent(self._endpoint) else: self._endpoint = endpoint_component._QueryExecutionDistinctUnorderedEndpointComponent(self._endpoint) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/query_execution_info.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/query_execution_info.py index 888f5a557131..9cd1589e20c8 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/query_execution_info.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/query_execution_info.py @@ -23,6 +23,7 @@ """ import six +from azure.cosmos.documents import _DistinctType class _PartitionedQueryExecutionInfo(object): ''' @@ -105,7 +106,7 @@ def has_offset(self): return self.get_offset() is not None def has_distinct_type(self): - return self.get_distinct_type() != "None" + return self.get_distinct_type() != _DistinctType.NoneType def has_order_by(self): order_by = self.get_order_by() diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py index 0493a9eb41df..00b340f956f7 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py @@ -44,7 +44,9 @@ def __init__(self, client, query, options, fetch_function, resource_type, resour :param dict options: The request options for the request. :param method fetch_function: - :param str collection_link: + :param method resource_type: + The type of the resource being queried + :param str resource_link: If this is a Document query/feed collection_link is required. 
Example of `fetch_function`: @@ -122,7 +124,7 @@ def _use_default_query_execution_context(self): def _create_pipelined_execution_context(self, query_execution_info): - assert self._resource_link, "code bug, resource_link has is required." + assert self._resource_link, "code bug, resource_link is required." execution_context_aggregator = multi_execution_aggregator._MultiExecutionContextAggregator(self._client, self._resource_link, self._query, diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/routing_map_provider.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/routing_map_provider.py index 773067c1ddf8..c6b8982f7296 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/routing_map_provider.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/routing_map_provider.py @@ -94,7 +94,7 @@ def _second_range_is_after_first_range(self, range1, range2): ##r.min < #previous_r.max return False else: - if (range2.min == range1.max and range1.isMaxInclusive and range2.isMinInclusive): + if range2.min == range1.max and range1.isMaxInclusive and range2.isMinInclusive: # the inclusive ending endpoint of previous_r is the same as the inclusive beginning endpoint of r return False diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py index ddac0b98e774..dfae1201077c 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py @@ -124,7 +124,7 @@ def read( :returns: :class:`Container` instance representing the retrieved container. 
""" - if not request_options is not None: + if not request_options: request_options = {} # type: Dict[str, Any] if session_token is not None: request_options["sessionToken"] = session_token diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py b/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py index 8ddbbc697746..9938330310c6 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py @@ -430,3 +430,8 @@ class _QueryFeature(object): OffsetAndLimit = "OffsetAndLimit" OrderBy = "OrderBy" Top = "Top" + +class _DistinctType(object): + NoneType = "None" + Ordered = "Ordered" + Unordered = "Unordered" diff --git a/sdk/cosmos/azure-cosmos/test/query_tests.py b/sdk/cosmos/azure-cosmos/test/query_tests.py index 521e0ebee401..de3132edf88e 100644 --- a/sdk/cosmos/azure-cosmos/test/query_tests.py +++ b/sdk/cosmos/azure-cosmos/test/query_tests.py @@ -6,6 +6,7 @@ import azure.cosmos.errors as errors from azure.cosmos.partition_key import PartitionKey from azure.cosmos._execution_context.base_execution_context import _QueryExecutionContextBase +from azure.cosmos.documents import _DistinctType import pytest import collections import test_config @@ -219,9 +220,35 @@ def _MockExecuteFunction(self, function, *args, **kwargs): def test_get_query_plan_through_gateway(self): created_collection = self.config.create_multi_partition_collection_with_custom_pk_if_not_exist(self.client) - self._validate_query_plan("Select top 10 value count(c.id) from c", created_collection.container_link, 10, [], ['Count'], True, None, None, "None") - self._validate_query_plan("Select * from c order by c._ts offset 5 limit 10", created_collection.container_link, None, ['Ascending'], [], False, 5, 10, "None") - self._validate_query_plan("Select distinct value c.id from c order by c.id", created_collection.container_link, None, ['Ascending'], [], True, None, None, "Ordered") + self._validate_query_plan(query="Select top 10 value count(c.id) 
from c", + container_link=created_collection.container_link, + top=10, + order_by=[], + aggregate=['Count'], + select_value=True, + offset=None, + limit=None, + distinct=_DistinctType.NoneType) + + self._validate_query_plan(query="Select * from c order by c._ts offset 5 limit 10", + container_link=created_collection.container_link, + top=None, + order_by=['Ascending'], + aggregate=[], + select_value=False, + offset=5, + limit=10, + distinct=_DistinctType.NoneType) + + self._validate_query_plan(query="Select distinct value c.id from c order by c.id", + container_link=created_collection.container_link, + top=None, + order_by=['Ascending'], + aggregate=[], + select_value=True, + offset=None, + limit=None, + distinct=_DistinctType.Ordered) def _validate_query_plan(self, query, container_link, top, order_by, aggregate, select_value, offset, limit, distinct): query_plan_dict = self.client.client_connection._GetQueryPlanThroughGateway(query, container_link) @@ -266,10 +293,25 @@ def test_offset_limit(self): values.append(created_collection.create_item(body=document_definition)['pk']) for max_item_count in max_item_counts: - self._validate_offset_limit(created_collection, 'SELECT * from c ORDER BY c.pk OFFSET 0 LIMIT 5', max_item_count, values[:5]) - self._validate_offset_limit(created_collection, 'SELECT * from c ORDER BY c.pk OFFSET 5 LIMIT 10', max_item_count, values[5:]) - self._validate_offset_limit(created_collection, 'SELECT * from c ORDER BY c.pk OFFSET 10 LIMIT 5', max_item_count, []) - self._validate_offset_limit(created_collection, 'SELECT * from c ORDER BY c.pk OFFSET 100 LIMIT 1', max_item_count, []) + self._validate_offset_limit(created_collection=created_collection, + query='SELECT * from c ORDER BY c.pk OFFSET 0 LIMIT 5', + max_item_count=max_item_count, + results=values[:5]) + + self._validate_offset_limit(created_collection=created_collection, + query='SELECT * from c ORDER BY c.pk OFFSET 5 LIMIT 10', + max_item_count=max_item_count, + results=values[5:]) 
+ + self._validate_offset_limit(created_collection=created_collection, + query='SELECT * from c ORDER BY c.pk OFFSET 10 LIMIT 5', + max_item_count=max_item_count, + results=[]) + + self._validate_offset_limit(created_collection=created_collection, + query='SELECT * from c ORDER BY c.pk OFFSET 100 LIMIT 1', + max_item_count=max_item_count, + results=[]) def _validate_offset_limit(self, created_collection, query, max_item_count, results): query_iterable = created_collection.query_items( @@ -392,6 +434,7 @@ def _validate_distinct(self, created_collection, query, results, is_select, fiel enable_cross_partition_query=True ) query_results = list(query_iterable) + self.assertEquals(len(results), len(query_results)) query_results_strings = [] result_strings = [] From 875e55b501a9ba9378b2fc5e8ddd99219faf244c Mon Sep 17 00:00:00 2001 From: Srinath Narayanan Date: Wed, 28 Aug 2019 10:47:00 -0700 Subject: [PATCH 10/29] replaced single quotes with double quotes --- sdk/cosmos/azure-cosmos/azure/cosmos/_base.py | 10 ++--- .../azure/cosmos/_cosmos_client_connection.py | 38 +++++++++---------- .../azure/cosmos/_default_retry_policy.py | 4 +- .../query_execution_info.py | 20 +++++----- .../azure/cosmos/_query_iterable.py | 4 +- .../azure-cosmos/azure/cosmos/documents.py | 28 +++++++------- .../azure/cosmos/http_constants.py | 10 ++--- 7 files changed, 57 insertions(+), 57 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py index b932d68e602e..4622988439ab 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py @@ -138,19 +138,19 @@ def GetHeaders( # pylint: disable=too-many-statements,too-many-branches if options.get("offerThroughput"): headers[http_constants.HttpHeaders.OfferThroughput] = options["offerThroughput"] - if options.get('contentType'): + if options.get("contentType"): headers[http_constants.HttpHeaders.ContentType] = options['contentType'] - if 
options.get('isQueryPlanRequest'): + if options.get("isQueryPlanRequest"): headers[http_constants.HttpHeaders.IsQueryPlanRequest] = options['isQueryPlanRequest'] - if options.get('supportedQueryFeatures'): + if options.get("supportedQueryFeatures"): headers[http_constants.HttpHeaders.SupportedQueryFeatures] = options['supportedQueryFeatures'] - if options.get('queryVersion'): + if options.get("queryVersion"): headers[http_constants.HttpHeaders.QueryVersion] = options['queryVersion'] - if 'partitionKey' in options: + if "partitionKey" in options: # if partitionKey value is Undefined, serialize it as [{}] to be consistent with other SDKs. if options.get("partitionKey") is partition_key._Undefined: headers[http_constants.HttpHeaders.PartitionKey] = [{}] diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py index 7413b25409b7..64c92d4de0b9 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py @@ -288,7 +288,7 @@ def fetch_fn(options): self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn, 'dbs') + return query_iterable.QueryIterable(self, query, options, fetch_fn, "dbs") def ReadContainers(self, database_link, options=None): """Reads all collections in a database. @@ -336,7 +336,7 @@ def fetch_fn(options): self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn, 'colls') + return query_iterable.QueryIterable(self, query, options, fetch_fn, "colls") def CreateContainer(self, database_link, collection, options=None): """Creates a collection in a database. 
@@ -519,7 +519,7 @@ def fetch_fn(options): self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn, 'users') + return query_iterable.QueryIterable(self, query, options, fetch_fn, "users") def DeleteDatabase(self, database_link, options=None): """Deletes a database. @@ -661,7 +661,7 @@ def fetch_fn(options): self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn, 'permissions') + return query_iterable.QueryIterable(self, query, options, fetch_fn, "permissions") def ReplaceUser(self, user_link, user, options=None): """Replaces a user and return it. @@ -818,7 +818,7 @@ def fetch_fn(options): self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn, 'docs', database_or_Container_link) + return query_iterable.QueryIterable(self, query, options, fetch_fn, "docs", database_or_Container_link) def QueryItemsChangeFeed(self, collection_link, options=None, response_hook=None): """Queries documents change feed in a collection. @@ -947,7 +947,7 @@ def fetch_fn(options): self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn, 'pkranges') + return query_iterable.QueryIterable(self, query, options, fetch_fn, "pkranges") def CreateItem(self, database_or_Container_link, document, options=None): """Creates a document in a collection. @@ -1131,7 +1131,7 @@ def fetch_fn(options): self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn, 'triggers') + return query_iterable.QueryIterable(self, query, options, fetch_fn, "triggers") def CreateTrigger(self, collection_link, trigger, options=None): """Creates a trigger in a collection. 
@@ -1256,7 +1256,7 @@ def fetch_fn(options): self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn, 'udfs') + return query_iterable.QueryIterable(self, query, options, fetch_fn, "udfs") def CreateUserDefinedFunction(self, collection_link, udf, options=None): """Creates a user defined function in a collection. @@ -1381,7 +1381,7 @@ def fetch_fn(options): self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn, 'sprocs') + return query_iterable.QueryIterable(self, query, options, fetch_fn, "sprocs") def CreateStoredProcedure(self, collection_link, sproc, options=None): """Creates a stored procedure in a collection. @@ -1504,7 +1504,7 @@ def fetch_fn(options): self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn, 'conflicts') + return query_iterable.QueryIterable(self, query, options, fetch_fn, "conflicts") def ReadConflict(self, conflict_link, options=None): """Reads a conflict. @@ -1780,7 +1780,7 @@ def fetch_fn(options): self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn, 'attachments') + return query_iterable.QueryIterable(self, query, options, fetch_fn, "attachments") def ReadMedia(self, media_link): """Reads a media. @@ -2173,7 +2173,7 @@ def fetch_fn(options): self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn, 'offfers') + return query_iterable.QueryIterable(self, query, options, fetch_fn, "offers") def GetDatabaseAccount(self, url_connection=None): """Gets database account info. 
@@ -2559,7 +2559,7 @@ def __GetBodiesFromQueryResult(result): initial_headers[http_constants.HttpHeaders.IsQuery] = "true" if not is_query_plan: - initial_headers[http_constants.HttpHeaders.IsQuery] = 'true' + initial_headers[http_constants.HttpHeaders.IsQuery] = "true" if ( self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Default @@ -2591,18 +2591,18 @@ def _GetQueryPlanThroughGateway(self, query, resource_link): documents._QueryFeature.Top) options = { - 'contentType': runtime_constants.MediaTypes.Json, - 'isQueryPlanRequest': True, - 'supportedQueryFeatures': supported_query_features, - 'queryVersion': http_constants.Versions.QueryVersion + "contentType": runtime_constants.MediaTypes.Json, + "isQueryPlanRequest": True, + "supportedQueryFeatures": supported_query_features, + "queryVersion": http_constants.Versions.QueryVersion } resource_link = base.TrimBeginningAndEndingSlashes(resource_link) - path = base.GetPathFromLink(resource_link, 'docs') + path = base.GetPathFromLink(resource_link, "docs") resource_id = base.GetResourceIdOrFullNameFromLink(resource_link) return self.__QueryFeed(path, - 'docs', + "docs", resource_id, lambda r: r, None, diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py index 83adea59f65b..3b75550f2c1f 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py @@ -57,8 +57,8 @@ def __init__(self, *args): def needsRetry(self, error_code): if error_code in DefaultRetryPolicy.CONNECTION_ERROR_CODES: if (len(self.args) > 0): - if (self.args[4]['method'] == 'GET') or (http_constants.HttpHeaders.IsQuery in self.args[4]['headers'])\ - or (http_constants.HttpHeaders.IsQueryPlanRequest in self.args[4]['headers']): + if (self.args[4]["method"] == "GET") or (http_constants.HttpHeaders.IsQuery in self.args[4]["headers"])\ + or 
(http_constants.HttpHeaders.IsQueryPlanRequest in self.args[4]["headers"]): return True return False return True diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/query_execution_info.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/query_execution_info.py index 2dc97f878166..6c1f717c8703 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/query_execution_info.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/query_execution_info.py @@ -32,16 +32,16 @@ class _PartitionedQueryExecutionInfo(object): returned by the backend. """ - QueryInfoPath = 'queryInfo' - HasSelectValue = [QueryInfoPath, 'hasSelectValue'] - TopPath = [QueryInfoPath, 'top'] - OffsetPath = [QueryInfoPath, 'offset'] - LimitPath = [QueryInfoPath, 'limit'] - DistinctTypePath = [QueryInfoPath, 'distinctType'] - OrderByPath = [QueryInfoPath, 'orderBy'] - AggregatesPath = [QueryInfoPath, 'aggregates'] - QueryRangesPath = 'queryRanges' - RewrittenQueryPath = [QueryInfoPath, 'rewrittenQuery'] + QueryInfoPath = "queryInfo" + HasSelectValue = [QueryInfoPath, "hasSelectValue"] + TopPath = [QueryInfoPath, "top"] + OffsetPath = [QueryInfoPath, "offset"] + LimitPath = [QueryInfoPath, "limit"] + DistinctTypePath = [QueryInfoPath, "distinctType"] + OrderByPath = [QueryInfoPath, "orderBy"] + AggregatesPath = [QueryInfoPath, "aggregates"] + QueryRangesPath = "queryRanges" + RewrittenQueryPath = [QueryInfoPath, "rewrittenQuery"] def __init__(self, query_execution_info): """ diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py index 25346361bd22..2782243ac05a 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py @@ -112,14 +112,14 @@ def _create_execution_context(self): # SELECT VALUE . So we send the query down the old pipeline to avoid a breaking change. 
if query_execution_info.has_aggregates() and not query_execution_info.has_select_value(): - if self._options and ('enableCrossPartitionQuery' in self._options and self._options['enableCrossPartitionQuery']): + if self._options and ("enableCrossPartitionQuery" in self._options and self._options["enableCrossPartitionQuery"]): raise errors.HTTPFailure(http_constants.StatusCodes.BAD_REQUEST, "Cross partition query only supports 'VALUE ' for aggregates") return query_execution_context return self._create_pipelined_execution_context(query_execution_info) def _use_default_query_execution_context(self): - options = ['partitionKeyRangeId', 'changeFeed', 'partitionKey'] + options = ["partitionKeyRangeId", "changeFeed", "partitionKey"] if self._options: for option in options: if option in self._options: diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py b/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py index cbd46a068e7e..79a558fb4a13 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py @@ -396,20 +396,20 @@ def __init__(self): class _OperationType(object): """Represents the type of the operation """ - Create = 'Create' - Delete = 'Delete' - ExecuteJavaScript = 'ExecuteJavaScript' - Head = 'Head' - HeadFeed = 'HeadFeed' - Query = 'Query' - Read = 'Read' - ReadFeed = 'ReadFeed' - Recreate = 'Recreate' - Replace = 'Replace' - SqlQuery = 'SqlQuery' - QueryPlan = 'QueryPlan' - Update = 'Update' - Upsert = 'Upsert' + Create = "Create" + Delete = "Delete" + ExecuteJavaScript = "ExecuteJavaScript" + Head = "Head" + HeadFeed = "HeadFeed" + Query = "Query" + Read = "Read" + ReadFeed = "ReadFeed" + Recreate = "Recreate" + Replace = "Replace" + SqlQuery = "SqlQuery" + QueryPlan = "QueryPlan" + Update = "Update" + Upsert = "Upsert" @staticmethod def IsWriteOperation(operationType): diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/http_constants.py b/sdk/cosmos/azure-cosmos/azure/cosmos/http_constants.py 
index d6c6092046f5..197b283d0166 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/http_constants.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/http_constants.py @@ -87,8 +87,8 @@ class HttpHeaders: Referer = "referer" # Query - Query = 'x-ms-documentdb-query' - IsQuery = 'x-ms-documentdb-isquery' + Query = "x-ms-documentdb-query" + IsQuery = "x-ms-documentdb-isquery" IsQueryPlanRequest = "x-ms-cosmos-is-query-plan-request" SupportedQueryFeatures = "x-ms-cosmos-supported-query-features" QueryVersion = "x-ms-cosmos-query-version" @@ -265,9 +265,9 @@ class CookieHeaders: class Versions: """Constants of versions. """ - CurrentVersion = '2018-12-31' - SDKName = 'azure-cosmos' - SDKVersion = '4.0.0a1' + CurrentVersion = "2018-12-31" + SDKName = "azure-cosmos" + SDKVersion = "4.0.0a1" QueryVersion = "1.0" From fb40ea9d5aaa30c3799dc56a7b3c161fbc0bec96 Mon Sep 17 00:00:00 2001 From: Srinath Narayanan Date: Thu, 29 Aug 2019 12:01:01 -0700 Subject: [PATCH 11/29] re introduced hashing via sha1 --- .../_execution_context/endpoint_component.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py index 5223d1495761..13b942dbdd9e 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py @@ -23,6 +23,8 @@ """ import numbers import copy +import hashlib +import json from azure.cosmos._execution_context.aggregators import ( _AverageAggregator, @@ -102,23 +104,27 @@ def __init__(self, execution_context): super(_QueryExecutionDistinctUnorderedEndpointComponent, self).__init__(execution_context) self.last_result = set() - def make_hashable(self, value): + def make_hash(self, value): if isinstance(value, (set, tuple, list)): - return tuple([self.make_hashable(v) for v in value]) + return 
tuple([self.make_hash(v) for v in value]) elif not isinstance(value, dict): + if isinstance(value, numbers.Number): + return float(value) return value new_value = copy.deepcopy(value) for k, v in new_value.items(): - new_value[k] = self.make_hashable(v) + new_value[k] = self.make_hash(v) return tuple(frozenset(sorted(new_value.items()))) def next(self): res = next(self._execution_context) - hashed_result = self.make_hashable(res) + hash_object = hashlib.sha1(json.dumps(self.make_hash(res))) + hashed_result = hash_object.hexdigest() while hashed_result in self.last_result: res = next(self._execution_context) - hashed_result = self.make_hashable(res) + hash_object = hashlib.sha1(json.dumps(self.make_hash(res))) + hashed_result = hash_object.hexdigest() self.last_result.add(hashed_result) return res From 0db6371c354282867f20e2d6b672ef067040ae1a Mon Sep 17 00:00:00 2001 From: Srinath Narayanan Date: Thu, 29 Aug 2019 12:11:12 -0700 Subject: [PATCH 12/29] fixed bug in distinct for py3 --- .../_execution_context/endpoint_component.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py index 13b942dbdd9e..7dd089617cbd 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py @@ -25,6 +25,7 @@ import copy import hashlib import json +import six from azure.cosmos._execution_context.aggregators import ( _AverageAggregator, @@ -119,11 +120,21 @@ def make_hash(self, value): def next(self): res = next(self._execution_context) - hash_object = hashlib.sha1(json.dumps(self.make_hash(res))) + + json_repr = json.dumps(self.make_hash(res)) + if six.PY3: + json_repr = json_repr.encode("utf-8") + + hash_object = hashlib.sha1(json_repr) hashed_result = hash_object.hexdigest() + while 
hashed_result in self.last_result: res = next(self._execution_context) - hash_object = hashlib.sha1(json.dumps(self.make_hash(res))) + json_repr = json.dumps(self.make_hash(res)) + if six.PY3: + json_repr = json_repr.encode("utf-8") + + hash_object = hashlib.sha1(json_repr) hashed_result = hash_object.hexdigest() self.last_result.add(hashed_result) return res From 9e33c162e22b3490886d48be8b888f9f4182d2e1 Mon Sep 17 00:00:00 2001 From: Srinath Narayanan Date: Thu, 29 Aug 2019 16:13:16 -0700 Subject: [PATCH 13/29] dummy commit --- sdk/cosmos/azure-cosmos/test/query_tests.py | 1 + 1 file changed, 1 insertion(+) diff --git a/sdk/cosmos/azure-cosmos/test/query_tests.py b/sdk/cosmos/azure-cosmos/test/query_tests.py index de3132edf88e..00c40d713162 100644 --- a/sdk/cosmos/azure-cosmos/test/query_tests.py +++ b/sdk/cosmos/azure-cosmos/test/query_tests.py @@ -11,6 +11,7 @@ import collections import test_config + pytestmark = pytest.mark.cosmosEmulator @pytest.mark.usefixtures("teardown") From f0e356af1d3c61ff13b0ce0ddbe281fc384fe9ac Mon Sep 17 00:00:00 2001 From: Srinath Narayanan Date: Thu, 29 Aug 2019 16:13:27 -0700 Subject: [PATCH 14/29] dummy commit --- sdk/cosmos/azure-cosmos/test/query_tests.py | 1 - 1 file changed, 1 deletion(-) diff --git a/sdk/cosmos/azure-cosmos/test/query_tests.py b/sdk/cosmos/azure-cosmos/test/query_tests.py index 00c40d713162..de3132edf88e 100644 --- a/sdk/cosmos/azure-cosmos/test/query_tests.py +++ b/sdk/cosmos/azure-cosmos/test/query_tests.py @@ -11,7 +11,6 @@ import collections import test_config - pytestmark = pytest.mark.cosmosEmulator @pytest.mark.usefixtures("teardown") From 73af86a0c6baa52399214f9611612f2806b7f998 Mon Sep 17 00:00:00 2001 From: annatisch Date: Thu, 29 Aug 2019 16:59:40 -0700 Subject: [PATCH 15/29] [Cosmos] Core pipeline integration (#6961) * Updated dependencies * Added core pipeline * Ignore test config * Fixed indexes test * Refactored request creation * Fixed index test * Added trace decorators * Bumped version * 
Updated policies * Renamed request_options -> request_params --- .gitignore | 1 + .../azure-cosmos/azure/cosmos/__init__.py | 2 + .../azure/cosmos/_cosmos_client_connection.py | 193 ++++++++++-------- .../azure/cosmos/_default_retry_policy.py | 2 +- .../azure/cosmos/_synchronized_request.py | 130 +++++------- .../azure-cosmos/azure/cosmos/container.py | 71 +++++-- .../azure/cosmos/cosmos_client.py | 27 ++- .../azure-cosmos/azure/cosmos/database.py | 72 +++++-- sdk/cosmos/azure-cosmos/azure/cosmos/user.py | 48 +++-- .../azure-cosmos/azure/cosmos/version.py | 22 ++ sdk/cosmos/azure-cosmos/setup.py | 10 +- sdk/cosmos/azure-cosmos/test/crud_tests.py | 6 +- .../azure-cosmos/test/multimaster_tests.py | 4 +- sdk/cosmos/azure-cosmos/test/proxy_tests.py | 4 +- sdk/cosmos/azure-cosmos/test/session_tests.py | 8 +- 15 files changed, 361 insertions(+), 239 deletions(-) create mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/version.py diff --git a/.gitignore b/.gitignore index ce9cb766b2c1..75f5dc5dab54 100644 --- a/.gitignore +++ b/.gitignore @@ -90,3 +90,4 @@ sdk/storage/azure-storage-blob/tests/settings_real.py sdk/storage/azure-storage-queue/tests/settings_real.py sdk/storage/azure-storage-file/tests/settings_real.py *.code-workspace +sdk/cosmos/azure-cosmos/test/test_config.py diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/__init__.py b/sdk/cosmos/azure-cosmos/azure/cosmos/__init__.py index 19e73780aa50..07f3ca79fb93 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/__init__.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/__init__.py @@ -37,6 +37,7 @@ from .permission import Permission from .scripts import Scripts from .user import User +from .version import VERSION __all__ = ( "Container", @@ -56,3 +57,4 @@ "TriggerOperation", "TriggerType", ) +__version__ = VERSION \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py index 903e8882b2f8..23c5c4c432d7 
100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py @@ -24,9 +24,20 @@ """Document client class for the Azure Cosmos database service. """ -import requests +import platform +import requests import six +from azure.core import PipelineClient +from azure.core.pipeline.policies import ( + ContentDecodePolicy, + HeadersPolicy, + UserAgentPolicy, + NetworkTraceLoggingPolicy, + CustomHookPolicy, + ProxyPolicy) +from azure.core.pipeline.policies.distributed_tracing import DistributedTracingPolicy + from . import _base as base from . import documents from . import _constants as constants @@ -40,6 +51,7 @@ from . import _session from . import _utils from .partition_key import _Undefined, _Empty +from .version import VERSION # pylint: disable=protected-access @@ -132,15 +144,28 @@ def __init__( self._useMultipleWriteLocations = False self._global_endpoint_manager = global_endpoint_manager._GlobalEndpointManager(self) - # creating a requests session used for connection pooling and re-used by all requests - self._requests_session = requests.Session() - + proxies = {} if self.connection_policy.ProxyConfiguration and self.connection_policy.ProxyConfiguration.Host: host = connection_policy.ProxyConfiguration.Host url = six.moves.urllib.parse.urlparse(host) proxy = host if url.port else host + ":" + str(connection_policy.ProxyConfiguration.Port) - proxyDict = {url.scheme: proxy} - self._requests_session.proxies.update(proxyDict) + proxies = {url.scheme : proxy} + user_agent = "azsdk-python-cosmos/{} Python/{} ({})".format( + VERSION, + platform.python_version(), + platform.platform()) + + policies = [ + HeadersPolicy(), + ProxyPolicy(proxies=proxies), + UserAgentPolicy(base_user_agent=user_agent), + ContentDecodePolicy(), + CustomHookPolicy(), + DistributedTracingPolicy(), + NetworkTraceLoggingPolicy(), + ] + + self.pipeline_client = PipelineClient(url_connection, "empty-config", 
policies=policies) # Query compatibility mode. # Allows to specify compatibility mode used by client when making query requests. Should be removed when @@ -1782,7 +1807,7 @@ def fetch_fn(options): return query_iterable.QueryIterable(self, query, options, fetch_fn) - def ReadMedia(self, media_link): + def ReadMedia(self, media_link, **kwargs): """Reads a media. When self.connection_policy.MediaReadMode == @@ -1806,11 +1831,11 @@ def ReadMedia(self, media_link): headers = base.GetHeaders(self, default_headers, "get", path, attachment_id, "media", {}) # ReadMedia will always use WriteEndpoint since it's not replicated in readable Geo regions - request = _request_object.RequestObject("media", documents._OperationType.Read) - result, self.last_response_headers = self.__Get(path, request, headers) + request_params = _request_object.RequestObject("media", documents._OperationType.Read) + result, self.last_response_headers = self.__Get(path, request_params, headers, **kwargs) return result - def UpdateMedia(self, media_link, readable_stream, options=None): + def UpdateMedia(self, media_link, readable_stream, options=None, **kwargs): """Updates a media and returns it. 
:param str media_link: @@ -1845,8 +1870,8 @@ def UpdateMedia(self, media_link, readable_stream, options=None): headers = base.GetHeaders(self, initial_headers, "put", path, attachment_id, "media", options) # UpdateMedia will use WriteEndpoint since it uses PUT operation - request = _request_object.RequestObject("media", documents._OperationType.Update) - result, self.last_response_headers = self.__Put(path, request, readable_stream, headers) + request_params = _request_object.RequestObject("media", documents._OperationType.Update) + result, self.last_response_headers = self.__Put(path, request_params, readable_stream, headers, **kwargs) self._UpdateSessionIfRequired(headers, result, self.last_response_headers) return result @@ -1995,7 +2020,7 @@ def DeleteUserDefinedFunction(self, udf_link, options=None): udf_id = base.GetResourceIdOrFullNameFromLink(udf_link) return self.DeleteResource(path, "udfs", udf_id, None, options) - def ExecuteStoredProcedure(self, sproc_link, params, options=None): + def ExecuteStoredProcedure(self, sproc_link, params, options=None, **kwargs): """Executes a store procedure. 
:param str sproc_link: @@ -2025,8 +2050,8 @@ def ExecuteStoredProcedure(self, sproc_link, params, options=None): headers = base.GetHeaders(self, initial_headers, "post", path, sproc_id, "sprocs", options) # ExecuteStoredProcedure will use WriteEndpoint since it uses POST operation - request = _request_object.RequestObject("sprocs", documents._OperationType.ExecuteJavaScript) - result, self.last_response_headers = self.__Post(path, request, params, headers) + request_params = _request_object.RequestObject("sprocs", documents._OperationType.ExecuteJavaScript) + result, self.last_response_headers = self.__Post(path, request_params, params, headers, **kwargs) return result def ReplaceStoredProcedure(self, sproc_link, sproc, options=None): @@ -2175,7 +2200,7 @@ def fetch_fn(options): return query_iterable.QueryIterable(self, query, options, fetch_fn) - def GetDatabaseAccount(self, url_connection=None): + def GetDatabaseAccount(self, url_connection=None, **kwargs): """Gets database account info. 
:return: @@ -2190,8 +2215,8 @@ def GetDatabaseAccount(self, url_connection=None): initial_headers = dict(self.default_headers) headers = base.GetHeaders(self, initial_headers, "get", "", "", "", {}) # path # id # type - request = _request_object.RequestObject("databaseaccount", documents._OperationType.Read, url_connection) - result, self.last_response_headers = self.__Get("", request, headers) + request_params = _request_object.RequestObject("databaseaccount", documents._OperationType.Read, url_connection) + result, self.last_response_headers = self.__Get("", request_params, headers, **kwargs) database_account = documents.DatabaseAccount() database_account.DatabasesLink = "/dbs/" database_account.MediaLink = "/media/" @@ -2220,7 +2245,7 @@ def GetDatabaseAccount(self, url_connection=None): ) return database_account - def Create(self, body, path, typ, id, initial_headers, options=None): # pylint: disable=redefined-builtin + def Create(self, body, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin """Creates a Azure Cosmos resource and returns it. 
:param dict body: @@ -2244,14 +2269,14 @@ def Create(self, body, path, typ, id, initial_headers, options=None): # pylint: headers = base.GetHeaders(self, initial_headers, "post", path, id, typ, options) # Create will use WriteEndpoint since it uses POST operation - request = _request_object.RequestObject(typ, documents._OperationType.Create) - result, self.last_response_headers = self.__Post(path, request, body, headers) + request_params = _request_object.RequestObject(typ, documents._OperationType.Create) + result, self.last_response_headers = self.__Post(path, request_params, body, headers, **kwargs) # update session for write request self._UpdateSessionIfRequired(headers, result, self.last_response_headers) return result - def Upsert(self, body, path, typ, id, initial_headers, options=None): # pylint: disable=redefined-builtin + def Upsert(self, body, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin """Upserts a Azure Cosmos resource and returns it. 
:param dict body: @@ -2277,13 +2302,13 @@ def Upsert(self, body, path, typ, id, initial_headers, options=None): # pylint: headers[http_constants.HttpHeaders.IsUpsert] = True # Upsert will use WriteEndpoint since it uses POST operation - request = _request_object.RequestObject(typ, documents._OperationType.Upsert) - result, self.last_response_headers = self.__Post(path, request, body, headers) + request_params = _request_object.RequestObject(typ, documents._OperationType.Upsert) + result, self.last_response_headers = self.__Post(path, request_params, body, headers, **kwargs) # update session for write request self._UpdateSessionIfRequired(headers, result, self.last_response_headers) return result - def Replace(self, resource, path, typ, id, initial_headers, options=None): # pylint: disable=redefined-builtin + def Replace(self, resource, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin """Replaces a Azure Cosmos resource and returns it. :param dict resource: @@ -2306,14 +2331,14 @@ def Replace(self, resource, path, typ, id, initial_headers, options=None): # py initial_headers = initial_headers or self.default_headers headers = base.GetHeaders(self, initial_headers, "put", path, id, typ, options) # Replace will use WriteEndpoint since it uses PUT operation - request = _request_object.RequestObject(typ, documents._OperationType.Replace) - result, self.last_response_headers = self.__Put(path, request, resource, headers) + request_params = _request_object.RequestObject(typ, documents._OperationType.Replace) + result, self.last_response_headers = self.__Put(path, request_params, resource, headers, **kwargs) # update session for request mutates data on server side self._UpdateSessionIfRequired(headers, result, self.last_response_headers) return result - def Read(self, path, typ, id, initial_headers, options=None): # pylint: disable=redefined-builtin + def Read(self, path, typ, id, initial_headers, options=None, **kwargs): # 
pylint: disable=redefined-builtin """Reads a Azure Cosmos resource and returns it. :param str path: @@ -2335,11 +2360,11 @@ def Read(self, path, typ, id, initial_headers, options=None): # pylint: disable initial_headers = initial_headers or self.default_headers headers = base.GetHeaders(self, initial_headers, "get", path, id, typ, options) # Read will use ReadEndpoint since it uses GET operation - request = _request_object.RequestObject(typ, documents._OperationType.Read) - result, self.last_response_headers = self.__Get(path, request, headers) + request_params = _request_object.RequestObject(typ, documents._OperationType.Read) + result, self.last_response_headers = self.__Get(path, request_params, headers, **kwargs) return result - def DeleteResource(self, path, typ, id, initial_headers, options=None): # pylint: disable=redefined-builtin + def DeleteResource(self, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin """Deletes a Azure Cosmos resource and returns it. :param str path: @@ -2361,15 +2386,15 @@ def DeleteResource(self, path, typ, id, initial_headers, options=None): # pylin initial_headers = initial_headers or self.default_headers headers = base.GetHeaders(self, initial_headers, "delete", path, id, typ, options) # Delete will use WriteEndpoint since it uses DELETE operation - request = _request_object.RequestObject(typ, documents._OperationType.Delete) - result, self.last_response_headers = self.__Delete(path, request, headers) + request_params = _request_object.RequestObject(typ, documents._OperationType.Delete) + result, self.last_response_headers = self.__Delete(path, request_params, headers, **kwargs) # update session for request mutates data on server side self._UpdateSessionIfRequired(headers, result, self.last_response_headers) return result - def __Get(self, path, request, headers): + def __Get(self, path, request_params, headers, **kwargs): """Azure Cosmos 'GET' http request. 
:params str url: @@ -2382,20 +2407,19 @@ def __Get(self, path, request, headers): tuple of (dict, dict) """ + request = self.pipeline_client.get(url=path, headers=headers) return synchronized_request.SynchronizedRequest( - self, - request, - self._global_endpoint_manager, - self.connection_policy, - self._requests_session, - "GET", - path, - None, - None, - headers, + client=self, + request_params=request_params, + global_endpoint_manager=self._global_endpoint_manager, + connection_policy=self.connection_policy, + pipeline_client=self.pipeline_client, + request=request, + request_data=None, + **kwargs ) - def __Post(self, path, request, body, headers): + def __Post(self, path, request_params, body, headers, **kwargs): """Azure Cosmos 'POST' http request. :params str url: @@ -2409,20 +2433,19 @@ def __Post(self, path, request, body, headers): tuple of (dict, dict) """ + request = self.pipeline_client.post(url=path, headers=headers) return synchronized_request.SynchronizedRequest( - self, - request, - self._global_endpoint_manager, - self.connection_policy, - self._requests_session, - "POST", - path, - body, - query_params=None, - headers=headers, + client=self, + request_params=request_params, + global_endpoint_manager=self._global_endpoint_manager, + connection_policy=self.connection_policy, + pipeline_client=self.pipeline_client, + request=request, + request_data=body, + **kwargs ) - def __Put(self, path, request, body, headers): + def __Put(self, path, request_params, body, headers, **kwargs): """Azure Cosmos 'PUT' http request. 
:params str url: @@ -2436,20 +2459,19 @@ def __Put(self, path, request, body, headers): tuple of (dict, dict) """ + request = self.pipeline_client.put(url=path, headers=headers) return synchronized_request.SynchronizedRequest( - self, - request, - self._global_endpoint_manager, - self.connection_policy, - self._requests_session, - "PUT", - path, - body, - query_params=None, - headers=headers, + client=self, + request_params=request_params, + global_endpoint_manager=self._global_endpoint_manager, + connection_policy=self.connection_policy, + pipeline_client=self.pipeline_client, + request=request, + request_data=body, + **kwargs ) - def __Delete(self, path, request, headers): + def __Delete(self, path, request_params, headers, **kwargs): """Azure Cosmos 'DELETE' http request. :params str url: @@ -2462,17 +2484,16 @@ def __Delete(self, path, request, headers): tuple of (dict, dict) """ + request = self.pipeline_client.delete(url=path, headers=headers) return synchronized_request.SynchronizedRequest( - self, - request, - self._global_endpoint_manager, - self.connection_policy, - self._requests_session, - "DELETE", - path, + client=self, + request_params=request_params, + global_endpoint_manager=self._global_endpoint_manager, + connection_policy=self.connection_policy, + pipeline_client=self.pipeline_client, + request=request, request_data=None, - query_params=None, - headers=headers, + **kwargs ) def QueryFeed(self, path, collection_id, query, options, partition_key_range_id=None): @@ -2506,7 +2527,17 @@ def QueryFeed(self, path, collection_id, query, options, partition_key_range_id= ) def __QueryFeed( - self, path, typ, id_, result_fn, create_fn, query, options=None, partition_key_range_id=None, response_hook=None + self, + path, + typ, + id_, + result_fn, + create_fn, + query, + options=None, + partition_key_range_id=None, + response_hook=None, + **kwargs ): """Query for more than one Azure Cosmos resources. 
@@ -2545,9 +2576,9 @@ def __GetBodiesFromQueryResult(result): # Copy to make sure that default_headers won't be changed. if query is None: # Query operations will use ReadEndpoint even though it uses GET(for feed requests) - request = _request_object.RequestObject(typ, documents._OperationType.ReadFeed) + request_params = _request_object.RequestObject(typ, documents._OperationType.ReadFeed) headers = base.GetHeaders(self, initial_headers, "get", path, id_, typ, options, partition_key_range_id) - result, self.last_response_headers = self.__Get(path, request, headers) + result, self.last_response_headers = self.__Get(path, request_params, headers, **kwargs) if response_hook: response_hook(self.last_response_headers, result) return __GetBodiesFromQueryResult(result) @@ -2566,9 +2597,9 @@ def __GetBodiesFromQueryResult(result): raise SystemError("Unexpected query compatibility mode.") # Query operations will use ReadEndpoint even though it uses POST(for regular query operations) - request = _request_object.RequestObject(typ, documents._OperationType.SqlQuery) + request_params = _request_object.RequestObject(typ, documents._OperationType.SqlQuery) headers = base.GetHeaders(self, initial_headers, "post", path, id_, typ, options, partition_key_range_id) - result, self.last_response_headers = self.__Post(path, request, query, headers) + result, self.last_response_headers = self.__Post(path, request_params, query, headers, **kwargs) if response_hook: response_hook(self.last_response_headers, result) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py index a1eb0f51eab0..2e07955ab0ea 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py @@ -57,7 +57,7 @@ def __init__(self, *args): def needsRetry(self, error_code): if error_code in DefaultRetryPolicy.CONNECTION_ERROR_CODES: if self.args: - if 
(self.args[4]["method"] == "GET") or (http_constants.HttpHeaders.IsQuery in self.args[4]["headers"]): + if (self.args[3].method == "GET") or (http_constants.HttpHeaders.IsQuery in self.args[3].headers): return True return False return True diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_synchronized_request.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_synchronized_request.py index 09970fe93bc5..618541feb6b0 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_synchronized_request.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_synchronized_request.py @@ -33,19 +33,17 @@ from . import _retry_utility -def _IsReadableStream(obj): +def _is_readable_stream(obj): """Checks whether obj is a file-like readable stream. - :rtype: - boolean - + :rtype: boolean """ if hasattr(obj, "read") and callable(getattr(obj, "read")): return True return False -def _RequestBodyFromData(data): +def _request_body_from_data(data): """Gets request body from data. When `data` is dict and list into unicode string; otherwise return `data` @@ -57,7 +55,7 @@ def _RequestBodyFromData(data): str, unicode, file-like stream object, or None """ - if isinstance(data, six.string_types) or _IsReadableStream(data): + if data is None or isinstance(data, six.string_types) or _is_readable_stream(data): return data if isinstance(data, (dict, list, tuple)): @@ -66,27 +64,21 @@ def _RequestBodyFromData(data): if six.PY2: return json_dumped.decode("utf-8") return json_dumped - return None -def _Request( - global_endpoint_manager, request, connection_policy, requests_session, path, request_options, request_body -): +def _Request(global_endpoint_manager, request_params, connection_policy, pipeline_client, request, **kwargs): """Makes one http request using the requests module. 
:param _GlobalEndpointManager global_endpoint_manager: - :param dict request: + :param dict request_params: contains the resourceType, operationType, endpointOverride, useWriteEndpoint, useAlternateWriteEndpoint information :param documents.ConnectionPolicy connection_policy: - :param requests.Session requests_session: - Session object in requests module - :param str resource_url: - The url for the resource - :param dict request_options: - :param str request_body: - Unicode or None + :param azure.core.PipelineClient pipeline_client: + Pipeline client to process the resquest + :param azure.core.HttpRequest request: + The request object to send through the pipeline :return: tuple of (result, headers) @@ -94,29 +86,27 @@ def _Request( tuple of (dict, dict) """ - is_media = request_options["path"].find("media") > -1 + is_media = request.url.find("media") > -1 is_media_stream = is_media and connection_policy.MediaReadMode == documents.MediaReadMode.Streamed connection_timeout = connection_policy.MediaRequestTimeout if is_media else connection_policy.RequestTimeout + connection_timeout = kwargs.pop("connection_timeout", connection_timeout / 1000.0) # Every request tries to perform a refresh global_endpoint_manager.refresh_endpoint_list(None) - if request.endpoint_override: - base_url = request.endpoint_override + if request_params.endpoint_override: + base_url = request_params.endpoint_override else: - base_url = global_endpoint_manager.resolve_service_endpoint(request) + base_url = global_endpoint_manager.resolve_service_endpoint(request_params) + if base_url != pipeline_client._base_url: + request.url = request.url.replace(pipeline_client._base_url, base_url) - if path: - resource_url = base_url + path - else: - resource_url = base_url - - parse_result = urlparse(resource_url) + parse_result = urlparse(request.url) # The requests library now expects header values to be strings only starting 2.11, # and will raise an error on validation if they are not, so casting all 
header values to strings. - request_options["headers"] = {header: str(value) for header, value in request_options["headers"].items()} + request.headers.update({header: str(value) for header, value in request.headers.items()}) # We are disabling the SSL verification for local emulator(localhost/127.0.0.1) or if the user # has explicitly specified to disable SSL verification. @@ -126,40 +116,35 @@ def _Request( and not connection_policy.DisableSSLVerification ) - if connection_policy.SSLConfiguration: + if connection_policy.SSLConfiguration or "connection_cert" in kwargs: ca_certs = connection_policy.SSLConfiguration.SSLCaCerts cert_files = (connection_policy.SSLConfiguration.SSLCertFile, connection_policy.SSLConfiguration.SSLKeyFile) - - response = requests_session.request( - request_options["method"], - resource_url, - data=request_body, - headers=request_options["headers"], - timeout=connection_timeout / 1000.0, + response = pipeline_client._pipeline.run( + request, stream=is_media_stream, - verify=ca_certs, - cert=cert_files, + connection_timeout=connection_timeout, + connection_verify=kwargs.pop("connection_verify", ca_certs), + connection_cert=kwargs.pop("connection_cert", cert_files), + ) else: - response = requests_session.request( - request_options["method"], - resource_url, - data=request_body, - headers=request_options["headers"], - timeout=connection_timeout / 1000.0, + response = pipeline_client._pipeline.run( + request, stream=is_media_stream, + connection_timeout=connection_timeout, # If SSL is disabled, verify = false - verify=is_ssl_enabled, + connection_verify=kwargs.pop("connection_verify", is_ssl_enabled) ) + response = response.http_response headers = dict(response.headers) # In case of media stream response, return the response to the user and the user # will need to handle reading the response. 
if is_media_stream: - return (response.raw, headers) + return (response.stream_download(pipeline_client._pipeline), headers) - data = response.content + data = response.body() if not six.PY2: # python 3 compatible: convert data from byte to unicode string data = data.decode("utf-8") @@ -182,25 +167,23 @@ def _Request( def SynchronizedRequest( client, - request, + request_params, global_endpoint_manager, connection_policy, - requests_session, - method, - path, + pipeline_client, + request, request_data, - query_params, - headers, + **kwargs ): """Performs one synchronized http request according to the parameters. :param object client: Document client instance - :param dict request: - :param _GlobalEndpointManager global_endpoint_manager: + :param dict request_params: + :param _GlobalEndpointManager global_endpoint_manager: :param documents.ConnectionPolicy connection_policy: - :param requests.Session requests_session: - Session object in requests module + :param azure.core.PipelineClient pipeline_client: + PipelineClient to process the request. :param str method: :param str path: :param (str, unicode, file-like stream object, dict, list or None) request_data: @@ -213,33 +196,20 @@ def SynchronizedRequest( tuple of (dict dict) """ - request_body = None - if request_data: - request_body = _RequestBodyFromData(request_data) - if not request_body: - raise errors.UnexpectedDataType("parameter data must be a JSON object, string or" + " readable stream.") - - request_options = {} - request_options["path"] = path - request_options["method"] = method - if query_params: - request_options["path"] += "?" 
+ urlencode(query_params) - - request_options["headers"] = headers - if request_body and isinstance(request_body, (str, six.text_type)): - request_options["headers"][http_constants.HttpHeaders.ContentLength] = len(request_body) - elif request_body is None: - request_options["headers"][http_constants.HttpHeaders.ContentLength] = 0 + request.data = _request_body_from_data(request_data) + if request.data and isinstance(request.data, six.string_types): + request.headers[http_constants.HttpHeaders.ContentLength] = len(request.data) + elif request.data is None: + request.headers[http_constants.HttpHeaders.ContentLength] = 0 # Pass _Request function with it's parameters to retry_utility's Execute method that wraps the call with retries return _retry_utility.Execute( client, global_endpoint_manager, _Request, - request, + request_params, connection_policy, - requests_session, - path, - request_options, - request_body, + pipeline_client, + request, + **kwargs ) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py index 3abe9b3cea93..db929854e529 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py @@ -25,6 +25,7 @@ from typing import Any, Callable, Dict, List, Optional, Union import six +from azure.core.tracing.decorator import distributed_trace from ._cosmos_client_connection import CosmosClientConnection from .errors import HTTPFailure @@ -96,6 +97,7 @@ def _get_conflict_link(self, conflict_or_link): return u"{}/conflicts/{}".format(self.container_link, conflict_or_link) return conflict_or_link["_self"] + @distributed_trace def read( self, session_token=None, @@ -105,6 +107,7 @@ def read( populate_quota_info=None, request_options=None, response_hook=None, + **kwargs ): # type: (str, Dict[str, str], bool, bool, bool, Dict[str, Any], Optional[Callable]) -> Container """ Read the container properties @@ -136,13 +139,14 @@ def read( 
request_options["populateQuotaInfo"] = populate_quota_info collection_link = self.container_link - self._properties = self.client_connection.ReadContainer(collection_link, options=request_options) + self._properties = self.client_connection.ReadContainer(collection_link, options=request_options, **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers, self._properties) return self._properties + @distributed_trace def read_item( self, item, # type: Union[str, Dict[str, Any]] @@ -153,6 +157,7 @@ def read_item( post_trigger_include=None, # type: str request_options=None, # type: Dict[str, Any] response_hook=None, # type: Optional[Callable] + **kwargs ): # type: (...) -> Dict[str, str] """ @@ -193,11 +198,12 @@ def read_item( if post_trigger_include: request_options["postTriggerInclude"] = post_trigger_include - result = self.client_connection.ReadItem(document_link=doc_link, options=request_options) + result = self.client_connection.ReadItem(document_link=doc_link, options=request_options, **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers, result) return result + @distributed_trace def read_all_items( self, max_item_count=None, @@ -206,6 +212,7 @@ def read_all_items( populate_query_metrics=None, feed_options=None, response_hook=None, + **kwargs ): # type: (int, str, Dict[str, str], bool, Dict[str, Any], Optional[Callable]) -> QueryIterable """ List all items in the container. 
@@ -233,12 +240,13 @@ def read_all_items( response_hook.clear() items = self.client_connection.ReadItems( - collection_link=self.container_link, feed_options=feed_options, response_hook=response_hook + collection_link=self.container_link, feed_options=feed_options, response_hook=response_hook, **kwargs ) if response_hook: response_hook(self.client_connection.last_response_headers, items) return items + @distributed_trace def query_items_change_feed( self, partition_key_range_id=None, @@ -247,6 +255,7 @@ def query_items_change_feed( max_item_count=None, feed_options=None, response_hook=None, + **kwargs ): """ Get a sorted list of items that were changed, in the order in which they were modified. @@ -277,12 +286,13 @@ def query_items_change_feed( response_hook.clear() result = self.client_connection.QueryItemsChangeFeed( - self.container_link, options=feed_options, response_hook=response_hook + self.container_link, options=feed_options, response_hook=response_hook, **kwargs ) if response_hook: response_hook(self.client_connection.last_response_headers, result) return result + @distributed_trace def query_items( self, query, # type: str @@ -296,6 +306,7 @@ def query_items( populate_query_metrics=None, # type: bool feed_options=None, # type: Dict[str, Any] response_hook=None, # type: Optional[Callable] + **kwargs ): # type: (...) -> QueryIterable """Return all results matching the given `query`. @@ -363,11 +374,13 @@ def query_items( options=feed_options, partition_key=partition_key, response_hook=response_hook, + **kwargs ) if response_hook: response_hook(self.client_connection.last_response_headers, items) return items + @distributed_trace def replace_item( self, item, # type: Union[str, Dict[str, Any]] @@ -380,6 +393,7 @@ def replace_item( post_trigger_include=None, # type: str request_options=None, # type: Dict[str, Any] response_hook=None, # type: Optional[Callable] + **kwargs ): # type: (...) 
-> Dict[str, str] """ Replaces the specified item if it exists in the container. @@ -415,11 +429,14 @@ def replace_item( if post_trigger_include: request_options["postTriggerInclude"] = post_trigger_include - result = self.client_connection.ReplaceItem(document_link=item_link, new_document=body, options=request_options) + result = self.client_connection.ReplaceItem( + document_link=item_link, new_document=body, options=request_options, **kwargs + ) if response_hook: response_hook(self.client_connection.last_response_headers, result) return result + @distributed_trace def upsert_item( self, body, # type: Dict[str, Any] @@ -431,6 +448,7 @@ def upsert_item( post_trigger_include=None, # type: str request_options=None, # type: Dict[str, Any] response_hook=None, # type: Optional[Callable] + **kwargs ): # type: (...) -> Dict[str, str] """ Insert or update the specified item. @@ -466,11 +484,13 @@ def upsert_item( if post_trigger_include: request_options["postTriggerInclude"] = post_trigger_include - result = self.client_connection.UpsertItem(database_or_Container_link=self.container_link, document=body) + result = self.client_connection.UpsertItem( + database_or_Container_link=self.container_link, document=body, **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers, result) return result + @distributed_trace def create_item( self, body, # type: Dict[str, Any] @@ -483,6 +503,7 @@ def create_item( indexing_directive=None, # type: Any request_options=None, # type: Dict[str, Any] response_hook=None, # type: Optional[Callable] + **kwargs ): # type: (...) -> Dict[str, str] """ Create an item in the container. 
@@ -523,12 +544,13 @@ def create_item( request_options["indexingDirective"] = indexing_directive result = self.client_connection.CreateItem( - database_or_Container_link=self.container_link, document=body, options=request_options + database_or_Container_link=self.container_link, document=body, options=request_options, **kwargs ) if response_hook: response_hook(self.client_connection.last_response_headers, result) return result + @distributed_trace def delete_item( self, item, # type: Union[Dict[str, Any], str] @@ -541,6 +563,7 @@ def delete_item( post_trigger_include=None, # type: str request_options=None, # type: Dict[str, Any] response_hook=None, # type: Optional[Callable] + **kwargs ): # type: (...) -> None """ Delete the specified item from the container. @@ -577,11 +600,12 @@ def delete_item( request_options["postTriggerInclude"] = post_trigger_include document_link = self._get_document_link(item) - result = self.client_connection.DeleteItem(document_link=document_link, options=request_options) + result = self.client_connection.DeleteItem(document_link=document_link, options=request_options, **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers, result) - def read_offer(self, response_hook=None): + @distributed_trace + def read_offer(self, response_hook=None, **kwargs): # type: (Optional[Callable]) -> Offer """ Read the Offer object for this container. 
@@ -596,7 +620,7 @@ def read_offer(self, response_hook=None): "query": "SELECT * FROM root r WHERE r.resource=@link", "parameters": [{"name": "@link", "value": link}], } - offers = list(self.client_connection.QueryOffers(query_spec)) + offers = list(self.client_connection.QueryOffers(query_spec, **kwargs)) if not offers: raise HTTPFailure(StatusCodes.NOT_FOUND, "Could not find Offer for container " + self.container_link) @@ -605,7 +629,8 @@ def read_offer(self, response_hook=None): return Offer(offer_throughput=offers[0]["content"]["offerThroughput"], properties=offers[0]) - def replace_throughput(self, throughput, response_hook=None): + @distributed_trace + def replace_throughput(self, throughput, response_hook=None, **kwargs): # type: (int, Optional[Callable]) -> Offer """ Replace the container's throughput @@ -621,19 +646,20 @@ def replace_throughput(self, throughput, response_hook=None): "query": "SELECT * FROM root r WHERE r.resource=@link", "parameters": [{"name": "@link", "value": link}], } - offers = list(self.client_connection.QueryOffers(query_spec)) + offers = list(self.client_connection.QueryOffers(query_spec, **kwargs)) if not offers: raise HTTPFailure(StatusCodes.NOT_FOUND, "Could not find Offer for container " + self.container_link) new_offer = offers[0].copy() new_offer["content"]["offerThroughput"] = throughput - data = self.client_connection.ReplaceOffer(offer_link=offers[0]["_self"], offer=offers[0]) + data = self.client_connection.ReplaceOffer(offer_link=offers[0]["_self"], offer=offers[0], **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers, data) return Offer(offer_throughput=data["content"]["offerThroughput"], properties=data) - def read_all_conflicts(self, max_item_count=None, feed_options=None, response_hook=None): + @distributed_trace + def read_all_conflicts(self, max_item_count=None, feed_options=None, response_hook=None, **kwargs): # type: (int, Dict[str, Any], Optional[Callable]) -> QueryIterable """ 
List all conflicts in the container. @@ -648,11 +674,14 @@ def read_all_conflicts(self, max_item_count=None, feed_options=None, response_ho if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - result = self.client_connection.ReadConflicts(collection_link=self.container_link, feed_options=feed_options) + result = self.client_connection.ReadConflicts( + collection_link=self.container_link, feed_options=feed_options, **kwargs + ) if response_hook: response_hook(self.client_connection.last_response_headers, result) return result + @distributed_trace def query_conflicts( self, query, @@ -662,6 +691,7 @@ def query_conflicts( max_item_count=None, feed_options=None, response_hook=None, + **kwargs ): # type: (str, List, bool, Any, int, Dict[str, Any], Optional[Callable]) -> QueryIterable """Return all conflicts matching the given `query`. @@ -691,12 +721,14 @@ def query_conflicts( collection_link=self.container_link, query=query if parameters is None else dict(query=query, parameters=parameters), options=feed_options, + **kwargs ) if response_hook: response_hook(self.client_connection.last_response_headers, result) return result - def get_conflict(self, conflict, partition_key, request_options=None, response_hook=None): + @distributed_trace + def get_conflict(self, conflict, partition_key, request_options=None, response_hook=None, **kwargs): # type: (Union[str, Dict[str, Any]], Any, Dict[str, Any], Optional[Callable]) -> Dict[str, str] """ Get the conflict identified by `id`. 
@@ -714,13 +746,14 @@ def get_conflict(self, conflict, partition_key, request_options=None, response_h request_options["partitionKey"] = self._set_partition_key(partition_key) result = self.client_connection.ReadConflict( - conflict_link=self._get_conflict_link(conflict), options=request_options + conflict_link=self._get_conflict_link(conflict), options=request_options, **kwargs ) if response_hook: response_hook(self.client_connection.last_response_headers, result) return result - def delete_conflict(self, conflict, partition_key, request_options=None, response_hook=None): + @distributed_trace + def delete_conflict(self, conflict, partition_key, request_options=None, response_hook=None, **kwargs): # type: (Union[str, Dict[str, Any]], Any, Dict[str, Any], Optional[Callable]) -> None """ Delete the specified conflict from the container. @@ -738,7 +771,7 @@ def delete_conflict(self, conflict, partition_key, request_options=None, respons request_options["partitionKey"] = self._set_partition_key(partition_key) result = self.client_connection.DeleteConflict( - conflict_link=self._get_conflict_link(conflict), options=request_options + conflict_link=self._get_conflict_link(conflict), options=request_options, **kwargs ) if response_hook: response_hook(self.client_connection.last_response_headers, result) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py index 6d62de891fb8..9b0aa1836831 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py @@ -25,6 +25,7 @@ from typing import Any, Callable, Dict, Mapping, Optional, Union, cast import six +from azure.core.tracing.decorator import distributed_trace from ._cosmos_client_connection import CosmosClientConnection from .database import Database @@ -80,6 +81,7 @@ def _get_database_link(database_or_id): database_id = cast("Dict[str, str]", database_or_id)["id"] return 
"dbs/{}".format(database_id) + @distributed_trace def create_database( self, id, # pylint: disable=redefined-builtin @@ -90,6 +92,7 @@ def create_database( offer_throughput=None, request_options=None, response_hook=None, + **kwargs ): # type: (str, str, Dict[str, str], Dict[str, str], bool, int, Dict[str, Any], Optional[Callable]) -> Database """Create a new database with the given ID (name). @@ -128,7 +131,7 @@ def create_database( if offer_throughput is not None: request_options["offerThroughput"] = offer_throughput - result = self.client_connection.CreateDatabase(database=dict(id=id), options=request_options) + result = self.client_connection.CreateDatabase(database=dict(id=id), options=request_options, **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers) return Database(self.client_connection, id=result["id"], properties=result) @@ -152,6 +155,7 @@ def get_database_client(self, database): return Database(self.client_connection, id_value) + @distributed_trace def read_all_databases( self, max_item_count=None, @@ -160,6 +164,7 @@ def read_all_databases( populate_query_metrics=None, feed_options=None, response_hook=None, + **kwargs ): # type: (int, str, Dict[str, str], bool, Dict[str, Any], Optional[Callable]) -> QueryIterable """ @@ -185,11 +190,12 @@ def read_all_databases( if populate_query_metrics is not None: feed_options["populateQueryMetrics"] = populate_query_metrics - result = self.client_connection.ReadDatabases(options=feed_options) + result = self.client_connection.ReadDatabases(options=feed_options, **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers) return result + @distributed_trace def query_databases( self, query=None, # type: str @@ -201,6 +207,7 @@ def query_databases( populate_query_metrics=None, # type: bool feed_options=None, # type: Dict[str, Any] response_hook=None, # type: Optional[Callable] + **kwargs ): # type: (...) 
-> QueryIterable @@ -239,15 +246,15 @@ def query_databases( # (just returning a generator did not initiate the first network call, so # the headers were misleading) # This needs to change for "real" implementation - result = self.client_connection.QueryDatabases( - query=query if parameters is None else dict(query=query, parameters=parameters), options=feed_options - ) + query = query if parameters is None else dict(query=query, parameters=parameters) + result = self.client_connection.QueryDatabases(query=query, options=feed_options, **kwargs) else: - result = self.client_connection.ReadDatabases(options=feed_options) + result = self.client_connection.ReadDatabases(options=feed_options, **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers) return result + @distributed_trace def delete_database( self, database, # type: Union[str, Database, Dict[str, Any]] @@ -257,6 +264,7 @@ def delete_database( populate_query_metrics=None, # type: bool request_options=None, # type: Dict[str, Any] response_hook=None, # type: Optional[Callable] + **kwargs ): # type: (...) -> None """ @@ -285,11 +293,12 @@ def delete_database( request_options["populateQueryMetrics"] = populate_query_metrics database_link = self._get_database_link(database) - self.client_connection.DeleteDatabase(database_link, options=request_options) + self.client_connection.DeleteDatabase(database_link, options=request_options, **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers) - def get_database_account(self, response_hook=None): + @distributed_trace + def get_database_account(self, response_hook=None, **kwargs): # type: (Optional[Callable]) -> DatabaseAccount """ Retrieve the database account information. @@ -298,7 +307,7 @@ def get_database_account(self, response_hook=None): :returns: A :class:`DatabaseAccount` instance representing the Cosmos DB Database Account. 
""" - result = self.client_connection.GetDatabaseAccount() + result = self.client_connection.GetDatabaseAccount(**kwargs) if response_hook: response_hook(self.client_connection.last_response_headers) return result diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/database.py b/sdk/cosmos/azure-cosmos/azure/cosmos/database.py index 6d2335986ec0..0df8589b70c2 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/database.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/database.py @@ -25,6 +25,7 @@ from typing import Any, List, Dict, Mapping, Union, cast import six +from azure.core.tracing.decorator import distributed_trace from ._cosmos_client_connection import CosmosClientConnection from .container import Container @@ -103,6 +104,7 @@ def _get_properties(self): self.read() return self._properties + @distributed_trace def read( self, session_token=None, @@ -110,6 +112,7 @@ def read( populate_query_metrics=None, request_options=None, response_hook=None, + **kwargs ): # type: (str, Dict[str, str], bool, Dict[str, Any], Optional[Callable]) -> Dict[str, Any] """ @@ -139,13 +142,14 @@ def read( if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics - self._properties = self.client_connection.ReadDatabase(database_link, options=request_options) + self._properties = self.client_connection.ReadDatabase(database_link, options=request_options, **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers, self._properties) return self._properties + @distributed_trace def create_container( self, id, # type: str # pylint: disable=redefined-builtin @@ -161,6 +165,7 @@ def create_container( conflict_resolution_policy=None, # type: Dict[str, Any] request_options=None, # type: Dict[str, Any] response_hook=None, # type: Optional[Callable] + **kwargs ): # type: (...) 
-> Container """ @@ -228,7 +233,7 @@ def create_container( request_options["offerThroughput"] = offer_throughput data = self.client_connection.CreateContainer( - database_link=self.database_link, collection=definition, options=request_options + database_link=self.database_link, collection=definition, options=request_options, **kwargs ) if response_hook: @@ -236,6 +241,7 @@ def create_container( return Container(self.client_connection, self.database_link, data["id"], properties=data) + @distributed_trace def delete_container( self, container, # type: Union[str, Container, Dict[str, Any]] @@ -245,6 +251,7 @@ def delete_container( populate_query_metrics=None, # type: bool request_options=None, # type: Dict[str, Any] response_hook=None, # type: Optional[Callable] + **kwargs ): # type: (...) -> None """ Delete the container @@ -273,7 +280,7 @@ def delete_container( request_options["populateQueryMetrics"] = populate_query_metrics collection_link = self._get_container_link(container) - result = self.client_connection.DeleteContainer(collection_link, options=request_options) + result = self.client_connection.DeleteContainer(collection_link, options=request_options, **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers, result) @@ -302,6 +309,7 @@ def get_container_client(self, container): return Container(self.client_connection, self.database_link, id_value) + @distributed_trace def read_all_containers( self, max_item_count=None, @@ -310,6 +318,7 @@ def read_all_containers( populate_query_metrics=None, feed_options=None, response_hook=None, + **kwargs ): # type: (int, str, Dict[str, str], bool, Dict[str, Any], Optional[Callable]) -> QueryIterable """ List the containers in the database. 
@@ -342,11 +351,14 @@ def read_all_containers( if populate_query_metrics is not None: feed_options["populateQueryMetrics"] = populate_query_metrics - result = self.client_connection.ReadContainers(database_link=self.database_link, options=feed_options) + result = self.client_connection.ReadContainers( + database_link=self.database_link, options=feed_options, **kwargs + ) if response_hook: response_hook(self.client_connection.last_response_headers, result) return result + @distributed_trace def query_containers( self, query=None, @@ -357,6 +369,7 @@ def query_containers( populate_query_metrics=None, feed_options=None, response_hook=None, + **kwargs ): # type: (str, List, int, str, Dict[str, str], bool, Dict[str, Any], Optional[Callable]) -> QueryIterable """List properties for containers in the current database @@ -387,11 +400,13 @@ def query_containers( database_link=self.database_link, query=query if parameters is None else dict(query=query, parameters=parameters), options=feed_options, + **kwargs ) if response_hook: response_hook(self.client_connection.last_response_headers, result) return result + @distributed_trace def replace_container( self, container, # type: Union[str, Container, Dict[str, Any]] @@ -405,6 +420,7 @@ def replace_container( populate_query_metrics=None, # type: bool request_options=None, # type: Dict[str, Any] response_hook=None, # type: Optional[Callable] + **kwargs ): # type: (...) -> Container """ Reset the properties of the container. Property changes are persisted immediately. 
@@ -462,7 +478,7 @@ def replace_container( } container_properties = self.client_connection.ReplaceContainer( - container_link, collection=parameters, options=request_options + container_link, collection=parameters, options=request_options, **kwargs ) if response_hook: @@ -472,7 +488,8 @@ def replace_container( self.client_connection, self.database_link, container_properties["id"], properties=container_properties ) - def read_all_users(self, max_item_count=None, feed_options=None, response_hook=None): + @distributed_trace + def read_all_users(self, max_item_count=None, feed_options=None, response_hook=None, **kwargs): # type: (int, Dict[str, Any], Optional[Callable]) -> QueryIterable """ List all users in the container. @@ -487,12 +504,15 @@ def read_all_users(self, max_item_count=None, feed_options=None, response_hook=N if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - result = self.client_connection.ReadUsers(database_link=self.database_link, options=feed_options) + result = self.client_connection.ReadUsers( + database_link=self.database_link, options=feed_options, **kwargs + ) if response_hook: response_hook(self.client_connection.last_response_headers, result) return result - def query_users(self, query, parameters=None, max_item_count=None, feed_options=None, response_hook=None): + @distributed_trace + def query_users(self, query, parameters=None, max_item_count=None, feed_options=None, response_hook=None, **kwargs): # type: (str, List, int, Dict[str, Any], Optional[Callable]) -> QueryIterable """Return all users matching the given `query`. 
@@ -513,6 +533,7 @@ def query_users(self, query, parameters=None, max_item_count=None, feed_options= database_link=self.database_link, query=query if parameters is None else dict(query=query, parameters=parameters), options=feed_options, + **kwargs ) if response_hook: response_hook(self.client_connection.last_response_headers, result) @@ -538,7 +559,8 @@ def get_user_client(self, user): return User(client_connection=self.client_connection, id=id_value, database_link=self.database_link) - def create_user(self, body, request_options=None, response_hook=None): + @distributed_trace + def create_user(self, body, request_options=None, response_hook=None, **kwargs): # type: (Dict[str, Any], Dict[str, Any], Optional[Callable]) -> User """ Create a user in the container. @@ -563,7 +585,8 @@ def create_user(self, body, request_options=None, response_hook=None): if not request_options: request_options = {} # type: Dict[str, Any] - user = self.client_connection.CreateUser(database_link=self.database_link, user=body, options=request_options) + user = self.client_connection.CreateUser( + database_link=self.database_link, user=body, options=request_options, **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers, user) @@ -572,7 +595,8 @@ def create_user(self, body, request_options=None, response_hook=None): client_connection=self.client_connection, id=user["id"], database_link=self.database_link, properties=user ) - def upsert_user(self, body, request_options=None, response_hook=None): + @distributed_trace + def upsert_user(self, body, request_options=None, response_hook=None, **kwargs): # type: (Dict[str, Any], Dict[str, Any], Optional[Callable]) -> User """ Insert or update the specified user. 
@@ -588,7 +612,9 @@ def upsert_user(self, body, request_options=None, response_hook=None): if not request_options: request_options = {} # type: Dict[str, Any] - user = self.client_connection.UpsertUser(database_link=self.database_link, user=body, options=request_options) + user = self.client_connection.UpsertUser( + database_link=self.database_link, user=body, options=request_options, **kwargs + ) if response_hook: response_hook(self.client_connection.last_response_headers, user) @@ -597,7 +623,8 @@ def upsert_user(self, body, request_options=None, response_hook=None): client_connection=self.client_connection, id=user["id"], database_link=self.database_link, properties=user ) - def replace_user(self, user, body, request_options=None, response_hook=None): + @distributed_trace + def replace_user(self, user, body, request_options=None, response_hook=None, **kwargs): # type: (Union[str, User, Dict[str, Any]], Dict[str, Any], Dict[str, Any], Optional[Callable]) -> User """ Replaces the specified user if it exists in the container. @@ -614,7 +641,7 @@ def replace_user(self, user, body, request_options=None, response_hook=None): request_options = {} # type: Dict[str, Any] user = self.client_connection.ReplaceUser( - user_link=self._get_user_link(user), user=body, options=request_options + user_link=self._get_user_link(user), user=body, options=request_options, **kwargs ) if response_hook: @@ -624,7 +651,8 @@ def replace_user(self, user, body, request_options=None, response_hook=None): client_connection=self.client_connection, id=user["id"], database_link=self.database_link, properties=user ) - def delete_user(self, user, request_options=None, response_hook=None): + @distributed_trace + def delete_user(self, user, request_options=None, response_hook=None, **kwargs): # type: (Union[str, User, Dict[str, Any]], Dict[str, Any], Optional[Callable]) -> None """ Delete the specified user from the container. 
@@ -639,11 +667,14 @@ def delete_user(self, user, request_options=None, response_hook=None): if not request_options: request_options = {} # type: Dict[str, Any] - result = self.client_connection.DeleteUser(user_link=self._get_user_link(user), options=request_options) + result = self.client_connection.DeleteUser( + user_link=self._get_user_link(user), options=request_options, **kwargs + ) if response_hook: response_hook(self.client_connection.last_response_headers, result) - def read_offer(self, response_hook=None): + @distributed_trace + def read_offer(self, response_hook=None, **kwargs): # type: (Optional[Callable]) -> Offer """ Read the Offer object for this database. @@ -658,7 +689,7 @@ def read_offer(self, response_hook=None): "query": "SELECT * FROM root r WHERE r.resource=@link", "parameters": [{"name": "@link", "value": link}], } - offers = list(self.client_connection.QueryOffers(query_spec)) + offers = list(self.client_connection.QueryOffers(query_spec, **kwargs)) if not offers: raise HTTPFailure(StatusCodes.NOT_FOUND, "Could not find Offer for database " + self.database_link) @@ -667,7 +698,8 @@ def read_offer(self, response_hook=None): return Offer(offer_throughput=offers[0]["content"]["offerThroughput"], properties=offers[0]) - def replace_throughput(self, throughput, response_hook=None): + @distributed_trace + def replace_throughput(self, throughput, response_hook=None, **kwargs): # type: (int, Optional[Callable]) -> Offer """ Replace the database level throughput. 
@@ -688,7 +720,7 @@ def replace_throughput(self, throughput, response_hook=None): raise HTTPFailure(StatusCodes.NOT_FOUND, "Could not find Offer for collection " + self.database_link) new_offer = offers[0].copy() new_offer["content"]["offerThroughput"] = throughput - data = self.client_connection.ReplaceOffer(offer_link=offers[0]["_self"], offer=offers[0]) + data = self.client_connection.ReplaceOffer(offer_link=offers[0]["_self"], offer=offers[0], **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers, data) return Offer(offer_throughput=data["content"]["offerThroughput"], properties=data) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/user.py b/sdk/cosmos/azure-cosmos/azure/cosmos/user.py index ab6a59a2277c..b33a36fc81eb 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/user.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/user.py @@ -25,6 +25,7 @@ from typing import Any, List, Dict, Union, cast import six +from azure.core.tracing.decorator import distributed_trace from ._cosmos_client_connection import CosmosClientConnection from .permission import Permission @@ -54,7 +55,8 @@ def _get_properties(self): self.read() return self._properties - def read(self, request_options=None, response_hook=None): + @distributed_trace + def read(self, request_options=None, response_hook=None, **kwargs): # type: (Dict[str, Any], Optional[Callable]) -> User """ Read user propertes. 
@@ -68,14 +70,15 @@ def read(self, request_options=None, response_hook=None): if not request_options: request_options = {} # type: Dict[str, Any] - self._properties = self.client_connection.ReadUser(user_link=self.user_link, options=request_options) + self._properties = self.client_connection.ReadUser(user_link=self.user_link, options=request_options, **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers, self._properties) return self._properties - def read_all_permissions(self, max_item_count=None, feed_options=None, response_hook=None): + @distributed_trace + def read_all_permissions(self, max_item_count=None, feed_options=None, response_hook=None, **kwargs): # type: (int, Dict[str, Any], Optional[Callable]) -> QueryIterable """ List all permission for the user. @@ -90,14 +93,23 @@ def read_all_permissions(self, max_item_count=None, feed_options=None, response_ if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - result = self.client_connection.ReadPermissions(user_link=self.user_link, options=feed_options) + result = self.client_connection.ReadPermissions(user_link=self.user_link, options=feed_options, **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers, result) return result - def query_permissions(self, query, parameters=None, max_item_count=None, feed_options=None, response_hook=None): + @distributed_trace + def query_permissions( + self, + query, + parameters=None, + max_item_count=None, + feed_options=None, + response_hook=None, + **kwargs + ): # type: (str, List, int, Dict[str, Any], Optional[Callable]) -> QueryIterable """Return all permissions matching the given `query`. 
@@ -118,6 +130,7 @@ def query_permissions(self, query, parameters=None, max_item_count=None, feed_op user_link=self.user_link, query=query if parameters is None else dict(query=query, parameters=parameters), options=feed_options, + **kwargs ) if response_hook: @@ -125,7 +138,8 @@ def query_permissions(self, query, parameters=None, max_item_count=None, feed_op return result - def get_permission(self, permission, request_options=None, response_hook=None): + @distributed_trace + def get_permission(self, permission, request_options=None, response_hook=None, **kwargs): # type: (str, Dict[str, Any], Optional[Callable]) -> Permission """ Get the permission identified by `id`. @@ -142,7 +156,7 @@ def get_permission(self, permission, request_options=None, response_hook=None): request_options = {} # type: Dict[str, Any] permission = self.client_connection.ReadPermission( - permission_link=self._get_permission_link(permission), options=request_options + permission_link=self._get_permission_link(permission), options=request_options, **kwargs ) if response_hook: @@ -156,7 +170,8 @@ def get_permission(self, permission, request_options=None, response_hook=None): properties=permission, ) - def create_permission(self, body, request_options=None, response_hook=None): + @distributed_trace + def create_permission(self, body, request_options=None, response_hook=None, **kwargs): # type: (Dict[str, Any], Dict[str, Any], Optional[Callable]) -> Permission """ Create a permission for the user. 
@@ -173,7 +188,7 @@ def create_permission(self, body, request_options=None, response_hook=None): request_options = {} # type: Dict[str, Any] permission = self.client_connection.CreatePermission( - user_link=self.user_link, permission=body, options=request_options + user_link=self.user_link, permission=body, options=request_options, **kwargs ) if response_hook: @@ -187,7 +202,8 @@ def create_permission(self, body, request_options=None, response_hook=None): properties=permission, ) - def upsert_permission(self, body, request_options=None, response_hook=None): + @distributed_trace + def upsert_permission(self, body, request_options=None, response_hook=None, **kwargs): # type: (Dict[str, Any], Dict[str, Any], Optional[Callable]) -> Permission """ Insert or update the specified permission. @@ -204,7 +220,7 @@ def upsert_permission(self, body, request_options=None, response_hook=None): request_options = {} # type: Dict[str, Any] permission = self.client_connection.UpsertPermission( - user_link=self.user_link, permission=body, options=request_options + user_link=self.user_link, permission=body, options=request_options, **kwargs ) if response_hook: @@ -218,7 +234,8 @@ def upsert_permission(self, body, request_options=None, response_hook=None): properties=permission, ) - def replace_permission(self, permission, body, request_options=None, response_hook=None): + @distributed_trace + def replace_permission(self, permission, body, request_options=None, response_hook=None, **kwargs): # type: (str, Dict[str, Any], Dict[str, Any], Optional[Callable]) -> Permission """ Replaces the specified permission if it exists for the user. 
@@ -235,7 +252,7 @@ def replace_permission(self, permission, body, request_options=None, response_ho request_options = {} # type: Dict[str, Any] permission = self.client_connection.ReplacePermission( - permission_link=self._get_permission_link(permission), permission=body, options=request_options + permission_link=self._get_permission_link(permission), permission=body, options=request_options, **kwargs ) if response_hook: @@ -249,7 +266,8 @@ def replace_permission(self, permission, body, request_options=None, response_ho properties=permission, ) - def delete_permission(self, permission, request_options=None, response_hook=None): + @distributed_trace + def delete_permission(self, permission, request_options=None, response_hook=None, **kwargs): # type: (str, Dict[str, Any], Optional[Callable]) -> None """ Delete the specified permission from the user. @@ -266,7 +284,7 @@ def delete_permission(self, permission, request_options=None, response_hook=None request_options = {} # type: Dict[str, Any] result = self.client_connection.DeletePermission( - permission_link=self._get_permission_link(permission), options=request_options + permission_link=self._get_permission_link(permission), options=request_options, **kwargs ) if response_hook: diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/version.py b/sdk/cosmos/azure-cosmos/azure/cosmos/version.py new file mode 100644 index 000000000000..ea688752d60b --- /dev/null +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/version.py @@ -0,0 +1,22 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the 
following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +VERSION = "4.0.0b2" diff --git a/sdk/cosmos/azure-cosmos/setup.py b/sdk/cosmos/azure-cosmos/setup.py index 1f4f8c94d778..a2bc049eaf9d 100644 --- a/sdk/cosmos/azure-cosmos/setup.py +++ b/sdk/cosmos/azure-cosmos/setup.py @@ -7,7 +7,7 @@ # pylint:disable=missing-docstring import re -import os.path +import os from io import open from setuptools import find_packages, setup @@ -20,6 +20,10 @@ # a-b-c => a.b.c NAMESPACE_NAME = PACKAGE_NAME.replace("-", ".") +# Version extraction inspired from 'requests' +with open(os.path.join(PACKAGE_FOLDER_PATH, 'version.py'), 'r') as fd: + version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', + fd.read(), re.MULTILINE).group(1) with open("README.md", encoding="utf-8") as f: README = f.read() @@ -28,7 +32,7 @@ setup( name=PACKAGE_NAME, - version='4.0.0b1', + version=version, description="Microsoft Azure {} Client Library for Python".format(PACKAGE_PPRINT_NAME), long_description=README + "\n\n" + HISTORY, long_description_content_type="text/markdown", @@ -66,7 +70,7 @@ ), install_requires=[ 'six >=1.6', - 'requests>=2.18.4' + 'azure-core<2.0.0,>=1.0.0b2' ], extras_require={ ":python_version<'3.0'": ["azure-nspkg"], diff --git a/sdk/cosmos/azure-cosmos/test/crud_tests.py b/sdk/cosmos/azure-cosmos/test/crud_tests.py index 43a1d2096d24..0f093271d7c3 100644 --- a/sdk/cosmos/azure-cosmos/test/crud_tests.py 
+++ b/sdk/cosmos/azure-cosmos/test/crud_tests.py @@ -1946,7 +1946,7 @@ def __get_first(array): root_included_path = __get_first([included_path for included_path in indexing_policy['includedPaths'] if included_path['path'] == '/*']) - self.assertFalse('indexes' in root_included_path) + self.assertFalse(root_included_path.get('indexes')) def test_client_request_timeout(self): connection_policy = documents.ConnectionPolicy() @@ -2565,8 +2565,8 @@ def test_get_resource_with_dictionary_and_object(self): self.assertEquals(read_permission.id, created_permission.id) def _MockExecuteFunction(self, function, *args, **kwargs): - self.last_headers.append(args[5]['headers'][HttpHeaders.PartitionKey] - if HttpHeaders.PartitionKey in args[5]['headers'] else '') + self.last_headers.append(args[4].headers[HttpHeaders.PartitionKey] + if HttpHeaders.PartitionKey in args[4].headers else '') return self.OriginalExecuteFunction(function, *args, **kwargs) if __name__ == '__main__': diff --git a/sdk/cosmos/azure-cosmos/test/multimaster_tests.py b/sdk/cosmos/azure-cosmos/test/multimaster_tests.py index 11c740244dcc..e58dff33a4ae 100644 --- a/sdk/cosmos/azure-cosmos/test/multimaster_tests.py +++ b/sdk/cosmos/azure-cosmos/test/multimaster_tests.py @@ -123,8 +123,8 @@ def _MockExecuteFunction(self, function, *args, **kwargs): return {constants._Constants.EnableMultipleWritableLocations: self.EnableMultipleWritableLocations}, {} else: if len(args) > 0: - self.last_headers.append(HttpHeaders.AllowTentativeWrites in args[5]['headers'] - and args[5]['headers'][HttpHeaders.AllowTentativeWrites] == 'true') + self.last_headers.append(HttpHeaders.AllowTentativeWrites in args[4].headers + and args[4].headers[HttpHeaders.AllowTentativeWrites] == 'true') return self.OriginalExecuteFunction(function, *args, **kwargs) diff --git a/sdk/cosmos/azure-cosmos/test/proxy_tests.py b/sdk/cosmos/azure-cosmos/test/proxy_tests.py index 552f76ee301c..09857ebf84f5 100644 --- 
a/sdk/cosmos/azure-cosmos/test/proxy_tests.py +++ b/sdk/cosmos/azure-cosmos/test/proxy_tests.py @@ -30,7 +30,7 @@ else: from http.server import BaseHTTPRequestHandler, HTTPServer from threading import Thread -from requests.exceptions import ProxyError +from azure.core.exceptions import ServiceRequestError pytestmark = pytest.mark.cosmosEmulator @@ -104,7 +104,7 @@ def test_failure_with_wrong_proxy(self): client = cosmos_client_connection.CosmosClientConnection(self.host, {'masterKey': self.masterKey}, connection_policy) self.fail("Client instantiation is not expected") except Exception as e: - self.assertTrue(type(e) is ProxyError, msg="Error is not a ProxyError") + self.assertTrue(type(e) is ServiceRequestError, msg="Error is not a ServiceRequestError") if __name__ == "__main__": #import sys;sys.argv = ['', 'Test.testName'] diff --git a/sdk/cosmos/azure-cosmos/test/session_tests.py b/sdk/cosmos/azure-cosmos/test/session_tests.py index cdd0799dafd5..f3e7e3e9fe93 100644 --- a/sdk/cosmos/azure-cosmos/test/session_tests.py +++ b/sdk/cosmos/azure-cosmos/test/session_tests.py @@ -37,12 +37,12 @@ def setUpClass(cls): cls.created_db = test_config._test_config.create_database_if_not_exist(cls.client) cls.created_collection = test_config._test_config.create_multi_partition_collection_with_custom_pk_if_not_exist(cls.client) - def _MockRequest(self, global_endpoint_manager, request, connection_policy, requests_session, path, request_options, request_body): - if HttpHeaders.SessionToken in request_options['headers']: - self.last_session_token_sent = request_options['headers'][HttpHeaders.SessionToken] + def _MockRequest(self, global_endpoint_manager, request_params, connection_policy, pipeline_client, request): + if HttpHeaders.SessionToken in request.headers: + self.last_session_token_sent = request.headers[HttpHeaders.SessionToken] else: self.last_session_token_sent = None - return self._OriginalRequest(global_endpoint_manager, request, connection_policy, requests_session, 
path, request_options, request_body) + return self._OriginalRequest(global_endpoint_manager, request_params, connection_policy, pipeline_client, request) def test_session_token_not_sent_for_master_resource_ops (self): self._OriginalRequest = synchronized_request._Request From dc7d9e82563f84f528c13b97053e541099c08d48 Mon Sep 17 00:00:00 2001 From: annatisch Date: Mon, 9 Sep 2019 14:06:50 -0700 Subject: [PATCH 16/29] [Cosmos] Applying track 2 SDK guidelines (#7021) * Updated dependencies * Added core pipeline * Ignore test config * Fixed indexes test * Refactored request creation * Fixed index test * Added trace decorators * Bumped version * Updated policies * Renamed request_options -> request_params * Renamed clients * Updated with azure-core errors * Fixed test warnings * Updated config * PR fixes * Fixed init user import * Fixed init clients * Started revising constructors * Test conn str constructor * Update iterables with core paging * Added context managers * Reverted storage changes * Updated constructor * Mypy and Pylint * Renamed all listing operations * Some mypy fixes * Cleaned up method signatures * Fix pylint * Propagate kwargs * Fix pylint * Some mypy fixes * Updated readme and release notes * Fix for passing in extra headers * Reverted credentials * Review feedback * Fix pylint * Fixed samples * Updated docstrings * Fixed whitespace and imports * Some mypy fixes * Mypy fixes * Removed continuation token support * Pylint fix * Docs tweaks * Updated continuation token * Updated response header --- sdk/cosmos/azure-cosmos/HISTORY.md | 42 ++ sdk/cosmos/azure-cosmos/README.md | 40 +- sdk/cosmos/azure-cosmos/azure/__init__.py | 2 +- .../azure-cosmos/azure/cosmos/__init__.py | 18 +- sdk/cosmos/azure-cosmos/azure/cosmos/_base.py | 40 ++ .../azure/cosmos/_cosmos_client_connection.py | 510 ++++++++++-------- .../azure/cosmos/_default_retry_policy.py | 2 +- .../_endpoint_discovery_retry_policy.py | 2 +- .../_execution_context/document_producer.py | 3 +- 
.../execution_dispatcher.py | 8 +- .../azure/cosmos/_global_endpoint_manager.py | 4 +- .../azure/cosmos/_query_iterable.py | 92 ++-- .../cosmos/_resource_throttle_retry_policy.py | 2 +- .../azure/cosmos/_retry_utility.py | 2 +- .../azure/cosmos/_runtime_constants.py | 2 +- .../azure-cosmos/azure/cosmos/_session.py | 10 +- .../azure/cosmos/_session_retry_policy.py | 2 +- .../azure/cosmos/_synchronized_request.py | 27 +- .../azure-cosmos/azure/cosmos/_utils.py | 10 +- .../azure/cosmos/_vector_session_token.py | 18 +- .../azure-cosmos/azure/cosmos/container.py | 430 +++++++-------- .../azure/cosmos/cosmos_client.py | 377 ++++++++----- .../azure-cosmos/azure/cosmos/database.py | 427 +++++++-------- .../azure-cosmos/azure/cosmos/errors.py | 56 +- .../azure/cosmos/http_constants.py | 29 +- sdk/cosmos/azure-cosmos/azure/cosmos/offer.py | 1 + .../azure-cosmos/azure/cosmos/permission.py | 5 +- .../azure-cosmos/azure/cosmos/scripts.py | 327 +++++------ sdk/cosmos/azure-cosmos/azure/cosmos/user.py | 165 +++--- .../samples/ChangeFeedManagement/Program.py | 25 +- .../samples/CollectionManagement/Program.py | 97 ++-- .../samples/DatabaseManagement/Program.py | 25 +- .../samples/DocumentManagement/Program.py | 23 +- .../samples/IndexManagement/Program.py | 269 +++------ .../MultiMasterOperations/ConflictWorker.py | 81 ++- .../MultiMasterScenario.py | 7 +- .../samples/MultiMasterOperations/Worker.py | 10 +- .../Program.py | 39 +- .../azure-cosmos/test/aggregate_tests.py | 16 +- sdk/cosmos/azure-cosmos/test/conftest.py | 7 +- sdk/cosmos/azure-cosmos/test/crud_tests.py | 158 +++--- .../azure-cosmos/test/encoding_tests.py | 2 +- sdk/cosmos/azure-cosmos/test/env_test.py | 2 +- .../azure-cosmos/test/globaldb_mock_tests.py | 12 +- .../azure-cosmos/test/globaldb_tests.py | 38 +- .../azure-cosmos/test/location_cache_tests.py | 18 +- .../azure-cosmos/test/multiOrderbyTests.py | 2 +- .../azure-cosmos/test/multimaster_tests.py | 4 +- sdk/cosmos/azure-cosmos/test/orderby_tests.py | 30 +- 
.../azure-cosmos/test/partition_key_tests.py | 26 +- .../test/query_execution_context_tests.py | 6 +- sdk/cosmos/azure-cosmos/test/query_tests.py | 32 +- .../azure-cosmos/test/retry_policy_tests.py | 26 +- .../azure-cosmos/test/routing_map_tests.py | 2 +- .../test/session_container_tests.py | 2 +- sdk/cosmos/azure-cosmos/test/session_tests.py | 14 +- .../test/session_token_unit_tests.py | 8 +- .../test/streaming_failover_test.py | 13 +- sdk/cosmos/azure-cosmos/test/test_config.py | 16 +- sdk/cosmos/azure-cosmos/test/ttl_tests.py | 4 +- sdk/cosmos/azure-cosmos/test/utils_tests.py | 18 +- 61 files changed, 1855 insertions(+), 1830 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/HISTORY.md b/sdk/cosmos/azure-cosmos/HISTORY.md index 28da1d733e79..a72609a1be0a 100644 --- a/sdk/cosmos/azure-cosmos/HISTORY.md +++ b/sdk/cosmos/azure-cosmos/HISTORY.md @@ -1,4 +1,46 @@ # Change Log azure-cosmos + +## Version 4.0.0b2: + +Version 4.0.0b2 is the second iteration in our efforts to build a more Pythonic client library. + +**Breaking changes** + +- The client connection has been adapted to consume the HTTP pipeline defined in `azure.core.pipeline`. +- Interactive objects have now been renamed as proxies. This includes: + - `Database` -> `DatabaseProxy` + - `User` -> `UserProxy` + - `Container` -> `ContainerProxy` + - `Scripts` -> `ScriptsProxy` +- The constructor of `CosmosClient` has been updated: + - The `auth` parameter has been renamed to `credential` and will now take an authentication type directly. This means the master key value, a dictionary of resource tokens, or a list of permissions can be passed in. However the old dictionary format is still supported. 
+  - The `connection_policy` parameter has been made a keyword only parameter, and while it is still supported, each of the individual attributes of the policy can now be passed in as explicit keyword arguments:
+    - `request_timeout`
+    - `media_request_timeout`
+    - `connection_mode`
+    - `media_read_mode`
+    - `proxy_config`
+    - `enable_endpoint_discovery`
+    - `preferred_locations`
+    - `multiple_write_locations`
+- A new classmethod constructor has been added to `CosmosClient` to enable creation via a connection string retrieved from the Azure portal.
+- Some `read_all` operations have been renamed to `list` operations:
+  - `CosmosClient.read_all_databases` -> `CosmosClient.list_databases`
+  - `Container.read_all_conflicts` -> `ContainerProxy.list_conflicts`
+  - `Database.read_all_containers` -> `DatabaseProxy.list_containers`
+  - `Database.read_all_users` -> `DatabaseProxy.list_users`
+  - `User.read_all_permissions` -> `UserProxy.list_permissions`
+- All operations that take `request_options` or `feed_options` parameters, these have been moved to keyword only parameters. In addition, while these options dictionaries are still supported, each of the individual options within the dictionary are now supported as explicit keyword arguments.
+- The error hierarchy is now inherited from `azure.core.AzureError` instead of `CosmosError` which has been removed.
+  - `HTTPFailure` has been renamed to `CosmosHttpResponseError`
+  - `JSONParseFailure` has been removed and replaced by `azure.core.DecodeError`
+  - Added additional errors for specific response codes:
+    - `CosmosResourceNotFoundError` for status 404
+    - `CosmosResourceExistsError` for status 409
+    - `CosmosAccessConditionFailedError` for status 412
+- `CosmosClient` can now be run in a context manager to handle closing the client connection.
+- Iterable responses (e.g. query responses and list responses) are now of type `azure.core.paging.ItemPaged`.
The method `fetch_next_block` has been replaced by a secondary iterator, accessed by the `by_page` method. + ## Version 4.0.0b1: Version 4.0.0b1 is the first preview of our efforts to create a user-friendly and Pythonic client library for Azure Cosmos. For more information about this, and preview releases of other Azure SDK libraries, please visit https://aka.ms/azure-sdk-preview1-python. diff --git a/sdk/cosmos/azure-cosmos/README.md b/sdk/cosmos/azure-cosmos/README.md index b3ca6d2ab67d..84bf724222dd 100644 --- a/sdk/cosmos/azure-cosmos/README.md +++ b/sdk/cosmos/azure-cosmos/README.md @@ -63,23 +63,21 @@ export ACCOUNT_KEY=$(az cosmosdb list-keys --resource-group $RES_GROUP --name $A Once you've populated the `ACCOUNT_URI` and `ACCOUNT_KEY` environment variables, you can create the [CosmosClient][ref_cosmosclient]. ```Python -from azure.cosmos import HTTPFailure, CosmosClient, Container, Database, PartitionKey +from azure.cosmos import CosmosClient, Container, Database, PartitionKey, errors import os url = os.environ['ACCOUNT_URI'] key = os.environ['ACCOUNT_KEY'] -client = CosmosClient(url, auth = { - 'masterKey': key -}) +client = CosmosClient(url, credential=key) ``` ## Usage Once you've initialized a [CosmosClient][ref_cosmosclient], you can interact with the primary resource types in Cosmos DB: -* [Database][ref_database]: A Cosmos DB account can contain multiple databases. When you create a database, you specify the API you'd like to use when interacting with its documents: SQL, MongoDB, Gremlin, Cassandra, or Azure Table. Use the [Database][ref_database] object to manage its containers. +* [Database][ref_database]: A Cosmos DB account can contain multiple databases. When you create a database, you specify the API you'd like to use when interacting with its documents: SQL, MongoDB, Gremlin, Cassandra, or Azure Table. Use the [DatabaseProxy][ref_database] object to manage its containers. 
-* [Container][ref_container]: A container is a collection of JSON documents. You create (insert), read, update, and delete items in a container by using methods on the [Container][ref_container] object. +* [Container][ref_container]: A container is a collection of JSON documents. You create (insert), read, update, and delete items in a container by using methods on the [ContainerProxy][ref_container] object. * [Item][ref_item]: An Item is the dictionary-like representation of a JSON document stored in a container. Each Item you add to a container must include an `id` key with a value that uniquely identifies the item within the container. @@ -106,9 +104,7 @@ After authenticating your [CosmosClient][ref_cosmosclient], you can work with an database_name = 'testDatabase' try: database = client.create_database(database_name) -except HTTPFailure as e: - if e.status_code != 409: - raise +except errors.CosmosResourceExistsError: database = client.get_database_client(database_name) ``` @@ -120,13 +116,13 @@ This example creates a container with default settings. If a container with the container_name = 'products' try: container = database.create_container(id=container_name, partition_key=PartitionKey(path="/productName")) -except HTTPFailure as e: - if e.status_code != 409: - raise +except errors.CosmosResourceExistsError: container = database.get_container_client(container_name) +except errors.CosmosHttpResponseError: + raise ``` -The preceding snippet also handles the [HTTPFailure][ref_httpfailure] exception if the container creation failed. For more information on error handling and troubleshooting, see the [Troubleshooting](#troubleshooting) section. +The preceding snippet also handles the [CosmosHttpResponseError][ref_httpfailure] exception if the container creation failed. For more information on error handling and troubleshooting, see the [Troubleshooting](#troubleshooting) section. 
### Get an existing container @@ -139,7 +135,7 @@ container = database.get_container_client(container_name) ### Insert data -To insert items into a container, pass a dictionary containing your data to [Container.upsert_item][ref_container_upsert_item]. Each item you add to a container must include an `id` key with a value that uniquely identifies the item within the container. +To insert items into a container, pass a dictionary containing your data to [ContainerProxy.upsert_item][ref_container_upsert_item]. Each item you add to a container must include an `id` key with a value that uniquely identifies the item within the container. This example inserts several items into the container, each with a unique `id`: @@ -158,7 +154,7 @@ for i in range(1, 10): ### Delete data -To delete items from a container, use [Container.delete_item][ref_container_delete_item]. The SQL API in Cosmos DB does not support the SQL `DELETE` statement. +To delete items from a container, use [ContainerProxy.delete_item][ref_container_delete_item]. The SQL API in Cosmos DB does not support the SQL `DELETE` statement. ```Python for item in container.query_items(query='SELECT * FROM products p WHERE p.productModel = "DISCONTINUED"', @@ -168,7 +164,7 @@ for item in container.query_items(query='SELECT * FROM products p WHERE p.produc ### Query the database -A Cosmos DB SQL API database supports querying the items in a container with [Container.query_items][ref_container_query_items] using SQL-like syntax. +A Cosmos DB SQL API database supports querying the items in a container with [ContainerProxy.query_items][ref_container_query_items] using SQL-like syntax. This example queries a container for items with a specific `id`: @@ -186,7 +182,7 @@ for item in container.query_items( > NOTE: Although you can specify any value for the container name in the `FROM` clause, we recommend you use the container name for consistency. 
-Perform parameterized queries by passing a dictionary containing the parameters and their values to [Container.query_items][ref_container_query_items]: +Perform parameterized queries by passing a dictionary containing the parameters and their values to [ContainerProxy.query_items][ref_container_query_items]: ```Python discontinued_items = container.query_items( @@ -243,13 +239,11 @@ For example, if you try to create a container using an ID (name) that's already ```Python try: database.create_container(id=container_name, partition_key=PartitionKey(path="/productName") -except HTTPFailure as e: - if e.status_code == 409: - print("""Error creating container. +except errors.CosmosResourceExistsError: + print("""Error creating container HTTP status code 409: The ID (name) provided for the container is already in use. The container name must be unique within the database.""") - else: - raise + ``` ## More sample code @@ -285,7 +279,7 @@ For more extensive documentation on the Cosmos DB service, see the [Azure Cosmos [ref_cosmosclient_create_database]: http://cosmosproto.westus.azurecontainer.io/#azure.cosmos.CosmosClient.create_database [ref_cosmosclient]: http://cosmosproto.westus.azurecontainer.io/#azure.cosmos.CosmosClient [ref_database]: http://cosmosproto.westus.azurecontainer.io/#azure.cosmos.Database -[ref_httpfailure]: https://docs.microsoft.com/python/api/azure-cosmos/azure.cosmos.errors.httpfailure +[ref_httpfailure]: https://docs.microsoft.com/python/api/azure-cosmos/azure.cosmos.errors.CosmosHttpResponseError [ref_item]: http://cosmosproto.westus.azurecontainer.io/#azure.cosmos.Item [sample_database_mgmt]: https://github.com/binderjoe/cosmos-python-prototype/blob/master/examples/databasemanagementsample.py [sample_document_mgmt]: https://github.com/binderjoe/cosmos-python-prototype/blob/master/examples/documentmanagementsample.py diff --git a/sdk/cosmos/azure-cosmos/azure/__init__.py b/sdk/cosmos/azure-cosmos/azure/__init__.py index 
8db66d3d0f0f..d55ccad1f573 100644 --- a/sdk/cosmos/azure-cosmos/azure/__init__.py +++ b/sdk/cosmos/azure-cosmos/azure/__init__.py @@ -1 +1 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/__init__.py b/sdk/cosmos/azure-cosmos/azure/cosmos/__init__.py index 07f3ca79fb93..93920f280f41 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/__init__.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/__init__.py @@ -19,9 +19,11 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -from .container import Container +from .container import ContainerProxy from .cosmos_client import CosmosClient -from .database import Database +from .database import DatabaseProxy +from .user import UserProxy +from .scripts import ScriptsProxy from .documents import ( ConsistencyLevel, DataType, @@ -35,18 +37,16 @@ ) from .partition_key import PartitionKey from .permission import Permission -from .scripts import Scripts -from .user import User from .version import VERSION __all__ = ( - "Container", "CosmosClient", - "Database", + "DatabaseProxy", + "ContainerProxy", "PartitionKey", "Permission", - "Scripts", - "User", + "ScriptsProxy", + "UserProxy", "ConsistencyLevel", "DataType", "IndexKind", @@ -57,4 +57,4 @@ "TriggerOperation", "TriggerType", ) -__version__ = VERSION \ No newline at end of file +__version__ = VERSION diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py index 148e73a3a16c..265d108e178b 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py @@ -27,6 +27,7 @@ import json import uuid import binascii +from typing import Dict, Any import six from six.moves.urllib.parse import quote as urllib_quote @@ -39,6 +40,45 @@ # pylint: disable=protected-access +_COMMON_OPTIONS = { + 
'initial_headers': 'initialHeaders', + 'pre_trigger_include': 'preTriggerInclude', + 'post_trigger_include': 'postTriggerInclude', + 'max_item_count': 'maxItemCount', + 'access_condition': 'accessCondition', + 'indexing_directive': 'indexingDirective', + 'consistency_level': 'consistencyLevel', + 'session_token': 'sessionToken', + 'enable_scan_in_query': 'enableScanInQuery', + 'resource_token_expiry_seconds': 'resourceTokenExpirySeconds', + 'offer_type': 'offerType', + 'offer_throughput': 'offerThroughput', + 'partition_key': 'partitionKey', + 'enable_cross_partition_query': 'enableCrossPartitionQuery', + 'populate_query_metrics': 'populateQueryMetrics', + 'enable_script_logging': 'enableScriptLogging', + 'offer_enable_ru_per_minute_throughput': 'offerEnableRUPerMinuteThroughput', + 'disable_ru_per_minute_usage': 'disableRUPerMinuteUsage', + 'change_feed': 'changeFeed', + 'continuation': 'continuation', + 'is_start_from_beginning': 'isStartFromBeginning', + 'populate_partition_key_range_statistics': 'populatePartitionKeyRangeStatistics', + 'populate_quota_info': 'populateQuotaInfo' +} + +def build_options(kwargs): + # type: (Dict[str, Any]) -> Dict[str, Any] + options = kwargs.pop('request_options', kwargs.pop('feed_options', {})) + for key, value in _COMMON_OPTIONS.items(): + if key in kwargs: + options[value] = kwargs.pop(key) + + if 'if_match' in kwargs: + options['accessCondition'] = {'type': 'IfMatch', 'condition': kwargs.pop('if_match')} + if 'if_none_match' in kwargs: + options['accessCondition'] = {'type': 'IfNoneMatch', 'condition': kwargs.pop('if_none_match')} + return options + def GetHeaders( # pylint: disable=too-many-statements,too-many-branches cosmos_client_connection, diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py index 23c5c4c432d7..1fa3d3db9934 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py +++ 
b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py @@ -24,22 +24,22 @@ """Document client class for the Azure Cosmos database service. """ -import platform - -import requests +from typing import Dict, Any, Optional import six -from azure.core import PipelineClient -from azure.core.pipeline.policies import ( +from azure.core.paging import ItemPaged # type: ignore +from azure.core import PipelineClient # type: ignore +from azure.core.pipeline.policies import ( # type: ignore ContentDecodePolicy, HeadersPolicy, UserAgentPolicy, NetworkTraceLoggingPolicy, CustomHookPolicy, ProxyPolicy) -from azure.core.pipeline.policies.distributed_tracing import DistributedTracingPolicy +from azure.core.pipeline.policies.distributed_tracing import DistributedTracingPolicy # type: ignore from . import _base as base from . import documents +from .documents import ConnectionPolicy from . import _constants as constants from . import http_constants from . import _query_iterable as query_iterable @@ -51,7 +51,6 @@ from . import _session from . import _utils from .partition_key import _Undefined, _Empty -from .version import VERSION # pylint: disable=protected-access @@ -81,8 +80,14 @@ class _QueryCompatibilityMode: _DefaultStringRangePrecision = -1 def __init__( - self, url_connection, auth, connection_policy=None, consistency_level=documents.ConsistencyLevel.Session + self, + url_connection, # type: str + auth, # type: Dict[str, Any] + connection_policy=None, # type: Optional[ConnectionPolicy] + consistency_level=documents.ConsistencyLevel.Session, # type: str + **kwargs # type: Any ): + # type: (...) -> None """ :param str url_connection: The URL for connecting to the DB server. 
@@ -112,16 +117,15 @@ def __init__( id_ = resource_parts[-1] self.resource_tokens[id_] = permission_feed["_token"] - self.connection_policy = connection_policy or documents.ConnectionPolicy() + self.connection_policy = connection_policy or ConnectionPolicy() - self.partition_resolvers = {} + self.partition_resolvers = {} # type: Dict[str, Any] - self.partition_key_definition_cache = {} + self.partition_key_definition_cache = {} # type: Dict[str, Any] self.default_headers = { http_constants.HttpHeaders.CacheControl: "no-cache", http_constants.HttpHeaders.Version: http_constants.Versions.CurrentVersion, - http_constants.HttpHeaders.UserAgent: _utils.get_user_agent(), # For single partition query with aggregate functions we would try to accumulate the results on the SDK. # We need to set continuation as not expected. http_constants.HttpHeaders.IsContinuationExpected: False, @@ -139,30 +143,26 @@ def __init__( # via setter self.session = _session.Session(self.url_connection) else: - self.session = None + self.session = None # type: ignore self._useMultipleWriteLocations = False self._global_endpoint_manager = global_endpoint_manager._GlobalEndpointManager(self) - proxies = {} + proxies = kwargs.pop('proxies', {}) if self.connection_policy.ProxyConfiguration and self.connection_policy.ProxyConfiguration.Host: - host = connection_policy.ProxyConfiguration.Host + host = self.connection_policy.ProxyConfiguration.Host url = six.moves.urllib.parse.urlparse(host) - proxy = host if url.port else host + ":" + str(connection_policy.ProxyConfiguration.Port) - proxies = {url.scheme : proxy} - user_agent = "azsdk-python-cosmos/{} Python/{} ({})".format( - VERSION, - platform.python_version(), - platform.platform()) + proxy = host if url.port else host + ":" + str(self.connection_policy.ProxyConfiguration.Port) + proxies.update({url.scheme : proxy}) policies = [ - HeadersPolicy(), + HeadersPolicy(**kwargs), ProxyPolicy(proxies=proxies), - UserAgentPolicy(base_user_agent=user_agent), 
+ UserAgentPolicy(base_user_agent=_utils.get_user_agent(), **kwargs), ContentDecodePolicy(), - CustomHookPolicy(), + CustomHookPolicy(**kwargs), DistributedTracingPolicy(), - NetworkTraceLoggingPolicy(), + NetworkTraceLoggingPolicy(**kwargs), ] self.pipeline_client = PipelineClient(url_connection, "empty-config", policies=policies) @@ -235,7 +235,7 @@ def GetPartitionResolver(self, database_link): return self.partition_resolvers.get(base.TrimBeginningAndEndingSlashes(database_link)) - def CreateDatabase(self, database, options=None): + def CreateDatabase(self, database, options=None, **kwargs): """Creates a database. :param dict database: @@ -253,9 +253,9 @@ def CreateDatabase(self, database, options=None): CosmosClientConnection.__ValidateResource(database) path = "/dbs" - return self.Create(database, path, "dbs", None, None, options) + return self.Create(database, path, "dbs", None, None, options, **kwargs) - def ReadDatabase(self, database_link, options=None): + def ReadDatabase(self, database_link, options=None, **kwargs): """Reads a database. :param str database_link: @@ -273,9 +273,9 @@ def ReadDatabase(self, database_link, options=None): path = base.GetPathFromLink(database_link) database_id = base.GetResourceIdOrFullNameFromLink(database_link) - return self.Read(path, "dbs", database_id, None, options) + return self.Read(path, "dbs", database_id, None, options, **kwargs) - def ReadDatabases(self, options=None): + def ReadDatabases(self, options=None, **kwargs): """Reads all databases. :param dict options: @@ -290,9 +290,9 @@ def ReadDatabases(self, options=None): if options is None: options = {} - return self.QueryDatabases(None, options) + return self.QueryDatabases(None, options, **kwargs) - def QueryDatabases(self, query, options=None): + def QueryDatabases(self, query, options=None, **kwargs): """Queries databases. 
:param (str or dict) query: @@ -309,13 +309,18 @@ def QueryDatabases(self, query, options=None): def fetch_fn(options): return ( - self.__QueryFeed("/dbs", "dbs", "", lambda r: r["Databases"], lambda _, b: b, query, options), + self.__QueryFeed( + "/dbs", "dbs", "", lambda r: r["Databases"], + lambda _, b: b, query, options, **kwargs + ), self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) - def ReadContainers(self, database_link, options=None): + def ReadContainers(self, database_link, options=None, **kwargs): """Reads all collections in a database. :param str database_link: @@ -331,9 +336,9 @@ def ReadContainers(self, database_link, options=None): if options is None: options = {} - return self.QueryContainers(database_link, None, options) + return self.QueryContainers(database_link, None, options, **kwargs) - def QueryContainers(self, database_link, query, options=None): + def QueryContainers(self, database_link, query, options=None, **kwargs): """Queries collections in a database. :param str database_link: @@ -356,14 +361,17 @@ def QueryContainers(self, database_link, query, options=None): def fetch_fn(options): return ( self.__QueryFeed( - path, "colls", database_id, lambda r: r["DocumentCollections"], lambda _, body: body, query, options + path, "colls", database_id, lambda r: r["DocumentCollections"], + lambda _, body: body, query, options, **kwargs ), self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) - def CreateContainer(self, database_link, collection, options=None): + def CreateContainer(self, database_link, collection, options=None, **kwargs): """Creates a collection in a database. 
:param str database_link: @@ -383,9 +391,9 @@ def CreateContainer(self, database_link, collection, options=None): CosmosClientConnection.__ValidateResource(collection) path = base.GetPathFromLink(database_link, "colls") database_id = base.GetResourceIdOrFullNameFromLink(database_link) - return self.Create(collection, path, "colls", database_id, None, options) + return self.Create(collection, path, "colls", database_id, None, options, **kwargs) - def ReplaceContainer(self, collection_link, collection, options=None): + def ReplaceContainer(self, collection_link, collection, options=None, **kwargs): """Replaces a collection and return it. :param str collection_link: @@ -407,9 +415,9 @@ def ReplaceContainer(self, collection_link, collection, options=None): CosmosClientConnection.__ValidateResource(collection) path = base.GetPathFromLink(collection_link) collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) - return self.Replace(collection, path, "colls", collection_id, None, options) + return self.Replace(collection, path, "colls", collection_id, None, options, **kwargs) - def ReadContainer(self, collection_link, options=None): + def ReadContainer(self, collection_link, options=None, **kwargs): """Reads a collection. :param str collection_link: @@ -428,9 +436,9 @@ def ReadContainer(self, collection_link, options=None): path = base.GetPathFromLink(collection_link) collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) - return self.Read(path, "colls", collection_id, None, options) + return self.Read(path, "colls", collection_id, None, options, **kwargs) - def CreateUser(self, database_link, user, options=None): + def CreateUser(self, database_link, user, options=None, **kwargs): """Creates a user. 
:param str database_link: @@ -450,9 +458,9 @@ def CreateUser(self, database_link, user, options=None): options = {} database_id, path = self._GetDatabaseIdWithPathForUser(database_link, user) - return self.Create(user, path, "users", database_id, None, options) + return self.Create(user, path, "users", database_id, None, options, **kwargs) - def UpsertUser(self, database_link, user, options=None): + def UpsertUser(self, database_link, user, options=None, **kwargs): """Upserts a user. :param str database_link: @@ -470,7 +478,7 @@ def UpsertUser(self, database_link, user, options=None): options = {} database_id, path = self._GetDatabaseIdWithPathForUser(database_link, user) - return self.Upsert(user, path, "users", database_id, None, options) + return self.Upsert(user, path, "users", database_id, None, options, **kwargs) def _GetDatabaseIdWithPathForUser(self, database_link, user): # pylint: disable=no-self-use CosmosClientConnection.__ValidateResource(user) @@ -478,7 +486,7 @@ def _GetDatabaseIdWithPathForUser(self, database_link, user): # pylint: disable database_id = base.GetResourceIdOrFullNameFromLink(database_link) return database_id, path - def ReadUser(self, user_link, options=None): + def ReadUser(self, user_link, options=None, **kwargs): """Reads a user. :param str user_link: @@ -497,9 +505,9 @@ def ReadUser(self, user_link, options=None): path = base.GetPathFromLink(user_link) user_id = base.GetResourceIdOrFullNameFromLink(user_link) - return self.Read(path, "users", user_id, None, options) + return self.Read(path, "users", user_id, None, options, **kwargs) - def ReadUsers(self, database_link, options=None): + def ReadUsers(self, database_link, options=None, **kwargs): """Reads all users in a database. 
:params str database_link: @@ -515,9 +523,9 @@ def ReadUsers(self, database_link, options=None): if options is None: options = {} - return self.QueryUsers(database_link, None, options) + return self.QueryUsers(database_link, None, options, **kwargs) - def QueryUsers(self, database_link, query, options=None): + def QueryUsers(self, database_link, query, options=None, **kwargs): """Queries users in a database. :param str database_link: @@ -540,13 +548,18 @@ def QueryUsers(self, database_link, query, options=None): def fetch_fn(options): return ( - self.__QueryFeed(path, "users", database_id, lambda r: r["Users"], lambda _, b: b, query, options), + self.__QueryFeed( + path, "users", database_id, lambda r: r["Users"], + lambda _, b: b, query, options, **kwargs + ), self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) - def DeleteDatabase(self, database_link, options=None): + def DeleteDatabase(self, database_link, options=None, **kwargs): """Deletes a database. :param str database_link: @@ -565,9 +578,9 @@ def DeleteDatabase(self, database_link, options=None): path = base.GetPathFromLink(database_link) database_id = base.GetResourceIdOrFullNameFromLink(database_link) - return self.DeleteResource(path, "dbs", database_id, None, options) + return self.DeleteResource(path, "dbs", database_id, None, options, **kwargs) - def CreatePermission(self, user_link, permission, options=None): + def CreatePermission(self, user_link, permission, options=None, **kwargs): """Creates a permission for a user. 
:param str user_link: @@ -587,9 +600,9 @@ def CreatePermission(self, user_link, permission, options=None): options = {} path, user_id = self._GetUserIdWithPathForPermission(permission, user_link) - return self.Create(permission, path, "permissions", user_id, None, options) + return self.Create(permission, path, "permissions", user_id, None, options, **kwargs) - def UpsertPermission(self, user_link, permission, options=None): + def UpsertPermission(self, user_link, permission, options=None, **kwargs): """Upserts a permission for a user. :param str user_link: @@ -609,7 +622,7 @@ def UpsertPermission(self, user_link, permission, options=None): options = {} path, user_id = self._GetUserIdWithPathForPermission(permission, user_link) - return self.Upsert(permission, path, "permissions", user_id, None, options) + return self.Upsert(permission, path, "permissions", user_id, None, options, **kwargs) def _GetUserIdWithPathForPermission(self, permission, user_link): # pylint: disable=no-self-use CosmosClientConnection.__ValidateResource(permission) @@ -617,7 +630,7 @@ def _GetUserIdWithPathForPermission(self, permission, user_link): # pylint: dis user_id = base.GetResourceIdOrFullNameFromLink(user_link) return path, user_id - def ReadPermission(self, permission_link, options=None): + def ReadPermission(self, permission_link, options=None, **kwargs): """Reads a permission. :param str permission_link: @@ -636,9 +649,9 @@ def ReadPermission(self, permission_link, options=None): path = base.GetPathFromLink(permission_link) permission_id = base.GetResourceIdOrFullNameFromLink(permission_link) - return self.Read(path, "permissions", permission_id, None, options) + return self.Read(path, "permissions", permission_id, None, options, **kwargs) - def ReadPermissions(self, user_link, options=None): + def ReadPermissions(self, user_link, options=None, **kwargs): """Reads all permissions for a user. 
:param str user_link: @@ -655,9 +668,9 @@ def ReadPermissions(self, user_link, options=None): if options is None: options = {} - return self.QueryPermissions(user_link, None, options) + return self.QueryPermissions(user_link, None, options, **kwargs) - def QueryPermissions(self, user_link, query, options=None): + def QueryPermissions(self, user_link, query, options=None, **kwargs): """Queries permissions for a user. :param str user_link: @@ -681,14 +694,16 @@ def QueryPermissions(self, user_link, query, options=None): def fetch_fn(options): return ( self.__QueryFeed( - path, "permissions", user_id, lambda r: r["Permissions"], lambda _, b: b, query, options + path, "permissions", user_id, lambda r: r["Permissions"], lambda _, b: b, query, options, **kwargs ), self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) - def ReplaceUser(self, user_link, user, options=None): + def ReplaceUser(self, user_link, user, options=None, **kwargs): """Replaces a user and return it. :param str user_link: @@ -709,9 +724,9 @@ def ReplaceUser(self, user_link, user, options=None): CosmosClientConnection.__ValidateResource(user) path = base.GetPathFromLink(user_link) user_id = base.GetResourceIdOrFullNameFromLink(user_link) - return self.Replace(user, path, "users", user_id, None, options) + return self.Replace(user, path, "users", user_id, None, options, **kwargs) - def DeleteUser(self, user_link, options=None): + def DeleteUser(self, user_link, options=None, **kwargs): """Deletes a user. 
:param str user_link: @@ -730,9 +745,9 @@ def DeleteUser(self, user_link, options=None): path = base.GetPathFromLink(user_link) user_id = base.GetResourceIdOrFullNameFromLink(user_link) - return self.DeleteResource(path, "users", user_id, None, options) + return self.DeleteResource(path, "users", user_id, None, options, **kwargs) - def ReplacePermission(self, permission_link, permission, options=None): + def ReplacePermission(self, permission_link, permission, options=None, **kwargs): """Replaces a permission and return it. :param str permission_link: @@ -753,9 +768,9 @@ def ReplacePermission(self, permission_link, permission, options=None): CosmosClientConnection.__ValidateResource(permission) path = base.GetPathFromLink(permission_link) permission_id = base.GetResourceIdOrFullNameFromLink(permission_link) - return self.Replace(permission, path, "permissions", permission_id, None, options) + return self.Replace(permission, path, "permissions", permission_id, None, options, **kwargs) - def DeletePermission(self, permission_link, options=None): + def DeletePermission(self, permission_link, options=None, **kwargs): """Deletes a permission. :param str permission_link: @@ -774,9 +789,9 @@ def DeletePermission(self, permission_link, options=None): path = base.GetPathFromLink(permission_link) permission_id = base.GetResourceIdOrFullNameFromLink(permission_link) - return self.DeleteResource(path, "permissions", permission_id, None, options) + return self.DeleteResource(path, "permissions", permission_id, None, options, **kwargs) - def ReadItems(self, collection_link, feed_options=None, response_hook=None): + def ReadItems(self, collection_link, feed_options=None, response_hook=None, **kwargs): """Reads all documents in a collection. 
:param str collection_link: @@ -792,12 +807,20 @@ def ReadItems(self, collection_link, feed_options=None, response_hook=None): if feed_options is None: feed_options = {} - return self.QueryItems(collection_link, None, feed_options, response_hook=response_hook) + return self.QueryItems(collection_link, None, feed_options, response_hook=response_hook, **kwargs) - def QueryItems(self, database_or_Container_link, query, options=None, partition_key=None, response_hook=None): + def QueryItems( + self, + database_or_container_link, + query, + options=None, + partition_key=None, + response_hook=None, + **kwargs + ): """Queries documents in a collection. - :param str database_or_Container_link: + :param str database_or_container_link: The link to the database when using partitioning, otherwise link to the document collection. :param (str or dict) query: :param dict options: @@ -813,20 +836,23 @@ def QueryItems(self, database_or_Container_link, query, options=None, partition_ query_iterable.QueryIterable """ - database_or_Container_link = base.TrimBeginningAndEndingSlashes(database_or_Container_link) + database_or_container_link = base.TrimBeginningAndEndingSlashes(database_or_container_link) if options is None: options = {} - if base.IsDatabaseLink(database_or_Container_link): - # Python doesn't have a good way of specifying an overloaded constructor, - # and this is how it's generally overloaded constructors are specified (by - # calling a @classmethod) and returning the 'self' instance - return query_iterable.QueryIterable.PartitioningQueryIterable( - self, query, options, database_or_Container_link, partition_key + if base.IsDatabaseLink(database_or_container_link): + return ItemPaged( + self, + query, + options, + database_link=database_or_container_link, + partition_key=partition_key, + page_iterator_class=query_iterable.QueryIterable ) - path = base.GetPathFromLink(database_or_Container_link, "docs") - collection_id = 
base.GetResourceIdOrFullNameFromLink(database_or_Container_link) + + path = base.GetPathFromLink(database_or_container_link, "docs") + collection_id = base.GetResourceIdOrFullNameFromLink(database_or_container_link) def fetch_fn(options): return ( @@ -839,13 +865,21 @@ def fetch_fn(options): query, options, response_hook=response_hook, + **kwargs ), self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn, database_or_Container_link) + return ItemPaged( + self, + query, + options, + fetch_function=fetch_fn, + collection_link=database_or_container_link, + page_iterator_class=query_iterable.QueryIterable + ) - def QueryItemsChangeFeed(self, collection_link, options=None, response_hook=None): + def QueryItemsChangeFeed(self, collection_link, options=None, response_hook=None, **kwargs): """Queries documents change feed in a collection. :param str collection_link: @@ -868,11 +902,11 @@ def QueryItemsChangeFeed(self, collection_link, options=None, response_hook=None partition_key_range_id = options["partitionKeyRangeId"] return self._QueryChangeFeed( - collection_link, "Documents", options, partition_key_range_id, response_hook=response_hook + collection_link, "Documents", options, partition_key_range_id, response_hook=response_hook, **kwargs ) def _QueryChangeFeed( - self, collection_link, resource_type, options=None, partition_key_range_id=None, response_hook=None + self, collection_link, resource_type, options=None, partition_key_range_id=None, response_hook=None, **kwargs ): """Queries change feed of a resource in a collection. 
@@ -919,13 +953,21 @@ def fetch_fn(options): options, partition_key_range_id, response_hook=response_hook, + **kwargs ), self.last_response_headers, ) - return query_iterable.QueryIterable(self, None, options, fetch_fn, collection_link) + return ItemPaged( + self, + None, + options, + fetch_function=fetch_fn, + collection_link=collection_link, + page_iterator_class=query_iterable.QueryIterable + ) - def _ReadPartitionKeyRanges(self, collection_link, feed_options=None): + def _ReadPartitionKeyRanges(self, collection_link, feed_options=None, **kwargs): """Reads Partition Key Ranges. :param str collection_link: @@ -941,9 +983,9 @@ def _ReadPartitionKeyRanges(self, collection_link, feed_options=None): if feed_options is None: feed_options = {} - return self._QueryPartitionKeyRanges(collection_link, None, feed_options) + return self._QueryPartitionKeyRanges(collection_link, None, feed_options, **kwargs) - def _QueryPartitionKeyRanges(self, collection_link, query, options=None): + def _QueryPartitionKeyRanges(self, collection_link, query, options=None, **kwargs): """Queries Partition Key Ranges in a collection. :param str collection_link: @@ -967,17 +1009,20 @@ def _QueryPartitionKeyRanges(self, collection_link, query, options=None): def fetch_fn(options): return ( self.__QueryFeed( - path, "pkranges", collection_id, lambda r: r["PartitionKeyRanges"], lambda _, b: b, query, options + path, "pkranges", collection_id, lambda r: r["PartitionKeyRanges"], + lambda _, b: b, query, options, **kwargs ), self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) - def CreateItem(self, database_or_Container_link, document, options=None): + def CreateItem(self, database_or_container_link, document, options=None, **kwargs): """Creates a document in a collection. 
- :param str database_or_Container_link: + :param str database_or_container_link: The link to the database when using partitioning, otherwise link to the document collection. :param dict document: The Azure Cosmos document to create. @@ -1005,18 +1050,18 @@ def CreateItem(self, database_or_Container_link, document, options=None): # We check the link to be document collection link since it can be database # link in case of client side partitioning - if base.IsItemContainerLink(database_or_Container_link): - options = self._AddPartitionKey(database_or_Container_link, document, options) + if base.IsItemContainerLink(database_or_container_link): + options = self._AddPartitionKey(database_or_container_link, document, options) collection_id, document, path = self._GetContainerIdWithPathForItem( - database_or_Container_link, document, options + database_or_container_link, document, options ) - return self.Create(document, path, "docs", collection_id, None, options) + return self.Create(document, path, "docs", collection_id, None, options, **kwargs) - def UpsertItem(self, database_or_Container_link, document, options=None): + def UpsertItem(self, database_or_container_link, document, options=None, **kwargs): """Upserts a document in a collection. - :param str database_or_Container_link: + :param str database_or_container_link: The link to the database when using partitioning, otherwise link to the document collection. :param dict document: The Azure Cosmos document to upsert. 
@@ -1044,13 +1089,13 @@ def UpsertItem(self, database_or_Container_link, document, options=None): # We check the link to be document collection link since it can be database # link in case of client side partitioning - if base.IsItemContainerLink(database_or_Container_link): - options = self._AddPartitionKey(database_or_Container_link, document, options) + if base.IsItemContainerLink(database_or_container_link): + options = self._AddPartitionKey(database_or_container_link, document, options) collection_id, document, path = self._GetContainerIdWithPathForItem( - database_or_Container_link, document, options + database_or_container_link, document, options ) - return self.Upsert(document, path, "docs", collection_id, None, options) + return self.Upsert(document, path, "docs", collection_id, None, options, **kwargs) PartitionResolverErrorMessage = ( "Couldn't find any partition resolvers for the database link provided. " @@ -1060,10 +1105,10 @@ def UpsertItem(self, database_or_Container_link, document, options=None): ) # Gets the collection id and path for the document - def _GetContainerIdWithPathForItem(self, database_or_Container_link, document, options): + def _GetContainerIdWithPathForItem(self, database_or_container_link, document, options): - if not database_or_Container_link: - raise ValueError("database_or_Container_link is None or empty.") + if not database_or_container_link: + raise ValueError("database_or_container_link is None or empty.") if document is None: raise ValueError("document is None.") @@ -1073,10 +1118,10 @@ def _GetContainerIdWithPathForItem(self, database_or_Container_link, document, o if not document.get("id") and not options.get("disableAutomaticIdGeneration"): document["id"] = base.GenerateGuidId() - collection_link = database_or_Container_link + collection_link = database_or_container_link - if base.IsDatabaseLink(database_or_Container_link): - partition_resolver = self.GetPartitionResolver(database_or_Container_link) + if 
base.IsDatabaseLink(database_or_container_link): + partition_resolver = self.GetPartitionResolver(database_or_container_link) if partition_resolver is not None: collection_link = partition_resolver.ResolveForCreate(document) @@ -1087,7 +1132,7 @@ def _GetContainerIdWithPathForItem(self, database_or_Container_link, document, o collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) return collection_id, document, path - def ReadItem(self, document_link, options=None): + def ReadItem(self, document_link, options=None, **kwargs): """Reads a document. :param str document_link: @@ -1106,9 +1151,9 @@ def ReadItem(self, document_link, options=None): path = base.GetPathFromLink(document_link) document_id = base.GetResourceIdOrFullNameFromLink(document_link) - return self.Read(path, "docs", document_id, None, options) + return self.Read(path, "docs", document_id, None, options, **kwargs) - def ReadTriggers(self, collection_link, options=None): + def ReadTriggers(self, collection_link, options=None, **kwargs): """Reads all triggers in a collection. :param str collection_link: @@ -1125,9 +1170,9 @@ def ReadTriggers(self, collection_link, options=None): if options is None: options = {} - return self.QueryTriggers(collection_link, None, options) + return self.QueryTriggers(collection_link, None, options, **kwargs) - def QueryTriggers(self, collection_link, query, options=None): + def QueryTriggers(self, collection_link, query, options=None, **kwargs): """Queries triggers in a collection. 
:param str collection_link: @@ -1151,14 +1196,16 @@ def QueryTriggers(self, collection_link, query, options=None): def fetch_fn(options): return ( self.__QueryFeed( - path, "triggers", collection_id, lambda r: r["Triggers"], lambda _, b: b, query, options + path, "triggers", collection_id, lambda r: r["Triggers"], lambda _, b: b, query, options, **kwargs ), self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) - def CreateTrigger(self, collection_link, trigger, options=None): + def CreateTrigger(self, collection_link, trigger, options=None, **kwargs): """Creates a trigger in a collection. :param str collection_link: @@ -1177,9 +1224,9 @@ def CreateTrigger(self, collection_link, trigger, options=None): options = {} collection_id, path, trigger = self._GetContainerIdWithPathForTrigger(collection_link, trigger) - return self.Create(trigger, path, "triggers", collection_id, None, options) + return self.Create(trigger, path, "triggers", collection_id, None, options, **kwargs) - def UpsertTrigger(self, collection_link, trigger, options=None): + def UpsertTrigger(self, collection_link, trigger, options=None, **kwargs): """Upserts a trigger in a collection. 
:param str collection_link: @@ -1198,7 +1245,7 @@ def UpsertTrigger(self, collection_link, trigger, options=None): options = {} collection_id, path, trigger = self._GetContainerIdWithPathForTrigger(collection_link, trigger) - return self.Upsert(trigger, path, "triggers", collection_id, None, options) + return self.Upsert(trigger, path, "triggers", collection_id, None, options, **kwargs) def _GetContainerIdWithPathForTrigger(self, collection_link, trigger): # pylint: disable=no-self-use CosmosClientConnection.__ValidateResource(trigger) @@ -1212,7 +1259,7 @@ def _GetContainerIdWithPathForTrigger(self, collection_link, trigger): # pylint collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) return collection_id, path, trigger - def ReadTrigger(self, trigger_link, options=None): + def ReadTrigger(self, trigger_link, options=None, **kwargs): """Reads a trigger. :param str trigger_link: @@ -1231,9 +1278,9 @@ def ReadTrigger(self, trigger_link, options=None): path = base.GetPathFromLink(trigger_link) trigger_id = base.GetResourceIdOrFullNameFromLink(trigger_link) - return self.Read(path, "triggers", trigger_id, None, options) + return self.Read(path, "triggers", trigger_id, None, options, **kwargs) - def ReadUserDefinedFunctions(self, collection_link, options=None): + def ReadUserDefinedFunctions(self, collection_link, options=None, **kwargs): """Reads all user defined functions in a collection. :param str collection_link: @@ -1250,9 +1297,9 @@ def ReadUserDefinedFunctions(self, collection_link, options=None): if options is None: options = {} - return self.QueryUserDefinedFunctions(collection_link, None, options) + return self.QueryUserDefinedFunctions(collection_link, None, options, **kwargs) - def QueryUserDefinedFunctions(self, collection_link, query, options=None): + def QueryUserDefinedFunctions(self, collection_link, query, options=None, **kwargs): """Queries user defined functions in a collection. 
:param str collection_link: @@ -1276,14 +1323,17 @@ def QueryUserDefinedFunctions(self, collection_link, query, options=None): def fetch_fn(options): return ( self.__QueryFeed( - path, "udfs", collection_id, lambda r: r["UserDefinedFunctions"], lambda _, b: b, query, options + path, "udfs", collection_id, lambda r: r["UserDefinedFunctions"], + lambda _, b: b, query, options, **kwargs ), self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) - def CreateUserDefinedFunction(self, collection_link, udf, options=None): + def CreateUserDefinedFunction(self, collection_link, udf, options=None, **kwargs): """Creates a user defined function in a collection. :param str collection_link: @@ -1302,9 +1352,9 @@ def CreateUserDefinedFunction(self, collection_link, udf, options=None): options = {} collection_id, path, udf = self._GetContainerIdWithPathForUDF(collection_link, udf) - return self.Create(udf, path, "udfs", collection_id, None, options) + return self.Create(udf, path, "udfs", collection_id, None, options, **kwargs) - def UpsertUserDefinedFunction(self, collection_link, udf, options=None): + def UpsertUserDefinedFunction(self, collection_link, udf, options=None, **kwargs): """Upserts a user defined function in a collection. 
:param str collection_link: @@ -1323,7 +1373,7 @@ def UpsertUserDefinedFunction(self, collection_link, udf, options=None): options = {} collection_id, path, udf = self._GetContainerIdWithPathForUDF(collection_link, udf) - return self.Upsert(udf, path, "udfs", collection_id, None, options) + return self.Upsert(udf, path, "udfs", collection_id, None, options, **kwargs) def _GetContainerIdWithPathForUDF(self, collection_link, udf): # pylint: disable=no-self-use CosmosClientConnection.__ValidateResource(udf) @@ -1337,7 +1387,7 @@ def _GetContainerIdWithPathForUDF(self, collection_link, udf): # pylint: disabl collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) return collection_id, path, udf - def ReadUserDefinedFunction(self, udf_link, options=None): + def ReadUserDefinedFunction(self, udf_link, options=None, **kwargs): """Reads a user defined function. :param str udf_link: @@ -1356,9 +1406,9 @@ def ReadUserDefinedFunction(self, udf_link, options=None): path = base.GetPathFromLink(udf_link) udf_id = base.GetResourceIdOrFullNameFromLink(udf_link) - return self.Read(path, "udfs", udf_id, None, options) + return self.Read(path, "udfs", udf_id, None, options, **kwargs) - def ReadStoredProcedures(self, collection_link, options=None): + def ReadStoredProcedures(self, collection_link, options=None, **kwargs): """Reads all store procedures in a collection. :param str collection_link: @@ -1375,9 +1425,9 @@ def ReadStoredProcedures(self, collection_link, options=None): if options is None: options = {} - return self.QueryStoredProcedures(collection_link, None, options) + return self.QueryStoredProcedures(collection_link, None, options, **kwargs) - def QueryStoredProcedures(self, collection_link, query, options=None): + def QueryStoredProcedures(self, collection_link, query, options=None, **kwargs): """Queries stored procedures in a collection. 
:param str collection_link: @@ -1401,14 +1451,17 @@ def QueryStoredProcedures(self, collection_link, query, options=None): def fetch_fn(options): return ( self.__QueryFeed( - path, "sprocs", collection_id, lambda r: r["StoredProcedures"], lambda _, b: b, query, options + path, "sprocs", collection_id, lambda r: r["StoredProcedures"], + lambda _, b: b, query, options, **kwargs ), self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) - def CreateStoredProcedure(self, collection_link, sproc, options=None): + def CreateStoredProcedure(self, collection_link, sproc, options=None, **kwargs): """Creates a stored procedure in a collection. :param str collection_link: @@ -1427,9 +1480,9 @@ def CreateStoredProcedure(self, collection_link, sproc, options=None): options = {} collection_id, path, sproc = self._GetContainerIdWithPathForSproc(collection_link, sproc) - return self.Create(sproc, path, "sprocs", collection_id, None, options) + return self.Create(sproc, path, "sprocs", collection_id, None, options, **kwargs) - def UpsertStoredProcedure(self, collection_link, sproc, options=None): + def UpsertStoredProcedure(self, collection_link, sproc, options=None, **kwargs): """Upserts a stored procedure in a collection. 
:param str collection_link: @@ -1448,7 +1501,7 @@ def UpsertStoredProcedure(self, collection_link, sproc, options=None): options = {} collection_id, path, sproc = self._GetContainerIdWithPathForSproc(collection_link, sproc) - return self.Upsert(sproc, path, "sprocs", collection_id, None, options) + return self.Upsert(sproc, path, "sprocs", collection_id, None, options, **kwargs) def _GetContainerIdWithPathForSproc(self, collection_link, sproc): # pylint: disable=no-self-use CosmosClientConnection.__ValidateResource(sproc) @@ -1461,7 +1514,7 @@ def _GetContainerIdWithPathForSproc(self, collection_link, sproc): # pylint: di collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) return collection_id, path, sproc - def ReadStoredProcedure(self, sproc_link, options=None): + def ReadStoredProcedure(self, sproc_link, options=None, **kwargs): """Reads a stored procedure. :param str sproc_link: @@ -1480,9 +1533,9 @@ def ReadStoredProcedure(self, sproc_link, options=None): path = base.GetPathFromLink(sproc_link) sproc_id = base.GetResourceIdOrFullNameFromLink(sproc_link) - return self.Read(path, "sprocs", sproc_id, None, options) + return self.Read(path, "sprocs", sproc_id, None, options, **kwargs) - def ReadConflicts(self, collection_link, feed_options=None): + def ReadConflicts(self, collection_link, feed_options=None, **kwargs): """Reads conflicts. :param str collection_link: @@ -1498,9 +1551,9 @@ def ReadConflicts(self, collection_link, feed_options=None): if feed_options is None: feed_options = {} - return self.QueryConflicts(collection_link, None, feed_options) + return self.QueryConflicts(collection_link, None, feed_options, **kwargs) - def QueryConflicts(self, collection_link, query, options=None): + def QueryConflicts(self, collection_link, query, options=None, **kwargs): """Queries conflicts in a collection. 
:param str collection_link: @@ -1524,14 +1577,17 @@ def QueryConflicts(self, collection_link, query, options=None): def fetch_fn(options): return ( self.__QueryFeed( - path, "conflicts", collection_id, lambda r: r["Conflicts"], lambda _, b: b, query, options + path, "conflicts", collection_id, lambda r: r["Conflicts"], + lambda _, b: b, query, options, **kwargs ), self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) - def ReadConflict(self, conflict_link, options=None): + def ReadConflict(self, conflict_link, options=None, **kwargs): """Reads a conflict. :param str conflict_link: @@ -1549,9 +1605,9 @@ def ReadConflict(self, conflict_link, options=None): path = base.GetPathFromLink(conflict_link) conflict_id = base.GetResourceIdOrFullNameFromLink(conflict_link) - return self.Read(path, "conflicts", conflict_id, None, options) + return self.Read(path, "conflicts", conflict_id, None, options, **kwargs) - def DeleteContainer(self, collection_link, options=None): + def DeleteContainer(self, collection_link, options=None, **kwargs): """Deletes a collection. :param str collection_link: @@ -1570,9 +1626,9 @@ def DeleteContainer(self, collection_link, options=None): path = base.GetPathFromLink(collection_link) collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) - return self.DeleteResource(path, "colls", collection_id, None, options) + return self.DeleteResource(path, "colls", collection_id, None, options, **kwargs) - def ReplaceItem(self, document_link, new_document, options=None): + def ReplaceItem(self, document_link, new_document, options=None, **kwargs): """Replaces a document and returns it. 
:param str document_link: @@ -1605,9 +1661,9 @@ def ReplaceItem(self, document_link, new_document, options=None): collection_link = base.GetItemContainerLink(document_link) options = self._AddPartitionKey(collection_link, new_document, options) - return self.Replace(new_document, path, "docs", document_id, None, options) + return self.Replace(new_document, path, "docs", document_id, None, options, **kwargs) - def DeleteItem(self, document_link, options=None): + def DeleteItem(self, document_link, options=None, **kwargs): """Deletes a document. :param str document_link: @@ -1626,9 +1682,9 @@ def DeleteItem(self, document_link, options=None): path = base.GetPathFromLink(document_link) document_id = base.GetResourceIdOrFullNameFromLink(document_link) - return self.DeleteResource(path, "docs", document_id, None, options) + return self.DeleteResource(path, "docs", document_id, None, options, **kwargs) - def CreateAttachment(self, document_link, attachment, options=None): + def CreateAttachment(self, document_link, attachment, options=None, **kwargs): """Creates an attachment in a document. :param str document_link: @@ -1648,9 +1704,9 @@ def CreateAttachment(self, document_link, attachment, options=None): options = {} document_id, path = self._GetItemIdWithPathForAttachment(attachment, document_link) - return self.Create(attachment, path, "attachments", document_id, None, options) + return self.Create(attachment, path, "attachments", document_id, None, options, **kwargs) - def UpsertAttachment(self, document_link, attachment, options=None): + def UpsertAttachment(self, document_link, attachment, options=None, **kwargs): """Upserts an attachment in a document. 
:param str document_link: @@ -1670,7 +1726,7 @@ def UpsertAttachment(self, document_link, attachment, options=None): options = {} document_id, path = self._GetItemIdWithPathForAttachment(attachment, document_link) - return self.Upsert(attachment, path, "attachments", document_id, None, options) + return self.Upsert(attachment, path, "attachments", document_id, None, options, **kwargs) def _GetItemIdWithPathForAttachment(self, attachment, document_link): # pylint: disable=no-self-use CosmosClientConnection.__ValidateResource(attachment) @@ -1678,7 +1734,7 @@ def _GetItemIdWithPathForAttachment(self, attachment, document_link): # pylint: document_id = base.GetResourceIdOrFullNameFromLink(document_link) return document_id, path - def CreateAttachmentAndUploadMedia(self, document_link, readable_stream, options=None): + def CreateAttachmentAndUploadMedia(self, document_link, readable_stream, options=None, **kwargs): """Creates an attachment and upload media. :param str document_link: @@ -1697,9 +1753,9 @@ def CreateAttachmentAndUploadMedia(self, document_link, readable_stream, options options = {} document_id, initial_headers, path = self._GetItemIdWithPathForAttachmentMedia(document_link, options) - return self.Create(readable_stream, path, "attachments", document_id, initial_headers, options) + return self.Create(readable_stream, path, "attachments", document_id, initial_headers, options, **kwargs) - def UpsertAttachmentAndUploadMedia(self, document_link, readable_stream, options=None): + def UpsertAttachmentAndUploadMedia(self, document_link, readable_stream, options=None, **kwargs): """Upserts an attachment and upload media. 
:param str document_link: @@ -1718,7 +1774,7 @@ def UpsertAttachmentAndUploadMedia(self, document_link, readable_stream, options options = {} document_id, initial_headers, path = self._GetItemIdWithPathForAttachmentMedia(document_link, options) - return self.Upsert(readable_stream, path, "attachments", document_id, initial_headers, options) + return self.Upsert(readable_stream, path, "attachments", document_id, initial_headers, options, **kwargs) def _GetItemIdWithPathForAttachmentMedia(self, document_link, options): initial_headers = dict(self.default_headers) @@ -1736,7 +1792,7 @@ def _GetItemIdWithPathForAttachmentMedia(self, document_link, options): document_id = base.GetResourceIdOrFullNameFromLink(document_link) return document_id, initial_headers, path - def ReadAttachment(self, attachment_link, options=None): + def ReadAttachment(self, attachment_link, options=None, **kwargs): """Reads an attachment. :param str attachment_link: @@ -1755,9 +1811,9 @@ def ReadAttachment(self, attachment_link, options=None): path = base.GetPathFromLink(attachment_link) attachment_id = base.GetResourceIdOrFullNameFromLink(attachment_link) - return self.Read(path, "attachments", attachment_id, None, options) + return self.Read(path, "attachments", attachment_id, None, options, **kwargs) - def ReadAttachments(self, document_link, options=None): + def ReadAttachments(self, document_link, options=None, **kwargs): """Reads all attachments in a document. :param str document_link: @@ -1774,9 +1830,9 @@ def ReadAttachments(self, document_link, options=None): if options is None: options = {} - return self.QueryAttachments(document_link, None, options) + return self.QueryAttachments(document_link, None, options, **kwargs) - def QueryAttachments(self, document_link, query, options=None): + def QueryAttachments(self, document_link, query, options=None, **kwargs): """Queries attachments in a document. 
:param str document_link: @@ -1800,12 +1856,15 @@ def QueryAttachments(self, document_link, query, options=None): def fetch_fn(options): return ( self.__QueryFeed( - path, "attachments", document_id, lambda r: r["Attachments"], lambda _, b: b, query, options + path, "attachments", document_id, lambda r: r["Attachments"], + lambda _, b: b, query, options, **kwargs ), self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) def ReadMedia(self, media_link, **kwargs): """Reads a media. @@ -1876,7 +1935,7 @@ def UpdateMedia(self, media_link, readable_stream, options=None, **kwargs): self._UpdateSessionIfRequired(headers, result, self.last_response_headers) return result - def ReplaceAttachment(self, attachment_link, attachment, options=None): + def ReplaceAttachment(self, attachment_link, attachment, options=None, **kwargs): """Replaces an attachment and returns it. :param str attachment_link: @@ -1897,9 +1956,9 @@ def ReplaceAttachment(self, attachment_link, attachment, options=None): CosmosClientConnection.__ValidateResource(attachment) path = base.GetPathFromLink(attachment_link) attachment_id = base.GetResourceIdOrFullNameFromLink(attachment_link) - return self.Replace(attachment, path, "attachments", attachment_id, None, options) + return self.Replace(attachment, path, "attachments", attachment_id, None, options, **kwargs) - def DeleteAttachment(self, attachment_link, options=None): + def DeleteAttachment(self, attachment_link, options=None, **kwargs): """Deletes an attachment. 
:param str attachment_link: @@ -1918,9 +1977,9 @@ def DeleteAttachment(self, attachment_link, options=None): path = base.GetPathFromLink(attachment_link) attachment_id = base.GetResourceIdOrFullNameFromLink(attachment_link) - return self.DeleteResource(path, "attachments", attachment_id, None, options) + return self.DeleteResource(path, "attachments", attachment_id, None, options, **kwargs) - def ReplaceTrigger(self, trigger_link, trigger, options=None): + def ReplaceTrigger(self, trigger_link, trigger, options=None, **kwargs): """Replaces a trigger and returns it. :param str trigger_link: @@ -1947,9 +2006,9 @@ def ReplaceTrigger(self, trigger_link, trigger, options=None): path = base.GetPathFromLink(trigger_link) trigger_id = base.GetResourceIdOrFullNameFromLink(trigger_link) - return self.Replace(trigger, path, "triggers", trigger_id, None, options) + return self.Replace(trigger, path, "triggers", trigger_id, None, options, **kwargs) - def DeleteTrigger(self, trigger_link, options=None): + def DeleteTrigger(self, trigger_link, options=None, **kwargs): """Deletes a trigger. :param str trigger_link: @@ -1968,9 +2027,9 @@ def DeleteTrigger(self, trigger_link, options=None): path = base.GetPathFromLink(trigger_link) trigger_id = base.GetResourceIdOrFullNameFromLink(trigger_link) - return self.DeleteResource(path, "triggers", trigger_id, None, options) + return self.DeleteResource(path, "triggers", trigger_id, None, options, **kwargs) - def ReplaceUserDefinedFunction(self, udf_link, udf, options=None): + def ReplaceUserDefinedFunction(self, udf_link, udf, options=None, **kwargs): """Replaces a user defined function and returns it. 
:param str udf_link: @@ -1997,9 +2056,9 @@ def ReplaceUserDefinedFunction(self, udf_link, udf, options=None): path = base.GetPathFromLink(udf_link) udf_id = base.GetResourceIdOrFullNameFromLink(udf_link) - return self.Replace(udf, path, "udfs", udf_id, None, options) + return self.Replace(udf, path, "udfs", udf_id, None, options, **kwargs) - def DeleteUserDefinedFunction(self, udf_link, options=None): + def DeleteUserDefinedFunction(self, udf_link, options=None, **kwargs): """Deletes a user defined function. :param str udf_link: @@ -2018,7 +2077,7 @@ def DeleteUserDefinedFunction(self, udf_link, options=None): path = base.GetPathFromLink(udf_link) udf_id = base.GetResourceIdOrFullNameFromLink(udf_link) - return self.DeleteResource(path, "udfs", udf_id, None, options) + return self.DeleteResource(path, "udfs", udf_id, None, options, **kwargs) def ExecuteStoredProcedure(self, sproc_link, params, options=None, **kwargs): """Executes a store procedure. @@ -2054,7 +2113,7 @@ def ExecuteStoredProcedure(self, sproc_link, params, options=None, **kwargs): result, self.last_response_headers = self.__Post(path, request_params, params, headers, **kwargs) return result - def ReplaceStoredProcedure(self, sproc_link, sproc, options=None): + def ReplaceStoredProcedure(self, sproc_link, sproc, options=None, **kwargs): """Replaces a stored procedure and returns it. :param str sproc_link: @@ -2081,9 +2140,9 @@ def ReplaceStoredProcedure(self, sproc_link, sproc, options=None): path = base.GetPathFromLink(sproc_link) sproc_id = base.GetResourceIdOrFullNameFromLink(sproc_link) - return self.Replace(sproc, path, "sprocs", sproc_id, None, options) + return self.Replace(sproc, path, "sprocs", sproc_id, None, options, **kwargs) - def DeleteStoredProcedure(self, sproc_link, options=None): + def DeleteStoredProcedure(self, sproc_link, options=None, **kwargs): """Deletes a stored procedure. 
:param str sproc_link: @@ -2102,9 +2161,9 @@ def DeleteStoredProcedure(self, sproc_link, options=None): path = base.GetPathFromLink(sproc_link) sproc_id = base.GetResourceIdOrFullNameFromLink(sproc_link) - return self.DeleteResource(path, "sprocs", sproc_id, None, options) + return self.DeleteResource(path, "sprocs", sproc_id, None, options, **kwargs) - def DeleteConflict(self, conflict_link, options=None): + def DeleteConflict(self, conflict_link, options=None, **kwargs): """Deletes a conflict. :param str conflict_link: @@ -2123,9 +2182,9 @@ def DeleteConflict(self, conflict_link, options=None): path = base.GetPathFromLink(conflict_link) conflict_id = base.GetResourceIdOrFullNameFromLink(conflict_link) - return self.DeleteResource(path, "conflicts", conflict_id, None, options) + return self.DeleteResource(path, "conflicts", conflict_id, None, options, **kwargs) - def ReplaceOffer(self, offer_link, offer): + def ReplaceOffer(self, offer_link, offer, **kwargs): """Replaces an offer and returns it. :param str offer_link: @@ -2141,9 +2200,9 @@ def ReplaceOffer(self, offer_link, offer): CosmosClientConnection.__ValidateResource(offer) path = base.GetPathFromLink(offer_link) offer_id = base.GetResourceIdOrFullNameFromLink(offer_link) - return self.Replace(offer, path, "offers", offer_id, None, None) + return self.Replace(offer, path, "offers", offer_id, None, None, **kwargs) - def ReadOffer(self, offer_link): + def ReadOffer(self, offer_link, **kwargs): """Reads an offer. :param str offer_link: @@ -2157,9 +2216,9 @@ def ReadOffer(self, offer_link): """ path = base.GetPathFromLink(offer_link) offer_id = base.GetResourceIdOrFullNameFromLink(offer_link) - return self.Read(path, "offers", offer_id, None, {}) + return self.Read(path, "offers", offer_id, None, {}, **kwargs) - def ReadOffers(self, options=None): + def ReadOffers(self, options=None, **kwargs): """Reads all offers. 
:param dict options: @@ -2174,9 +2233,9 @@ def ReadOffers(self, options=None): if options is None: options = {} - return self.QueryOffers(None, options) + return self.QueryOffers(None, options, **kwargs) - def QueryOffers(self, query, options=None): + def QueryOffers(self, query, options=None, **kwargs): """Query for all offers. :param (str or dict) query: @@ -2194,11 +2253,15 @@ def QueryOffers(self, query, options=None): def fetch_fn(options): return ( - self.__QueryFeed("/offers", "offers", "", lambda r: r["Offers"], lambda _, b: b, query, options), + self.__QueryFeed( + "/offers", "offers", "", lambda r: r["Offers"], lambda _, b: b, query, options, **kwargs + ), self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) def GetDatabaseAccount(self, url_connection=None, **kwargs): """Gets database account info. @@ -2394,12 +2457,12 @@ def DeleteResource(self, path, typ, id, initial_headers, options=None, **kwargs) return result - def __Get(self, path, request_params, headers, **kwargs): + def __Get(self, path, request_params, req_headers, **kwargs): """Azure Cosmos 'GET' http request. :params str url: :params str path: - :params dict headers: + :params dict req_headers: :return: Tuple of (result, headers). @@ -2407,7 +2470,7 @@ def __Get(self, path, request_params, headers, **kwargs): tuple of (dict, dict) """ - request = self.pipeline_client.get(url=path, headers=headers) + request = self.pipeline_client.get(url=path, headers=req_headers) return synchronized_request.SynchronizedRequest( client=self, request_params=request_params, @@ -2419,13 +2482,13 @@ def __Get(self, path, request_params, headers, **kwargs): **kwargs ) - def __Post(self, path, request_params, body, headers, **kwargs): + def __Post(self, path, request_params, body, req_headers, **kwargs): """Azure Cosmos 'POST' http request. 
:params str url: :params str path: :params (str, unicode, dict) body: - :params dict headers: + :params dict req_headers: :return: Tuple of (result, headers). @@ -2433,7 +2496,7 @@ def __Post(self, path, request_params, body, headers, **kwargs): tuple of (dict, dict) """ - request = self.pipeline_client.post(url=path, headers=headers) + request = self.pipeline_client.post(url=path, headers=req_headers) return synchronized_request.SynchronizedRequest( client=self, request_params=request_params, @@ -2445,13 +2508,13 @@ def __Post(self, path, request_params, body, headers, **kwargs): **kwargs ) - def __Put(self, path, request_params, body, headers, **kwargs): + def __Put(self, path, request_params, body, req_headers, **kwargs): """Azure Cosmos 'PUT' http request. :params str url: :params str path: :params (str, unicode, dict) body: - :params dict headers: + :params dict req_headers: :return: Tuple of (result, headers). @@ -2459,7 +2522,7 @@ def __Put(self, path, request_params, body, headers, **kwargs): tuple of (dict, dict) """ - request = self.pipeline_client.put(url=path, headers=headers) + request = self.pipeline_client.put(url=path, headers=req_headers) return synchronized_request.SynchronizedRequest( client=self, request_params=request_params, @@ -2471,12 +2534,12 @@ def __Put(self, path, request_params, body, headers, **kwargs): **kwargs ) - def __Delete(self, path, request_params, headers, **kwargs): + def __Delete(self, path, request_params, req_headers, **kwargs): """Azure Cosmos 'DELETE' http request. :params str url: :params str path: - :params dict headers: + :params dict req_headers: :return: Tuple of (result, headers). 
@@ -2484,7 +2547,7 @@ def __Delete(self, path, request_params, headers, **kwargs): tuple of (dict, dict) """ - request = self.pipeline_client.delete(url=path, headers=headers) + request = self.pipeline_client.delete(url=path, headers=req_headers) return synchronized_request.SynchronizedRequest( client=self, request_params=request_params, @@ -2496,7 +2559,7 @@ def __Delete(self, path, request_params, headers, **kwargs): **kwargs ) - def QueryFeed(self, path, collection_id, query, options, partition_key_range_id=None): + def QueryFeed(self, path, collection_id, query, options, partition_key_range_id=None, **kwargs): """Query Feed for Document Collection resource. :param str path: @@ -2522,6 +2585,7 @@ def QueryFeed(self, path, collection_id, query, options, partition_key_range_id= query, options, partition_key_range_id, + **kwargs ), self.last_response_headers, ) @@ -2598,8 +2662,8 @@ def __GetBodiesFromQueryResult(result): # Query operations will use ReadEndpoint even though it uses POST(for regular query operations) request_params = _request_object.RequestObject(typ, documents._OperationType.SqlQuery) - headers = base.GetHeaders(self, initial_headers, "post", path, id_, typ, options, partition_key_range_id) - result, self.last_response_headers = self.__Post(path, request_params, query, headers, **kwargs) + req_headers = base.GetHeaders(self, initial_headers, "post", path, id_, typ, options, partition_key_range_id) + result, self.last_response_headers = self.__Post(path, request_params, query, req_headers, **kwargs) if response_hook: response_hook(self.last_response_headers, result) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py index 2e07955ab0ea..6b5e52769193 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py @@ -66,7 +66,7 @@ def needsRetry(self, error_code): def ShouldRetry(self, 
exception): """Returns true if should retry based on the passed-in exception. - :param (errors.HTTPFailure instance) exception: + :param (errors.CosmosHttpResponseError instance) exception: :rtype: boolean diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_endpoint_discovery_retry_policy.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_endpoint_discovery_retry_policy.py index 90422376d450..2f773de8735a 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_endpoint_discovery_retry_policy.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_endpoint_discovery_retry_policy.py @@ -61,7 +61,7 @@ def __init__(self, connection_policy, global_endpoint_manager, *args): def ShouldRetry(self, exception): # pylint: disable=unused-argument """Returns true if should retry based on the passed-in exception. - :param (errors.HTTPFailure instance) exception: + :param (errors.CosmosHttpResponseError instance) exception: :rtype: boolean diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/document_producer.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/document_producer.py index 19d29ab45866..cb554127c276 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/document_producer.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/document_producer.py @@ -128,7 +128,8 @@ def compare(self, doc_producer1, doc_producer2): # pylint: disable=no-self-use ) -class _OrderByHelper: +class _OrderByHelper(object): + @staticmethod def getTypeOrd(orderby_item): """Returns the ordinal of the value of the item pair in the dictionary. 
diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py index b2c1752b40c3..2c37510a63a4 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py @@ -24,7 +24,7 @@ import json from six.moves import xrange -from azure.cosmos.errors import HTTPFailure +from azure.cosmos.errors import CosmosHttpResponseError from azure.cosmos._execution_context.base_execution_context import _QueryExecutionContextBase from azure.cosmos._execution_context.base_execution_context import _DefaultQueryExecutionContext from azure.cosmos._execution_context.query_execution_info import _PartitionedQueryExecutionInfo @@ -42,7 +42,7 @@ def _is_partitioned_execution_info(e): def _get_partitioned_execution_info(e): - error_msg = json.loads(e._http_error_message) + error_msg = json.loads(e.http_error_message) return _PartitionedQueryExecutionInfo(json.loads(error_msg["additionalErrorInfo"])) @@ -76,7 +76,7 @@ def next(self): """ try: return next(self._execution_context) - except HTTPFailure as e: + except CosmosHttpResponseError as e: if _is_partitioned_execution_info(e): query_execution_info = _get_partitioned_execution_info(e) self._execution_context = self._create_pipelined_execution_context(query_execution_info) @@ -97,7 +97,7 @@ def fetch_next_block(self): """ try: return self._execution_context.fetch_next_block() - except HTTPFailure as e: + except CosmosHttpResponseError as e: if _is_partitioned_execution_info(e): query_execution_info = _get_partitioned_execution_info(e) self._execution_context = self._create_pipelined_execution_context(query_execution_info) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_global_endpoint_manager.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_global_endpoint_manager.py index b4bb6cda0703..d4dc37ee7533 100644 --- 
a/sdk/cosmos/azure-cosmos/azure/cosmos/_global_endpoint_manager.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_global_endpoint_manager.py @@ -126,13 +126,13 @@ def _GetDatabaseAccount(self): # specified (by creating a locational endpoint) and keeping eating the exception # until we get the database account and return None at the end, if we are not able # to get that info from any endpoints - except errors.HTTPFailure: + except errors.CosmosHttpResponseError: for location_name in self.PreferredLocations: locational_endpoint = _GlobalEndpointManager.GetLocationalEndpoint(self.DefaultEndpoint, location_name) try: database_account = self._GetDatabaseAccountStub(locational_endpoint) return database_account - except errors.HTTPFailure: + except errors.CosmosHttpResponseError: pass return None diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py index 65f8a8fd2a2d..d1cf600be217 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py @@ -21,18 +21,29 @@ """Iterable query results in the Azure Cosmos database service. """ +from azure.core.paging import PageIterator # type: ignore from azure.cosmos._execution_context import execution_dispatcher from azure.cosmos._execution_context import base_execution_context # pylint: disable=protected-access -class QueryIterable(object): +class QueryIterable(PageIterator): """Represents an iterable object of the query results. QueryIterable is a wrapper for query execution context. """ - def __init__(self, client, query, options, fetch_function, collection_link=None): + def __init__( + self, + client, + query, + options, + fetch_function=None, + collection_link=None, + database_link=None, + partition_key=None, + continuation_token=None, + ): """ Instantiates a QueryIterable for non-client side partitioning queries. 
_ProxyQueryExecutionContext will be used as the internal query execution context @@ -56,41 +67,19 @@ def __init__(self, client, query, options, fetch_function, collection_link=None) self.retry_options = client.connection_policy.RetryOptions self._query = query self._options = options + if continuation_token: + options['continuation'] = continuation_token self._fetch_function = fetch_function self._collection_link = collection_link - self._ex_context = None - - @classmethod - def PartitioningQueryIterable(cls, client, query, options, database_link, partition_key): - """ - Represents a client side partitioning query iterable. - - This constructor instantiates a QueryIterable for - client side partitioning queries, and sets _MultiCollectionQueryExecutionContext - as the internal execution context. - - :param CosmosClient client: - Instance of document client - :param (str or dict) options: - :param dict options: - The request options for the request. - :param str database_link: - Database self link or ID based link - :param str partition_key: - Partition key for the query - """ - # This will call the base constructor(__init__ method above) - - self = cls(client, query, options, None, None) - self._database_link = database_link # pylint: disable=attribute-defined-outside-init - self._partition_key = partition_key # pylint: disable=attribute-defined-outside-init - - return self + self._database_link = database_link + self._partition_key = partition_key + self._ex_context = self._create_execution_context() + super(QueryIterable, self).__init__(self._fetch_next, self._unpack, continuation_token=continuation_token) def _create_execution_context(self): """instantiates the internal query execution context based. 
""" - if hasattr(self, "_database_link"): + if self._database_link: # client side partitioning query return base_execution_context._MultiCollectionQueryExecutionContext( self._client, self._options, self._database_link, self._query, self._partition_key @@ -99,29 +88,16 @@ def _create_execution_context(self): self._client, self._collection_link, self._query, self._options, self._fetch_function ) - def __iter__(self): - """Makes this class iterable. - """ - return self.Iterator(self) - - class Iterator(object): - def __init__(self, iterable): - self._iterable = iterable - self._finished = False - self._ex_context = iterable._create_execution_context() - - def __iter__(self): - # Always returns self - return self + def _unpack(self, block): + continuation = None + if self._client.last_response_headers: + continuation = self._client.last_response_headers.get("x-ms-continuation") or \ + self._client.last_response_headers.get('etag') + if block: + self._did_a_call_already = False + return continuation, block - def __next__(self): - return next(self._ex_context) - - # Also support Python 3.x iteration - def next(self): - return self.__next__() - - def fetch_next_block(self): + def _fetch_next(self, *args): # pylint: disable=unused-argument """Returns a block of results with respecting retry policy. This method only exists for backward compatibility reasons. 
(Because QueryIterable @@ -132,9 +108,7 @@ def fetch_next_block(self): :rtype: list """ - - if self._ex_context is None: - # initiates execution context for the first time - self._ex_context = self._create_execution_context() - - return self._ex_context.fetch_next_block() + block = self._ex_context.fetch_next_block() + if not block: + raise StopIteration + return block diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_resource_throttle_retry_policy.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_resource_throttle_retry_policy.py index 8e027e0fcc2e..e21454ec7792 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_resource_throttle_retry_policy.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_resource_throttle_retry_policy.py @@ -36,7 +36,7 @@ def __init__(self, max_retry_attempt_count, fixed_retry_interval_in_milliseconds def ShouldRetry(self, exception): """Returns true if should retry based on the passed-in exception. - :param (errors.HTTPFailure instance) exception: + :param (errors.CosmosHttpResponseError instance) exception: :rtype: boolean diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_retry_utility.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_retry_utility.py index e787857de9e8..df575cb27d36 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_retry_utility.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_retry_utility.py @@ -80,7 +80,7 @@ def Execute(client, global_endpoint_manager, function, *args, **kwargs): ] = resourceThrottle_retry_policy.cummulative_wait_time_in_milliseconds return result - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: retry_policy = None if e.status_code == StatusCodes.FORBIDDEN and e.sub_status == SubStatusCodes.WRITE_FORBIDDEN: retry_policy = endpointDiscovery_retry_policy diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_runtime_constants.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_runtime_constants.py index 6396de351536..fc9e640b0899 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_runtime_constants.py +++ 
b/sdk/cosmos/azure-cosmos/azure/cosmos/_runtime_constants.py @@ -23,7 +23,7 @@ """ -class MediaTypes: +class MediaTypes(object): """Constants of media types. http://www.iana.org/assignments/media-types/media-types.xhtml diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_session.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_session.py index c80a53c0c5f1..dd1a573eaa4a 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_session.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_session.py @@ -29,7 +29,7 @@ from . import _base from . import http_constants from ._vector_session_token import VectorSessionToken -from .errors import HTTPFailure +from .errors import CosmosHttpResponseError class SessionContainer(object): @@ -196,15 +196,15 @@ def parse_session_token(response_headers): id_ = tokens[0] sessionToken = VectorSessionToken.create(tokens[1]) if sessionToken is None: - raise HTTPFailure( - http_constants.StatusCodes.INTERNAL_SERVER_ERROR, - "Could not parse the received session token: %s" % tokens[1], + raise CosmosHttpResponseError( + status_code=http_constants.StatusCodes.INTERNAL_SERVER_ERROR, + message="Could not parse the received session token: %s" % tokens[1], ) id_to_sessionlsn[id_] = sessionToken return id_to_sessionlsn -class Session: +class Session(object): """ State of a Azure Cosmos session. This session object can be shared across clients within the same process diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_session_retry_policy.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_session_retry_policy.py index 9f2c14be6a1f..01ae7778a7f4 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_session_retry_policy.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_session_retry_policy.py @@ -62,7 +62,7 @@ def __init__(self, endpoint_discovery_enable, global_endpoint_manager, *args): def ShouldRetry(self, _exception): """Returns true if should retry based on the passed-in exception. 
- :param (errors.HTTPFailure instance) exception: + :param (errors.CosmosHttpResponseError instance) exception: :rtype: boolean diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_synchronized_request.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_synchronized_request.py index 618541feb6b0..7f1b900303ca 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_synchronized_request.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_synchronized_request.py @@ -24,8 +24,9 @@ import json -from six.moves.urllib.parse import urlparse, urlencode +from six.moves.urllib.parse import urlparse import six +from azure.core.exceptions import DecodeError # type: ignore from . import documents from . import errors @@ -86,6 +87,8 @@ def _Request(global_endpoint_manager, request_params, connection_policy, pipelin tuple of (dict, dict) """ + # pylint: disable=protected-access + is_media = request.url.find("media") > -1 is_media_stream = is_media and connection_policy.MediaReadMode == documents.MediaReadMode.Streamed @@ -125,7 +128,7 @@ def _Request(global_endpoint_manager, request_params, connection_policy, pipelin connection_timeout=connection_timeout, connection_verify=kwargs.pop("connection_verify", ca_certs), connection_cert=kwargs.pop("connection_cert", cert_files), - + **kwargs ) else: response = pipeline_client._pipeline.run( @@ -133,7 +136,8 @@ def _Request(global_endpoint_manager, request_params, connection_policy, pipelin stream=is_media_stream, connection_timeout=connection_timeout, # If SSL is disabled, verify = false - connection_verify=kwargs.pop("connection_verify", is_ssl_enabled) + connection_verify=kwargs.pop("connection_verify", is_ssl_enabled), + **kwargs ) response = response.http_response @@ -149,8 +153,14 @@ def _Request(global_endpoint_manager, request_params, connection_policy, pipelin # python 3 compatible: convert data from byte to unicode string data = data.decode("utf-8") + if response.status_code == 404: + raise errors.CosmosResourceNotFoundError(message=data, 
response=response) + if response.status_code == 409: + raise errors.CosmosResourceExistsError(message=data, response=response) + if response.status_code == 412: + raise errors.CosmosAccessConditionFailedError(message=data, response=response) if response.status_code >= 400: - raise errors.HTTPFailure(response.status_code, data, headers) + raise errors.CosmosHttpResponseError(message=data, response=response) result = None if is_media: @@ -159,8 +169,11 @@ def _Request(global_endpoint_manager, request_params, connection_policy, pipelin if data: try: result = json.loads(data) - except: - raise errors.JSONParseFailure(data) + except Exception as e: + raise DecodeError( + message="Failed to decode JSON data: {}".format(e), + response=response, + error=e) return (result, headers) @@ -180,7 +193,7 @@ def SynchronizedRequest( :param object client: Document client instance :param dict request_params: - :param _GlobalEndpointManager global_endpoint_manager: + :param _GlobalEndpointManager global_endpoint_manager: :param documents.ConnectionPolicy connection_policy: :param azure.core.PipelineClient pipeline_client: PipelineClient to process the request. diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_utils.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_utils.py index 4e24a938d2d7..f44e3a906bda 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_utils.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_utils.py @@ -24,17 +24,13 @@ import platform import re -from . 
import http_constants +from .version import VERSION def get_user_agent(): - os_name = safe_user_agent_header(platform.system()) - os_version = safe_user_agent_header(platform.release()) + os_name = safe_user_agent_header(platform.platform()) python_version = safe_user_agent_header(platform.python_version()) - - user_agent = "{}/{} Python/{} {}/{}".format( - os_name, os_version, python_version, http_constants.Versions.SDKName, http_constants.Versions.SDKVersion - ) + user_agent = "azsdk-python-cosmos/{} Python/{} ({})".format(VERSION, python_version, os_name) return user_agent diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_vector_session_token.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_vector_session_token.py index 1c6832f31d3c..675f0801632d 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_vector_session_token.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_vector_session_token.py @@ -120,11 +120,10 @@ def merge(self, other): raise ValueError("Invalid Session Token (should not be None)") if self.version == other.version and len(self.local_lsn_by_region) != len(other.local_lsn_by_region): - raise errors.CosmosError( - Exception( - "Status Code: %s. Compared session tokens '%s' and '%s' have unexpected regions." - % (StatusCodes.INTERNAL_SERVER_ERROR, self.session_token, other.session_token) - ) + raise errors.CosmosHttpResponseError( + status_code=StatusCodes.INTERNAL_SERVER_ERROR, + message=("Compared session tokens '%s' and '%s' have unexpected regions." + % (self.session_token, other.session_token)) ) if self.version < other.version: @@ -148,11 +147,10 @@ def merge(self, other): if local_lsn2 is not None: highest_local_lsn_by_region[region_id] = max(local_lsn1, local_lsn2) elif self.version == other.version: - raise errors.CosmosError( - Exception( - "Status Code: %s. Compared session tokens '%s' and '%s' have unexpected regions." 
- % (StatusCodes.INTERNAL_SERVER_ERROR, self.session_token, other.session_token) - ) + raise errors.CosmosHttpResponseError( + status_code=StatusCodes.INTERNAL_SERVER_ERROR, + message=("Compared session tokens '%s' and '%s' have unexpected regions." + % (self.session_token, other.session_token)) ) else: highest_local_lsn_by_region[region_id] = local_lsn1 diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py index db929854e529..bb94879c7923 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py @@ -22,26 +22,29 @@ """Create, read, update and delete items in the Azure Cosmos DB SQL API service. """ -from typing import Any, Callable, Dict, List, Optional, Union +from typing import Any, Dict, List, Optional, Union, Iterable, cast # pylint: disable=unused-import import six -from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator import distributed_trace # type: ignore from ._cosmos_client_connection import CosmosClientConnection -from .errors import HTTPFailure +from ._base import build_options +from .errors import CosmosResourceNotFoundError from .http_constants import StatusCodes from .offer import Offer -from .scripts import Scripts -from ._query_iterable import QueryIterable +from .scripts import ScriptsProxy from .partition_key import NonePartitionKeyValue -__all__ = ("Container",) +__all__ = ("ContainerProxy",) # pylint: disable=protected-access +# pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs -class Container: - """ An Azure Cosmos DB container. +class ContainerProxy(object): + """ + An interface to interact with a specific DB Container. + This class should not be instantiated directly, use :func:`DatabaseProxy.get_container_client` method. 
A container in an Azure Cosmos DB SQL API database is a collection of documents, each of which represented as an Item. @@ -62,28 +65,30 @@ def __init__(self, client_connection, database_link, id, properties=None): # py self._properties = properties self.container_link = u"{}/colls/{}".format(database_link, self.id) self._is_system_key = None - self._scripts = None + self._scripts = None # type: Optional[ScriptsProxy] def _get_properties(self): # type: () -> Dict[str, Any] if self._properties is None: - self.read() + self._properties = self.read() return self._properties @property def is_system_key(self): + # type: () -> bool if self._is_system_key is None: properties = self._get_properties() self._is_system_key = ( properties["partitionKey"]["systemKey"] if "systemKey" in properties["partitionKey"] else False ) - return self._is_system_key + return cast('bool', self._is_system_key) @property def scripts(self): + # type: () -> ScriptsProxy if self._scripts is None: - self._scripts = Scripts(self.client_connection, self.container_link, self.is_system_key) - return self._scripts + self._scripts = ScriptsProxy(self.client_connection, self.container_link, self.is_system_key) + return cast('ScriptsProxy', self._scripts) def _get_document_link(self, item_or_link): # type: (Union[Dict[str, Any], str]) -> str @@ -97,20 +102,22 @@ def _get_conflict_link(self, conflict_or_link): return u"{}/conflicts/{}".format(self.container_link, conflict_or_link) return conflict_or_link["_self"] + def _set_partition_key(self, partition_key): + if partition_key == NonePartitionKeyValue: + return CosmosClientConnection._return_undefined_or_empty_partition_key(self.is_system_key) + return partition_key + @distributed_trace def read( self, - session_token=None, - initial_headers=None, - populate_query_metrics=None, - populate_partition_key_range_statistics=None, - populate_quota_info=None, - request_options=None, - response_hook=None, - **kwargs + populate_query_metrics=None, # type: 
Optional[bool] + populate_partition_key_range_statistics=None, # type: Optional[bool] + populate_quota_info=None, # type: Optional[bool] + **kwargs # type: Any ): - # type: (str, Dict[str, str], bool, bool, bool, Dict[str, Any], Optional[Callable]) -> Container - """ Read the container properties + # type: (...) -> Dict[str, Any] + """ + Read the container properties :param session_token: Token for use with Session consistency. :param initial_headers: Initial headers to be sent as part of the request. @@ -120,17 +127,13 @@ def read( :param populate_quota_info: Enable returning collection storage quota information in response headers. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :raise `HTTPFailure`: Raised if the container couldn't be retrieved. This includes + :raise `CosmosHttpResponseError`: Raised if the container couldn't be retrieved. This includes if the container does not exist. - :returns: :class:`Container` instance representing the retrieved container. - + :returns: Dict representing the retrieved container. 
+ :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] - if session_token: - request_options["sessionToken"] = session_token - if initial_headers: - request_options["initialHeaders"] = initial_headers + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics if populate_partition_key_range_statistics is not None: @@ -139,29 +142,27 @@ def read( request_options["populateQuotaInfo"] = populate_quota_info collection_link = self.container_link - self._properties = self.client_connection.ReadContainer(collection_link, options=request_options, **kwargs) + self._properties = self.client_connection.ReadContainer( + collection_link, options=request_options, **kwargs + ) if response_hook: response_hook(self.client_connection.last_response_headers, self._properties) - return self._properties + return cast('Dict[str, Any]', self._properties) @distributed_trace def read_item( self, item, # type: Union[str, Dict[str, Any]] partition_key, # type: Any - session_token=None, # type: str - initial_headers=None, # type: # type: Dict[str, str] - populate_query_metrics=None, # type: bool - post_trigger_include=None, # type: str - request_options=None, # type: Dict[str, Any] - response_hook=None, # type: Optional[Callable] - **kwargs + populate_query_metrics=None, # type: Optional[bool] + post_trigger_include=None, # type: Optional[str] + **kwargs # type: Any ): # type: (...) -> Dict[str, str] """ - Get the item identified by `id`. + Get the item identified by `item`. :param item: The ID (name) or dict representing item to retrieve. :param partition_key: Partition key for the item to retrieve. @@ -172,7 +173,8 @@ def read_item( :param request_options: Dictionary of additional properties to be used for the request. 
:param response_hook: a callable invoked with the response metadata :returns: Dict representing the item to be retrieved. - :raise `HTTPFailure`: If the given item couldn't be retrieved. + :raise `CosmosHttpResponseError`: If the given item couldn't be retrieved. + :rtype: dict[str, Any] .. literalinclude:: ../../examples/examples.py :start-after: [START update_item] @@ -184,15 +186,11 @@ def read_item( """ doc_link = self._get_document_link(item) + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) - if not request_options: - request_options = {} # type: Dict[str, Any] if partition_key: request_options["partitionKey"] = self._set_partition_key(partition_key) - if session_token: - request_options["sessionToken"] = session_token - if initial_headers: - request_options["initialHeaders"] = initial_headers if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics if post_trigger_include: @@ -206,16 +204,13 @@ def read_item( @distributed_trace def read_all_items( self, - max_item_count=None, - session_token=None, - initial_headers=None, - populate_query_metrics=None, - feed_options=None, - response_hook=None, - **kwargs + max_item_count=None, # type: Optional[int] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any ): - # type: (int, str, Dict[str, str], bool, Dict[str, Any], Optional[Callable]) -> QueryIterable - """ List all items in the container. + # type: (...) -> Iterable[Dict[str, Any]] + """ + List all items in the container. :param max_item_count: Max number of items to be returned in the enumeration operation. :param session_token: Token for use with Session consistency. @@ -224,15 +219,12 @@ def read_all_items( :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: An Iterable of items (dicts). 
+ :rtype: Iterable[dict[str, Any]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - if session_token: - feed_options["sessionToken"] = session_token - if initial_headers: - feed_options["initialHeaders"] = initial_headers if populate_query_metrics is not None: feed_options["populateQueryMetrics"] = populate_query_metrics @@ -249,30 +241,29 @@ def read_all_items( @distributed_trace def query_items_change_feed( self, - partition_key_range_id=None, - is_start_from_beginning=False, - continuation=None, - max_item_count=None, - feed_options=None, - response_hook=None, - **kwargs + partition_key_range_id=None, # type: Optional[str] + is_start_from_beginning=False, # type: bool + continuation=None, # type: Optional[str] + max_item_count=None, # type: Optional[int] + **kwargs # type: Any ): - """ Get a sorted list of items that were changed, in the order in which they were modified. + # type: (...) -> Iterable[Dict[str, Any]] + """ + Get a sorted list of items that were changed, in the order in which they were modified. :param partition_key_range_id: ChangeFeed requests can be executed against specific partition key ranges. - This is used to process the change feed in parallel across multiple consumers. + This is used to process the change feed in parallel across multiple consumers. :param is_start_from_beginning: Get whether change feed should start from - beginning (true) or from current (false). - By default it's start from current (false). + beginning (true) or from current (false). By default it's start from current (false). :param continuation: e_tag value to be used as continuation for reading change feed. :param max_item_count: Max number of items to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. 
:param response_hook: a callable invoked with the response metadata :returns: An Iterable of items (dicts). - + :rtype: Iterable[dict[str, Any]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if partition_key_range_id is not None: feed_options["partitionKeyRangeId"] = partition_key_range_id if is_start_from_beginning is not None: @@ -296,27 +287,28 @@ def query_items_change_feed( def query_items( self, query, # type: str - parameters=None, # type: List - partition_key=None, # type: Any - enable_cross_partition_query=None, # type: bool - max_item_count=None, # type: int - session_token=None, # type: str - initial_headers=None, # type: Dict[str, str] - enable_scan_in_query=None, # type: bool - populate_query_metrics=None, # type: bool - feed_options=None, # type: Dict[str, Any] - response_hook=None, # type: Optional[Callable] - **kwargs + parameters=None, # type: Optional[List[str]] + partition_key=None, # type: Optional[Any] + enable_cross_partition_query=None, # type: Optional[bool] + max_item_count=None, # type: Optional[int] + enable_scan_in_query=None, # type: Optional[bool] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any ): - # type: (...) -> QueryIterable - """Return all results matching the given `query`. + # type: (...) -> Iterable[Dict[str, Any]] + """ + Return all results matching the given `query`. + + You can use any value for the container name in the FROM clause, but typically the container name is used. + In the examples below, the container name is "products," and is aliased as "p" for easier referencing + in the WHERE clause. :param query: The Azure Cosmos DB SQL query to execute. :param parameters: Optional array of parameters to the query. Ignored if no query is provided. :param partition_key: Specifies the partition key value for the item. 
:param enable_cross_partition_query: Allows sending of more than one request to execute the query in the Azure Cosmos DB service. - More than one request is necessary if the query is not scoped to single partition key value. + More than one request is necessary if the query is not scoped to single partition key value. :param max_item_count: Max number of items to be returned in the enumeration operation. :param session_token: Token for use with Session consistency. :param initial_headers: Initial headers to be sent as part of the request. @@ -326,10 +318,7 @@ def query_items( :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: An Iterable of items (dicts). - - You can use any value for the container name in the FROM clause, but typically the container name is used. - In the examples below, the container name is "products," and is aliased as "p" for easier referencing - in the WHERE clause. + :rtype: Iterable[dict[str, Any]] .. 
literalinclude:: ../../examples/examples.py :start-after: [START query_items] @@ -348,16 +337,12 @@ def query_items( :name: query_items_param """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if enable_cross_partition_query is not None: feed_options["enableCrossPartitionQuery"] = enable_cross_partition_query if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - if session_token: - feed_options["sessionToken"] = session_token - if initial_headers: - feed_options["initialHeaders"] = initial_headers if populate_query_metrics is not None: feed_options["populateQueryMetrics"] = populate_query_metrics if partition_key is not None: @@ -369,7 +354,7 @@ def query_items( response_hook.clear() items = self.client_connection.QueryItems( - database_or_Container_link=self.container_link, + database_or_container_link=self.container_link, query=query if parameters is None else dict(query=query, parameters=parameters), options=feed_options, partition_key=partition_key, @@ -385,18 +370,14 @@ def replace_item( self, item, # type: Union[str, Dict[str, Any]] body, # type: Dict[str, Any] - session_token=None, # type: str - initial_headers=None, # type: Dict[str, str] - access_condition=None, # type: Dict[str, str] - populate_query_metrics=None, # type: bool - pre_trigger_include=None, # type: str - post_trigger_include=None, # type: str - request_options=None, # type: Dict[str, Any] - response_hook=None, # type: Optional[Callable] - **kwargs + populate_query_metrics=None, # type: Optional[bool] + pre_trigger_include=None, # type: Optional[str] + post_trigger_include=None, # type: Optional[str] + **kwargs # type: Any ): # type: (...) -> Dict[str, str] - """ Replaces the specified item if it exists in the container. + """ + Replaces the specified item if it exists in the container. :param item: The ID (name) or dict representing item to be replaced. 
:param body: A dict-like object representing the item to replace. @@ -409,19 +390,13 @@ def replace_item( :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A dict representing the item after replace went through. - :raise `HTTPFailure`: If the replace failed or the item with given id does not exist. - + :raise `CosmosHttpResponseError`: If the replace failed or the item with given id does not exist. + :rtype: dict[str, Any] """ item_link = self._get_document_link(item) - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) request_options["disableIdGeneration"] = True - if session_token: - request_options["sessionToken"] = session_token - if initial_headers: - request_options["initialHeaders"] = initial_headers - if access_condition: - request_options["accessCondition"] = access_condition if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics if pre_trigger_include: @@ -440,18 +415,15 @@ def replace_item( def upsert_item( self, body, # type: Dict[str, Any] - session_token=None, # type: str - initial_headers=None, # type: Dict[str, str] - access_condition=None, # type: Dict[str, str] - populate_query_metrics=None, # type: bool - pre_trigger_include=None, # type: str - post_trigger_include=None, # type: str - request_options=None, # type: Dict[str, Any] - response_hook=None, # type: Optional[Callable] - **kwargs + populate_query_metrics=None, # type: Optional[bool] + pre_trigger_include=None, # type: Optional[str] + post_trigger_include=None, # type: Optional[str] + **kwargs # type: Any ): # type: (...) -> Dict[str, str] - """ Insert or update the specified item. + """ + Insert or update the specified item. + If the item already exists in the container, it is replaced. 
If it does not, it is inserted. :param body: A dict-like object representing the item to update or insert. :param session_token: Token for use with Session consistency. @@ -463,20 +435,12 @@ def upsert_item( :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A dict representing the upserted item. - :raise `HTTPFailure`: If the given item could not be upserted. - - If the item already exists in the container, it is replaced. If it does not, it is inserted. - + :raise `CosmosHttpResponseError`: If the given item could not be upserted. + :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) request_options["disableIdGeneration"] = True - if session_token: - request_options["sessionToken"] = session_token - if initial_headers: - request_options["initialHeaders"] = initial_headers - if access_condition: - request_options["accessCondition"] = access_condition if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics if pre_trigger_include: @@ -485,7 +449,7 @@ def upsert_item( request_options["postTriggerInclude"] = post_trigger_include result = self.client_connection.UpsertItem( - database_or_Container_link=self.container_link, document=body, **kwargs) + database_or_container_link=self.container_link, document=body, **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers, result) return result @@ -494,19 +458,16 @@ def upsert_item( def create_item( self, body, # type: Dict[str, Any] - session_token=None, # type: str - initial_headers=None, # type: Dict[str, str] - access_condition=None, # type: Dict[str, str] - populate_query_metrics=None, # type: bool - pre_trigger_include=None, # type: str - post_trigger_include=None, # type: str - 
indexing_directive=None, # type: Any - request_options=None, # type: Dict[str, Any] - response_hook=None, # type: Optional[Callable] - **kwargs + populate_query_metrics=None, # type: Optional[bool] + pre_trigger_include=None, # type: Optional[str] + post_trigger_include=None, # type: Optional[str] + indexing_directive=None, # type: Optional[Any] + **kwargs # type: Any ): # type: (...) -> Dict[str, str] - """ Create an item in the container. + """ + Create an item in the container. + To update or replace an existing item, use the :func:`ContainerProxy.upsert_item` method. :param body: A dict-like object representing the item to create. :param session_token: Token for use with Session consistency. @@ -519,21 +480,13 @@ def create_item( :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A dict representing the new item. - :raises `HTTPFailure`: If item with the given ID already exists. - - To update or replace an existing item, use the :func:`Container.upsert_item` method. - + :raises `CosmosHttpResponseError`: If item with the given ID already exists. 
+ :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) request_options["disableAutomaticIdGeneration"] = True - if session_token: - request_options["sessionToken"] = session_token - if initial_headers: - request_options["initialHeaders"] = initial_headers - if access_condition: - request_options["accessCondition"] = access_condition if populate_query_metrics: request_options["populateQueryMetrics"] = populate_query_metrics if pre_trigger_include: @@ -544,7 +497,7 @@ def create_item( request_options["indexingDirective"] = indexing_directive result = self.client_connection.CreateItem( - database_or_Container_link=self.container_link, document=body, options=request_options, **kwargs + database_or_container_link=self.container_link, document=body, options=request_options, **kwargs ) if response_hook: response_hook(self.client_connection.last_response_headers, result) @@ -555,18 +508,14 @@ def delete_item( self, item, # type: Union[Dict[str, Any], str] partition_key, # type: Any - session_token=None, # type: str - initial_headers=None, # type: Dict[str, str] - access_condition=None, # type: Dict[str, str] - populate_query_metrics=None, # type: bool - pre_trigger_include=None, # type: str - post_trigger_include=None, # type: str - request_options=None, # type: Dict[str, Any] - response_hook=None, # type: Optional[Callable] - **kwargs + populate_query_metrics=None, # type: Optional[bool] + pre_trigger_include=None, # type: Optional[str] + post_trigger_include=None, # type: Optional[str] + **kwargs # type: Any ): # type: (...) -> None - """ Delete the specified item from the container. + """ + Delete the specified item from the container. :param item: The ID (name) or dict representing item to be deleted. :param partition_key: Specifies the partition key value for the item. 
@@ -578,20 +527,14 @@ def delete_item( :param post_trigger_include: trigger id to be used as post operation trigger. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :raises `HTTPFailure`: The item wasn't deleted successfully. If the item does not + :raises `CosmosHttpResponseError`: The item wasn't deleted successfully. If the item does not exist in the container, a `404` error is returned. - + :rtype: None """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if partition_key: request_options["partitionKey"] = self._set_partition_key(partition_key) - if session_token: - request_options["sessionToken"] = session_token - if initial_headers: - request_options["initialHeaders"] = initial_headers - if access_condition: - request_options["accessCondition"] = access_condition if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics if pre_trigger_include: @@ -605,15 +548,17 @@ def delete_item( response_hook(self.client_connection.last_response_headers, result) @distributed_trace - def read_offer(self, response_hook=None, **kwargs): - # type: (Optional[Callable]) -> Offer - """ Read the Offer object for this container. + def read_offer(self, **kwargs): + # type: (Any) -> Offer + """ + Read the Offer object for this container. :param response_hook: a callable invoked with the response metadata :returns: Offer for the container. - :raise HTTPFailure: If no offer exists for the container or if the offer could not be retrieved. - + :raise CosmosHttpResponseError: If no offer exists for the container or if the offer could not be retrieved. 
+ :rtype: ~azure.cosmos.offer.Offer """ + response_hook = kwargs.pop('response_hook', None) properties = self._get_properties() link = properties["_self"] query_spec = { @@ -622,7 +567,9 @@ def read_offer(self, response_hook=None, **kwargs): } offers = list(self.client_connection.QueryOffers(query_spec, **kwargs)) if not offers: - raise HTTPFailure(StatusCodes.NOT_FOUND, "Could not find Offer for container " + self.container_link) + raise CosmosResourceNotFoundError( + status_code=StatusCodes.NOT_FOUND, + message="Could not find Offer for container " + self.container_link) if response_hook: response_hook(self.client_connection.last_response_headers, offers) @@ -630,16 +577,18 @@ def read_offer(self, response_hook=None, **kwargs): return Offer(offer_throughput=offers[0]["content"]["offerThroughput"], properties=offers[0]) @distributed_trace - def replace_throughput(self, throughput, response_hook=None, **kwargs): - # type: (int, Optional[Callable]) -> Offer - """ Replace the container's throughput + def replace_throughput(self, throughput, **kwargs): + # type: (int, Any) -> Offer + """ + Replace the container's throughput :param throughput: The throughput to be set (an integer). :param response_hook: a callable invoked with the response metadata :returns: Offer for the container, updated with new throughput. - :raise HTTPFailure: If no offer exists for the container or if the offer could not be updated. - + :raise CosmosHttpResponseError: If no offer exists for the container or if the offer could not be updated. 
+ :rtype: ~azure.cosmos.offer.Offer """ + response_hook = kwargs.pop('response_hook', None) properties = self._get_properties() link = properties["_self"] query_spec = { @@ -648,7 +597,9 @@ def replace_throughput(self, throughput, response_hook=None, **kwargs): } offers = list(self.client_connection.QueryOffers(query_spec, **kwargs)) if not offers: - raise HTTPFailure(StatusCodes.NOT_FOUND, "Could not find Offer for container " + self.container_link) + raise CosmosResourceNotFoundError( + status_code=StatusCodes.NOT_FOUND, + message="Could not find Offer for container " + self.container_link) new_offer = offers[0].copy() new_offer["content"]["offerThroughput"] = throughput data = self.client_connection.ReplaceOffer(offer_link=offers[0]["_self"], offer=offers[0], **kwargs) @@ -659,18 +610,19 @@ def replace_throughput(self, throughput, response_hook=None, **kwargs): return Offer(offer_throughput=data["content"]["offerThroughput"], properties=data) @distributed_trace - def read_all_conflicts(self, max_item_count=None, feed_options=None, response_hook=None, **kwargs): - # type: (int, Dict[str, Any], Optional[Callable]) -> QueryIterable - """ List all conflicts in the container. + def list_conflicts(self, max_item_count=None, **kwargs): + # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + """ + List all conflicts in the container. :param max_item_count: Max number of items to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: An Iterable of conflicts (dicts). 
- + :rtype: Iterable[dict[str, Any]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -684,32 +636,31 @@ def read_all_conflicts(self, max_item_count=None, feed_options=None, response_ho @distributed_trace def query_conflicts( self, - query, - parameters=None, - enable_cross_partition_query=None, - partition_key=None, - max_item_count=None, - feed_options=None, - response_hook=None, - **kwargs + query, # type: str + parameters=None, # type: Optional[List[str]] + enable_cross_partition_query=None, # type: Optional[bool] + partition_key=None, # type: Optional[Any] + max_item_count=None, # type: Optional[int] + **kwargs # type: Any ): - # type: (str, List, bool, Any, int, Dict[str, Any], Optional[Callable]) -> QueryIterable - """Return all conflicts matching the given `query`. + # type: (...) -> Iterable[Dict[str, Any]] + """ + Return all conflicts matching the given `query`. :param query: The Azure Cosmos DB SQL query to execute. :param parameters: Optional array of parameters to the query. Ignored if no query is provided. :param partition_key: Specifies the partition key value for the item. :param enable_cross_partition_query: Allows sending of more than one request to execute the query in the Azure Cosmos DB service. - More than one request is necessary if the query is not scoped to single partition key value. + More than one request is necessary if the query is not scoped to single partition key value. :param max_item_count: Max number of items to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: An Iterable of conflicts (dicts). 
- + :rtype: Iterable[dict[str, Any]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count if enable_cross_partition_query is not None: @@ -728,20 +679,21 @@ def query_conflicts( return result @distributed_trace - def get_conflict(self, conflict, partition_key, request_options=None, response_hook=None, **kwargs): - # type: (Union[str, Dict[str, Any]], Any, Dict[str, Any], Optional[Callable]) -> Dict[str, str] - """ Get the conflict identified by `id`. + def get_conflict(self, conflict, partition_key, **kwargs): + # type: (Union[str, Dict[str, Any]], Any, Any) -> Dict[str, str] + """ + Get the conflict identified by `conflict`. :param conflict: The ID (name) or dict representing the conflict to retrieve. :param partition_key: Partition key for the conflict to retrieve. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A dict representing the retrieved conflict. - :raise `HTTPFailure`: If the given conflict couldn't be retrieved. - + :raise `CosmosHttpResponseError`: If the given conflict couldn't be retrieved. + :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if partition_key: request_options["partitionKey"] = self._set_partition_key(partition_key) @@ -753,20 +705,21 @@ def get_conflict(self, conflict, partition_key, request_options=None, response_h return result @distributed_trace - def delete_conflict(self, conflict, partition_key, request_options=None, response_hook=None, **kwargs): - # type: (Union[str, Dict[str, Any]], Any, Dict[str, Any], Optional[Callable]) -> None - """ Delete the specified conflict from the container. 
+ def delete_conflict(self, conflict, partition_key, **kwargs): + # type: (Union[str, Dict[str, Any]], Any, Any) -> None + """ + Delete the specified conflict from the container. :param conflict: The ID (name) or dict representing the conflict to be deleted. :param partition_key: Partition key for the conflict to delete. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :raises `HTTPFailure`: The conflict wasn't deleted successfully. If the conflict + :raises `CosmosHttpResponseError`: The conflict wasn't deleted successfully. If the conflict does not exist in the container, a `404` error is returned. - + :rtype: None """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if partition_key: request_options["partitionKey"] = self._set_partition_key(partition_key) @@ -775,8 +728,3 @@ def delete_conflict(self, conflict, partition_key, request_options=None, respons ) if response_hook: response_hook(self.client_connection.last_response_headers, result) - - def _set_partition_key(self, partition_key): - if partition_key == NonePartitionKeyValue: - return CosmosClientConnection._return_undefined_or_empty_partition_key(self.is_system_key) - return partition_key diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py index 9b0aa1836831..9387163ce416 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py @@ -22,91 +22,205 @@ """Create, read, and delete databases in the Azure Cosmos DB SQL API service. 
""" -from typing import Any, Callable, Dict, Mapping, Optional, Union, cast +from typing import Any, Dict, Mapping, Optional, Union, cast, Iterable, List import six -from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator import distributed_trace # type: ignore from ._cosmos_client_connection import CosmosClientConnection -from .database import Database +from ._base import build_options +from .database import DatabaseProxy from .documents import ConnectionPolicy, DatabaseAccount -from ._query_iterable import QueryIterable __all__ = ("CosmosClient",) -class CosmosClient: +def _parse_connection_str(conn_str, credential): + # type: (str, Optional[Any]) -> Dict[str, str] + conn_str = conn_str.rstrip(";") + conn_settings = dict( # type: ignore # pylint: disable=consider-using-dict-comprehension + s.split("=", 1) for s in conn_str.split(";") + ) + if 'AccountEndpoint' not in conn_settings: + raise ValueError("Connection string missing setting 'AccountEndpoint'.") + if not credential and 'AccountKey' not in conn_settings: + raise ValueError("Connection string missing setting 'AccountKey'.") + return conn_settings + + +def _build_auth(credential): + # type: (Any) -> Dict[str, Any] + auth = {} + if isinstance(credential, six.string_types): + auth['masterKey'] = credential + elif isinstance(credential, dict): + if any(k for k in credential.keys() if k in ['masterKey', 'resourceTokens', 'permissionFeed']): + return credential # Backwards compatible + auth['resourceTokens'] = credential # type: ignore + elif hasattr(credential, '__iter__'): + auth['permissionFeed'] = credential + else: + raise TypeError( + "Unrecognized credential type. 
Please supply the master key as str, " + "or a dictionary of resource tokens, or a list of permissions.") + return auth + + +def _build_connection_policy(kwargs): + # type: (Dict[str, Any]) -> ConnectionPolicy + # pylint: disable=protected-access + policy = kwargs.pop('connection_policy', None) or ConnectionPolicy() + + # Connection config + policy.RequestTimeout = kwargs.pop('request_timeout', None) or \ + kwargs.pop('connection_timeout', None) or \ + policy.RequestTimeout + policy.MediaRequestTimeout = kwargs.pop('media_request_timeout', None) or policy.MediaRequestTimeout + policy.ConnectionMode = kwargs.pop('connection_mode', None) or policy.ConnectionMode + policy.MediaReadMode = kwargs.pop('media_read_mode', None) or policy.MediaReadMode + policy.ProxyConfiguration = kwargs.pop('proxy_config', None) or policy.ProxyConfiguration + policy.EnableEndpointDiscovery = kwargs.pop('enable_endpoint_discovery', None) or policy.EnableEndpointDiscovery + policy.PreferredLocations = kwargs.pop('preferred_locations', None) or policy.PreferredLocations + policy.UseMultipleWriteLocations = kwargs.pop('multiple_write_locations', None) or \ + policy.UseMultipleWriteLocations + + # SSL config + verify = kwargs.pop('connection_verify', None) + policy.DisableSSLVerification = not bool(verify if verify is not None else True) + ssl = kwargs.pop('ssl_config', None) or policy.SSLConfiguration + if ssl: + ssl.SSLCertFile = kwargs.pop('connection_cert', None) or ssl.SSLCertFile + ssl.SSLCaCerts = verify or ssl.SSLCaCerts + policy.SSLConfiguration = ssl + + # Retry config + retry = kwargs.pop('retry_options', None) or policy.RetryOptions + retry._max_retry_attempt_count = kwargs.pop('retry_total', None) or retry._max_retry_attempt_count + retry._fixed_retry_interval_in_milliseconds = kwargs.pop('retry_fixed_interval', None) or \ + retry._fixed_retry_interval_in_milliseconds + retry._max_wait_time_in_seconds = kwargs.pop('retry_backoff_max', None) or retry._max_wait_time_in_seconds + 
policy.RetryOptions = retry + + return policy + + +class CosmosClient(object): """ Provides a client-side logical representation of an Azure Cosmos DB account. Use this client to configure and execute requests to the Azure Cosmos DB service. + + :param str url: The URL of the Cosmos DB account. + :param credential: + Can be the account key, or a dictionary of resource tokens. + :type credential: str or dict(str, str) + :param str consistency_level: + Consistency level to use for the session. The default value is "Session". + + **Keyword arguments:** + + *request_timeout* - The HTTP request timeout in seconds. + *media_request_timeout* - The media request timeout in seconds. + *connection_mode* - The connection mode for the client - currently only supports 'Gateway'. + *media_read_mode* - The mode for use with downloading attachment content - default value is `Buffered`. + *proxy_config* - Instance of ~azure.cosmos.documents.ProxyConfiguration + *ssl_config* - Instance of ~azure.cosmos.documents.SSLConfiguration + *connection_verify* - Whether to verify the connection, default value is True. + *connection_cert* - An alternative certificate to verify the connection. + *retry_total* - Maximum retry attempts. + *retry_backoff_max* - Maximum retry wait time in seconds. + *retry_fixed_interval* - Fixed retry interval in milliseconds. + *enable_endpoint_discovery* - Enable endpoint discovery for geo-replicated database accounts. Default is True. + *preferred_locations* - The preferred locations for geo-replicated database accounts. + When `enable_endpoint_discovery` is true and `preferred_locations` is non-empty, + the client will use this list to evaluate the final location, taking into consideration + the order specified in `preferred_locations` list. The locations in this list are specified + as the names of the azure Cosmos locations like, 'West US', 'East US', 'Central India' + and so on. 
+ *connection_policy* - An instance of ~azure.cosmos.documents.ConnectionPolicy + + .. literalinclude:: ../../examples/examples.py + :start-after: [START create_client] + :end-before: [END create_client] + :language: python + :dedent: 0 + :caption: Create a new instance of the Cosmos DB client: + :name: create_client """ - def __init__( - self, url, auth, consistency_level="Session", connection_policy=None - ): # pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs,line-too-long - # type: (str, Dict[str, str], str, ConnectionPolicy) -> None - """ Instantiate a new CosmosClient. - - :param url: The URL of the Cosmos DB account. - :param auth: - Contains 'masterKey' or 'resourceTokens', where - auth['masterKey'] is the default authorization key to use to - create the client, and auth['resourceTokens'] is the alternative - authorization key. - :param consistency_level: Consistency level to use for the session. - :param connection_policy: Connection policy to use for the session. + def __init__(self, url, credential, consistency_level="Session", **kwargs): + # type: (str, Any, str, Any) -> None + """ Instantiate a new CosmosClient.""" + auth = _build_auth(credential) + connection_policy = _build_connection_policy(kwargs) + self.client_connection = CosmosClientConnection( + url, auth=auth, consistency_level=consistency_level, connection_policy=connection_policy, **kwargs + ) - .. 
literalinclude:: ../../examples/examples.py - :start-after: [START create_client] - :end-before: [END create_client] - :language: python - :dedent: 0 - :caption: Create a new instance of the Cosmos DB client: - :name: create_client + def __enter__(self): + self.client_connection.pipeline_client.__enter__() + return self + + def __exit__(self, *args): + return self.client_connection.pipeline_client.__exit__(*args) + @classmethod + def from_connection_string(cls, conn_str, credential=None, consistency_level="Session", **kwargs): + # type: (str, Optional[Any], str, Any) -> CosmosClient """ - self.client_connection = CosmosClientConnection( - url, auth, consistency_level=consistency_level, connection_policy=connection_policy + Create CosmosClient from a connection string. + + This can be retrieved from the Azure portal. For full list of optional keyword + arguments, see the CosmosClient constructor. + + :param str conn_str: The connection string. + :param credential: Alternative credentials to use instead of the key provided in the + connection string. + :type credential: str or dict(str, str) + :param str consistency_level: Consistency level to use for the session. The default value is "Session". 
+ """ + settings = _parse_connection_str(conn_str, credential) + return cls( + url=settings['AccountEndpoint'], + credential=credential or settings['AccountKey'], + consistency_level=consistency_level, + **kwargs ) @staticmethod def _get_database_link(database_or_id): - # type: (str) -> str + # type: (Union[DatabaseProxy, str, Dict[str, str]]) -> str if isinstance(database_or_id, six.string_types): return "dbs/{}".format(database_or_id) try: - return cast("Database", database_or_id).database_link + return cast("DatabaseProxy", database_or_id).database_link except AttributeError: pass database_id = cast("Dict[str, str]", database_or_id)["id"] return "dbs/{}".format(database_id) @distributed_trace - def create_database( + def create_database( # pylint: disable=redefined-builtin self, - id, # pylint: disable=redefined-builtin - session_token=None, - initial_headers=None, - access_condition=None, - populate_query_metrics=None, - offer_throughput=None, - request_options=None, - response_hook=None, - **kwargs + id, # type: str + populate_query_metrics=None, # type: Optional[bool] + offer_throughput=None, # type: Optional[int] + **kwargs # type: Any ): - # type: (str, str, Dict[str, str], Dict[str, str], bool, int, Dict[str, Any], Optional[Callable]) -> Database - """Create a new database with the given ID (name). + # type: (...) -> DatabaseProxy + """ + Create a new database with the given ID (name). :param id: ID (name) of the database to create. - :param session_token: Token for use with Session consistency. - :param initial_headers: Initial headers to be sent as part of the request. - :param access_condition: Conditions Associated with the request. - :param populate_query_metrics: Enable returning query metrics in response headers. - :param offer_throughput: The provisioned throughput for this offer. - :param request_options: Dictionary of additional properties to be used for the request. 
- :param response_hook: a callable invoked with the response metadata - :returns: A :class:`Database` instance representing the new database. - :raises `HTTPFailure`: If database with the given ID already exists. + :param str session_token: Token for use with Session consistency. + :param dict(str, str) initial_headers: Initial headers to be sent as part of the request. + :param dict(str, str) access_condition: Conditions Associated with the request. + :param bool populate_query_metrics: Enable returning query metrics in response headers. + :param int offer_throughput: The provisioned throughput for this offer. + :param dict(str, Any) request_options: Dictionary of additional properties to be used for the request. + :param Callable response_hook: a callable invoked with the response metadata + :returns: A DatabaseProxy instance representing the new database. + :rtype: ~azure.cosmos.database.DatabaseProxy + :raises `CosmosResourceExistsError`: If database with the given ID already exists. .. 
literalinclude:: ../../examples/examples.py :start-after: [START create_database] @@ -118,14 +232,8 @@ def create_database( """ - if not request_options: - request_options = {} # type: Dict[str, Any] - if session_token: - request_options["sessionToken"] = session_token - if initial_headers: - request_options["initialHeaders"] = initial_headers - if access_condition: - request_options["accessCondition"] = access_condition + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics if offer_throughput is not None: @@ -134,59 +242,52 @@ def create_database( result = self.client_connection.CreateDatabase(database=dict(id=id), options=request_options, **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers) - return Database(self.client_connection, id=result["id"], properties=result) + return DatabaseProxy(self.client_connection, id=result["id"], properties=result) def get_database_client(self, database): - # type: (Union[str, Database, Dict[str, Any]]) -> Database + # type: (Union[str, DatabaseProxy, Dict[str, Any]]) -> DatabaseProxy """ Retrieve an existing database with the ID (name) `id`. - :param database: The ID (name), dict representing the properties or :class:`Database` + :param database: The ID (name), dict representing the properties or `DatabaseProxy` instance of the database to read. - :returns: A :class:`Database` instance representing the retrieved database. - + :type database: str or dict(str, str) or ~azure.cosmos.database.DatabaseProxy + :returns: A `DatabaseProxy` instance representing the retrieved database. 
+ :rtype: ~azure.cosmos.database.DatabaseProxy """ - if isinstance(database, Database): + if isinstance(database, DatabaseProxy): id_value = database.id elif isinstance(database, Mapping): id_value = database["id"] else: id_value = database - return Database(self.client_connection, id_value) + return DatabaseProxy(self.client_connection, id_value) @distributed_trace - def read_all_databases( + def list_databases( self, - max_item_count=None, - session_token=None, - initial_headers=None, - populate_query_metrics=None, - feed_options=None, - response_hook=None, - **kwargs + max_item_count=None, # type: Optional[int] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any ): - # type: (int, str, Dict[str, str], bool, Dict[str, Any], Optional[Callable]) -> QueryIterable + # type: (...) -> Iterable[Dict[str, Any]] """ List the databases in a Cosmos DB SQL database account. - :param max_item_count: Max number of items to be returned in the enumeration operation. - :param session_token: Token for use with Session consistency. - :param initial_headers: Initial headers to be sent as part of the request. - :param populate_query_metrics: Enable returning query metrics in response headers. - :param feed_options: Dictionary of additional properties to be used for the request. - :param response_hook: a callable invoked with the response metadata + :param int max_item_count: Max number of items to be returned in the enumeration operation. + :param str session_token: Token for use with Session consistency. + :param dict[str, str] initial_headers: Initial headers to be sent as part of the request. + :param bool populate_query_metrics: Enable returning query metrics in response headers. + :param dict[str, str] feed_options: Dictionary of additional properties to be used for the request. + :param Callable response_hook: a callable invoked with the response metadata :returns: An Iterable of database properties (dicts). 
- + :rtype: Iterable[dict[str, str]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - if session_token: - feed_options["sessionToken"] = session_token - if initial_headers: - feed_options["initialHeaders"] = initial_headers if populate_query_metrics is not None: feed_options["populateQueryMetrics"] = populate_query_metrics @@ -198,45 +299,36 @@ def read_all_databases( @distributed_trace def query_databases( self, - query=None, # type: str - parameters=None, # type: List[str] - enable_cross_partition_query=None, # type: bool - max_item_count=None, # type: int - session_token=None, # type: str - initial_headers=None, # type: Dict[str,str] - populate_query_metrics=None, # type: bool - feed_options=None, # type: Dict[str, Any] - response_hook=None, # type: Optional[Callable] - **kwargs + query=None, # type: Optional[str] + parameters=None, # type: Optional[List[str]] + enable_cross_partition_query=None, # type: Optional[bool] + max_item_count=None, # type: Optional[int] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any ): - # type: (...) -> QueryIterable - + # type: (...) -> Iterable[Dict[str, Any]] """ Query the databases in a Cosmos DB SQL database account. - :param query: The Azure Cosmos DB SQL query to execute. - :param parameters: Optional array of parameters to the query. Ignored if no query is provided. - :param enable_cross_partition_query: Allow scan on the queries which couldn't be + :param str query: The Azure Cosmos DB SQL query to execute. + :param list[str] parameters: Optional array of parameters to the query. Ignored if no query is provided. + :param bool enable_cross_partition_query: Allow scan on the queries which couldn't be served as indexing was opted out on the requested paths. 
- :param max_item_count: Max number of items to be returned in the enumeration operation. - :param session_token: Token for use with Session consistency. - :param initial_headers: Initial headers to be sent as part of the request. - :param populate_query_metrics: Enable returning query metrics in response headers. - :param feed_options: Dictionary of additional properties to be used for the request. - :param response_hook: a callable invoked with the response metadata + :param int max_item_count: Max number of items to be returned in the enumeration operation. + :param str session_token: Token for use with Session consistency. + :param dict[str, str] initial_headers: Initial headers to be sent as part of the request. + :param bool populate_query_metrics: Enable returning query metrics in response headers. + :param dict[str, Any] feed_options: Dictionary of additional properties to be used for the request. + :param Callable response_hook: a callable invoked with the response metadata :returns: An Iterable of database properties (dicts). 
- + :rtype: Iterable[dict[str, str]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if enable_cross_partition_query is not None: feed_options["enableCrossPartitionQuery"] = enable_cross_partition_query if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - if session_token: - feed_options["sessionToken"] = session_token - if initial_headers: - feed_options["initialHeaders"] = initial_headers if populate_query_metrics is not None: feed_options["populateQueryMetrics"] = populate_query_metrics @@ -246,7 +338,7 @@ def query_databases( # (just returning a generator did not initiate the first network call, so # the headers were misleading) # This needs to change for "real" implementation - query = query if parameters is None else dict(query=query, parameters=parameters) + query = query if parameters is None else dict(query=query, parameters=parameters) # type: ignore result = self.client_connection.QueryDatabases(query=query, options=feed_options, **kwargs) else: result = self.client_connection.ReadDatabases(options=feed_options, **kwargs) @@ -257,38 +349,28 @@ def query_databases( @distributed_trace def delete_database( self, - database, # type: Union[str, Database, Dict[str, Any]] - session_token=None, # type: str - initial_headers=None, # type: Dict[str, str] - access_condition=None, # type: Dict[str, str] - populate_query_metrics=None, # type: bool - request_options=None, # type: Dict[str, Any] - response_hook=None, # type: Optional[Callable] - **kwargs + database, # type: Union[str, DatabaseProxy, Dict[str, Any]] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any ): # type: (...) -> None """ Delete the database with the given ID (name). 
- :param database: The ID (name), dict representing the properties or :class:`Database` + :param database: The ID (name), dict representing the properties or :class:`DatabaseProxy` instance of the database to delete. - :param session_token: Token for use with Session consistency. - :param initial_headers: Initial headers to be sent as part of the request. - :param access_condition: Conditions Associated with the request. - :param populate_query_metrics: Enable returning query metrics in response headers. - :param request_options: Dictionary of additional properties to be used for the request. - :param response_hook: a callable invoked with the response metadata - :raise HTTPFailure: If the database couldn't be deleted. - + :type database: str or dict(str, str) or ~azure.cosmos.database.DatabaseProxy + :param str session_token: Token for use with Session consistency. + :param dict[str, str] initial_headers: Initial headers to be sent as part of the request. + :param dict[str, str] access_condition: Conditions Associated with the request. + :param bool populate_query_metrics: Enable returning query metrics in response headers. + :param dict[str, str] request_options: Dictionary of additional properties to be used for the request. + :param Callable response_hook: a callable invoked with the response metadata + :raise CosmosHttpResponseError: If the database couldn't be deleted. 
+ :rtype: None """ - if not request_options: - request_options = {} # type: Dict[str, Any] - if session_token: - request_options["sessionToken"] = session_token - if initial_headers: - request_options["initialHeaders"] = initial_headers - if access_condition: - request_options["accessCondition"] = access_condition + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics @@ -298,15 +380,16 @@ def delete_database( response_hook(self.client_connection.last_response_headers) @distributed_trace - def get_database_account(self, response_hook=None, **kwargs): - # type: (Optional[Callable]) -> DatabaseAccount + def get_database_account(self, **kwargs): + # type: (Any) -> DatabaseAccount """ Retrieve the database account information. - :param response_hook: a callable invoked with the response metadata - :returns: A :class:`DatabaseAccount` instance representing the Cosmos DB Database Account. - + :param Callable response_hook: a callable invoked with the response metadata + :returns: A `DatabaseAccount` instance representing the Cosmos DB Database Account. + :rtype: ~azure.cosmos.documents.DatabaseAccount """ + response_hook = kwargs.pop('response_hook', None) result = self.client_connection.GetDatabaseAccount(**kwargs) if response_hook: response_hook(self.client_connection.last_response_headers) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/database.py b/sdk/cosmos/azure-cosmos/azure/cosmos/database.py index 0df8589b70c2..a1ee733918b5 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/database.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/database.py @@ -22,26 +22,29 @@ """Create, read, update and delete containers in the Azure Cosmos DB SQL API service. 
""" -from typing import Any, List, Dict, Mapping, Union, cast +from typing import Any, List, Dict, Mapping, Union, cast, Iterable, Optional import six -from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator import distributed_trace # type: ignore from ._cosmos_client_connection import CosmosClientConnection -from .container import Container +from ._base import build_options +from .container import ContainerProxy from .offer import Offer from .http_constants import StatusCodes -from .errors import HTTPFailure -from .user import User -from ._query_iterable import QueryIterable +from .errors import CosmosResourceNotFoundError +from .user import UserProxy -__all__ = ("Database",) +__all__ = ("DatabaseProxy",) # pylint: disable=protected-access +# pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs -class Database(object): - """ Represents an Azure Cosmos DB SQL API database. +class DatabaseProxy(object): + """ + An interface to interact with a specific database. + This class should not be instantiated directly, use :func:`CosmosClient.get_database_client` method. A database contains one or more containers, each of which can contain items, stored procedures, triggers, and user-defined functions. 
@@ -75,25 +78,25 @@ def __init__(self, client_connection, id, properties=None): # pylint: disable=r @staticmethod def _get_container_id(container_or_id): - # type: (Union[str, Container, Dict[str, Any]]) -> str + # type: (Union[str, ContainerProxy, Dict[str, Any]]) -> str if isinstance(container_or_id, six.string_types): return container_or_id try: - return cast("Container", container_or_id).id + return cast("ContainerProxy", container_or_id).id except AttributeError: pass return cast("Dict[str, str]", container_or_id)["id"] def _get_container_link(self, container_or_id): - # type: (Union[str, Container, Dict[str, Any]]) -> str + # type: (Union[str, ContainerProxy, Dict[str, Any]]) -> str return u"{}/colls/{}".format(self.database_link, self._get_container_id(container_or_id)) def _get_user_link(self, user_or_id): - # type: (Union[User, str, Dict[str, Any]]) -> str + # type: (Union[UserProxy, str, Dict[str, Any]]) -> str if isinstance(user_or_id, six.string_types): return u"{}/users/{}".format(self.database_link, user_or_id) try: - return cast("User", user_or_id).user_link + return cast("UserProxy", user_or_id).user_link except AttributeError: pass return u"{}/users/{}".format(self.database_link, cast("Dict[str, str]", user_or_id)["id"]) @@ -101,77 +104,61 @@ def _get_user_link(self, user_or_id): def _get_properties(self): # type: () -> Dict[str, Any] if self._properties is None: - self.read() + self._properties = self.read() return self._properties @distributed_trace - def read( - self, - session_token=None, - initial_headers=None, - populate_query_metrics=None, - request_options=None, - response_hook=None, - **kwargs - ): - # type: (str, Dict[str, str], bool, Dict[str, Any], Optional[Callable]) -> Dict[str, Any] + def read(self, populate_query_metrics=None, **kwargs): + # type: (Optional[bool], Any) -> Dict[str, Any] """ - Read the database properties + Read the database properties. 
- :param database: The ID (name), dict representing the properties or :class:`Database` + :param database: The ID (name), dict representing the properties or :class:`DatabaseProxy` instance of the database to read. :param session_token: Token for use with Session consistency. :param initial_headers: Initial headers to be sent as part of the request. - :param populate_query_metrics: Enable returning query metrics in response headers. + :param bool populate_query_metrics: Enable returning query metrics in response headers. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :returns: Dict[Str, Any] - :raise `HTTPFailure`: If the given database couldn't be retrieved. - + :rtype: Dict[str, Any] + :raise `CosmosHttpResponseError`: If the given database couldn't be retrieved. """ # TODO this helper function should be extracted from CosmosClient from .cosmos_client import CosmosClient database_link = CosmosClient._get_database_link(self) - if not request_options: - request_options = {} # type: Dict[str, Any] - if session_token: - request_options["sessionToken"] = session_token - if initial_headers: - request_options["initialHeaders"] = initial_headers + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics - self._properties = self.client_connection.ReadDatabase(database_link, options=request_options, **kwargs) + self._properties = self.client_connection.ReadDatabase( + database_link, options=request_options, **kwargs + ) if response_hook: response_hook(self.client_connection.last_response_headers, self._properties) - return self._properties + return cast('Dict[str, Any]', self._properties) @distributed_trace def create_container( self, id, # type: str # pylint: disable=redefined-builtin - partition_key, # type: PartitionKey - 
indexing_policy=None, # type: Dict[str, Any] - default_ttl=None, # type: int - session_token=None, # type: str - initial_headers=None, # type: Dict[str, str] - access_condition=None, # type: Dict[str, str] - populate_query_metrics=None, # type: bool - offer_throughput=None, # type: int - unique_key_policy=None, # type: Dict[str, Any] - conflict_resolution_policy=None, # type: Dict[str, Any] - request_options=None, # type: Dict[str, Any] - response_hook=None, # type: Optional[Callable] - **kwargs + partition_key, # type: Any + indexing_policy=None, # type: Optional[Dict[str, Any]] + default_ttl=None, # type: Optional[int] + populate_query_metrics=None, # type: Optional[bool] + offer_throughput=None, # type: Optional[int] + unique_key_policy=None, # type: Optional[Dict[str, Any]] + conflict_resolution_policy=None, # type: Optional[Dict[str, Any]] + **kwargs # type: Any ): - # type: (...) -> Container + # type: (...) -> ContainerProxy """ Create a new container with the given ID (name). - If a container with the given ID already exists, an HTTPFailure with status_code 409 is raised. + If a container with the given ID already exists, a CosmosResourceExistsError is raised. :param id: ID (name) of container to create. :param partition_key: The partition key to use for the container. @@ -186,9 +173,9 @@ def create_container( :param conflict_resolution_policy: The conflict resolution policy to apply to the container. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :returns: A :class:`Container` instance representing the new container. - :raise HTTPFailure: The container creation failed. - + :returns: A `ContainerProxy` instance representing the new container. + :raise CosmosHttpResponseError: The container creation failed. + :rtype: ~azure.cosmos.container.ContainerProxy .. 
literalinclude:: ../../examples/examples.py :start-after: [START create_container] @@ -219,14 +206,8 @@ def create_container( if conflict_resolution_policy: definition["conflictResolutionPolicy"] = conflict_resolution_policy - if not request_options: - request_options = {} # type: Dict[str, Any] - if session_token: - request_options["sessionToken"] = session_token - if initial_headers: - request_options["initialHeaders"] = initial_headers - if access_condition: - request_options["accessCondition"] = access_condition + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics if offer_throughput is not None: @@ -239,25 +220,21 @@ def create_container( if response_hook: response_hook(self.client_connection.last_response_headers, data) - return Container(self.client_connection, self.database_link, data["id"], properties=data) + return ContainerProxy(self.client_connection, self.database_link, data["id"], properties=data) @distributed_trace def delete_container( self, - container, # type: Union[str, Container, Dict[str, Any]] - session_token=None, # type: str - initial_headers=None, # type: Dict[str, str] - access_condition=None, # type: Dict[str, str] - populate_query_metrics=None, # type: bool - request_options=None, # type: Dict[str, Any] - response_hook=None, # type: Optional[Callable] - **kwargs + container, # type: Union[str, ContainerProxy, Dict[str, Any]] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any ): # type: (...) -> None - """ Delete the container + """ + Delete the container :param container: The ID (name) of the container to delete. You can either - pass in the ID of the container to delete, a :class:`Container` instance or + pass in the ID of the container to delete, a :class:`ContainerProxy` instance or a dict representing the properties of the container. 
:param session_token: Token for use with Session consistency. :param initial_headers: Initial headers to be sent as part of the request. @@ -265,17 +242,11 @@ def delete_container( :param populate_query_metrics: Enable returning query metrics in response headers. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :raise HTTPFailure: If the container couldn't be deleted. - + :raise CosmosHttpResponseError: If the container couldn't be deleted. + :rtype: None """ - if not request_options: - request_options = {} # type: Dict[str, Any] - if session_token: - request_options["sessionToken"] = session_token - if initial_headers: - request_options["initialHeaders"] = initial_headers - if access_condition: - request_options["accessCondition"] = access_condition + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics @@ -285,11 +256,13 @@ def delete_container( response_hook(self.client_connection.last_response_headers, result) def get_container_client(self, container): - # type: (Union[str, Container, Dict[str, Any]]) -> Container - """ Get the specified `Container`, or a container with specified ID (name). + # type: (Union[str, ContainerProxy, Dict[str, Any]]) -> ContainerProxy + """ + Get the specified `ContainerProxy`, or a container with specified ID (name). - :param container: The ID (name) of the container, a :class:`Container` instance, + :param container: The ID (name) of the container, a :class:`ContainerProxy` instance, or a dict representing the properties of the container to be retrieved. + :rtype: ~azure.cosmos.container.ContainerProxy .. 
literalinclude:: ../../examples/examples.py :start-after: [START get_container] @@ -300,28 +273,20 @@ def get_container_client(self, container): :name: get_container """ - if isinstance(container, Container): + if isinstance(container, ContainerProxy): id_value = container.id elif isinstance(container, Mapping): id_value = container["id"] else: id_value = container - return Container(self.client_connection, self.database_link, id_value) + return ContainerProxy(self.client_connection, self.database_link, id_value) @distributed_trace - def read_all_containers( - self, - max_item_count=None, - session_token=None, - initial_headers=None, - populate_query_metrics=None, - feed_options=None, - response_hook=None, - **kwargs - ): - # type: (int, str, Dict[str, str], bool, Dict[str, Any], Optional[Callable]) -> QueryIterable - """ List the containers in the database. + def list_containers(self, max_item_count=None, populate_query_metrics=None, **kwargs): + # type: (Optional[int], Optional[bool], Any) -> Iterable[Dict[str, Any]] + """ + List the containers in the database. :param max_item_count: Max number of items to be returned in the enumeration operation. :param session_token: Token for use with Session consistency. @@ -330,6 +295,7 @@ def read_all_containers( :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: An Iterable of container properties (dicts). + :rtype: Iterable[dict[str, Any]] .. 
literalinclude:: ../../examples/examples.py :start-after: [START list_containers] @@ -340,14 +306,10 @@ def read_all_containers( :name: list_containers """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - if session_token: - feed_options["sessionToken"] = session_token - if initial_headers: - feed_options["initialHeaders"] = initial_headers if populate_query_metrics is not None: feed_options["populateQueryMetrics"] = populate_query_metrics @@ -361,18 +323,15 @@ def read_all_containers( @distributed_trace def query_containers( self, - query=None, - parameters=None, - max_item_count=None, - session_token=None, - initial_headers=None, - populate_query_metrics=None, - feed_options=None, - response_hook=None, - **kwargs + query=None, # type: Optional[str] + parameters=None, # type: Optional[List[str]] + max_item_count=None, # type: Optional[int] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any ): - # type: (str, List, int, str, Dict[str, str], bool, Dict[str, Any], Optional[Callable]) -> QueryIterable - """List properties for containers in the current database + # type: (...) -> Iterable[Dict[str, Any]] + """ + List properties for containers in the current database. :param query: The Azure Cosmos DB SQL query to execute. :param parameters: Optional array of parameters to the query. Ignored if no query is provided. @@ -383,16 +342,12 @@ def query_containers( :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: An Iterable of container properties (dicts). 
- + :rtype: Iterable[dict[str, Any]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - if session_token: - feed_options["sessionToken"] = session_token - if initial_headers: - feed_options["initialHeaders"] = initial_headers if populate_query_metrics is not None: feed_options["populateQueryMetrics"] = populate_query_metrics @@ -409,29 +364,25 @@ def query_containers( @distributed_trace def replace_container( self, - container, # type: Union[str, Container, Dict[str, Any]] - partition_key, # type: PartitionKey - indexing_policy=None, # type: Dict[str, Any] - default_ttl=None, # type: int - conflict_resolution_policy=None, # type: Dict[str, Any] - session_token=None, # type: str - initial_headers=None, # type: Dict[str, str] - access_condition=None, # type: Dict[str, str] - populate_query_metrics=None, # type: bool - request_options=None, # type: Dict[str, Any] - response_hook=None, # type: Optional[Callable] - **kwargs + container, # type: Union[str, ContainerProxy, Dict[str, Any]] + partition_key, # type: Any + indexing_policy=None, # type: Optional[Dict[str, Any]] + default_ttl=None, # type: Optional[int] + conflict_resolution_policy=None, # type: Optional[Dict[str, Any]] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any ): - # type: (...) -> Container - """ Reset the properties of the container. Property changes are persisted immediately. - + # type: (...) -> ContainerProxy + """ + Reset the properties of the container. Property changes are persisted immediately. Any properties not specified will be reset to their default values. :param container: The ID (name), dict representing the properties or - :class:`Container` instance of the container to be replaced. + :class:`ContainerProxy` instance of the container to be replaced. 
:param partition_key: The partition key to use for the container. :param indexing_policy: The indexing policy to apply to the container. - :param default_ttl: Default time to live (TTL) for items in the container. If unspecified, items do not expire. + :param default_ttl: Default time to live (TTL) for items in the container. + If unspecified, items do not expire. :param conflict_resolution_policy: The conflict resolution policy to apply to the container. :param session_token: Token for use with Session consistency. :param access_condition: Conditions Associated with the request. @@ -439,9 +390,10 @@ def replace_container( :param populate_query_metrics: Enable returning query metrics in response headers. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :raise `HTTPFailure`: Raised if the container couldn't be replaced. This includes + :raise `CosmosHttpResponseError`: Raised if the container couldn't be replaced. This includes if the container with given id does not exist. - :returns: :class:`Container` instance representing the container after replace completed. + :returns: A `ContainerProxy` instance representing the container after replace completed. + :rtype: ~azure.cosmos.container.ContainerProxy .. 
literalinclude:: ../../examples/examples.py :start-after: [START reset_container_properties] @@ -452,14 +404,8 @@ def replace_container( :name: reset_container_properties """ - if not request_options: - request_options = {} # type: Dict[str, Any] - if session_token: - request_options["sessionToken"] = session_token - if initial_headers: - request_options["initialHeaders"] = initial_headers - if access_condition: - request_options["accessCondition"] = access_condition + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics @@ -484,23 +430,24 @@ def replace_container( if response_hook: response_hook(self.client_connection.last_response_headers, container_properties) - return Container( + return ContainerProxy( self.client_connection, self.database_link, container_properties["id"], properties=container_properties ) @distributed_trace - def read_all_users(self, max_item_count=None, feed_options=None, response_hook=None, **kwargs): - # type: (int, Dict[str, Any], Optional[Callable]) -> QueryIterable - """ List all users in the container. + def list_users(self, max_item_count=None, **kwargs): + # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + """ + List all users in the container. :param max_item_count: Max number of users to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: An Iterable of user properties (dicts). 
- + :rtype: Iterable[dict[str, Any]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -512,9 +459,10 @@ def read_all_users(self, max_item_count=None, feed_options=None, response_hook=N return result @distributed_trace - def query_users(self, query, parameters=None, max_item_count=None, feed_options=None, response_hook=None, **kwargs): - # type: (str, List, int, Dict[str, Any], Optional[Callable]) -> QueryIterable - """Return all users matching the given `query`. + def query_users(self, query, parameters=None, max_item_count=None, **kwargs): + # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] + """ + Return all users matching the given `query`. :param query: The Azure Cosmos DB SQL query to execute. :param parameters: Optional array of parameters to the query. Ignored if no query is provided. @@ -522,10 +470,10 @@ def query_users(self, query, parameters=None, max_item_count=None, feed_options= :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: An Iterable of user properties (dicts). - + :rtype: Iterable[Dict[str, Any]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -540,38 +488,39 @@ def query_users(self, query, parameters=None, max_item_count=None, feed_options= return result def get_user_client(self, user): - # type: (Union[str, User, Dict[str, Any]]) -> User + # type: (Union[str, UserProxy, Dict[str, Any]]) -> UserProxy """ - Get the user identified by `id`. + Get the user identified by `user`. 
- :param user: The ID (name), dict representing the properties or :class:`User` + :param user: The ID (name), dict representing the properties or :class:`UserProxy` instance of the user to be retrieved. - :returns: A :class:`User` instance representing the retrieved user. - :raise `HTTPFailure`: If the given user couldn't be retrieved. - + :returns: A `UserProxy` instance representing the retrieved user. + :raise `CosmosHttpResponseError`: If the given user couldn't be retrieved. + :rtype: ~azure.cosmos.user.UserProxy """ - if isinstance(user, User): + if isinstance(user, UserProxy): id_value = user.id elif isinstance(user, Mapping): id_value = user["id"] else: id_value = user - return User(client_connection=self.client_connection, id=id_value, database_link=self.database_link) + return UserProxy(client_connection=self.client_connection, id=id_value, database_link=self.database_link) @distributed_trace - def create_user(self, body, request_options=None, response_hook=None, **kwargs): - # type: (Dict[str, Any], Dict[str, Any], Optional[Callable]) -> User - """ Create a user in the container. + def create_user(self, body, **kwargs): + # type: (Dict[str, Any], Any) -> UserProxy + """ + Create a user in the container. + To update or replace an existing user, use the :func:`DatabaseProxy.upsert_user` method. :param body: A dict-like object with an `id` key and value representing the user to be created. The user ID must be unique within the database, and consist of no more than 255 characters. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :returns: A :class:`User` instance representing the new user. - :raise `HTTPFailure`: If the given user couldn't be created. - - To update or replace an existing user, use the :func:`Container.upsert_user` method. + :returns: A `UserProxy` instance representing the new user. 
+ :raise `CosmosHttpResponseError`: If the given user couldn't be created. + :rtype: ~azure.cosmos.user.UserProxy .. literalinclude:: ../../examples/examples.py :start-after: [START create_user] @@ -582,8 +531,8 @@ def create_user(self, body, request_options=None, response_hook=None, **kwargs): :name: create_user """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) user = self.client_connection.CreateUser( database_link=self.database_link, user=body, options=request_options, **kwargs) @@ -591,26 +540,26 @@ def create_user(self, body, request_options=None, response_hook=None, **kwargs): if response_hook: response_hook(self.client_connection.last_response_headers, user) - return User( + return UserProxy( client_connection=self.client_connection, id=user["id"], database_link=self.database_link, properties=user ) @distributed_trace - def upsert_user(self, body, request_options=None, response_hook=None, **kwargs): - # type: (Dict[str, Any], Dict[str, Any], Optional[Callable]) -> User - """ Insert or update the specified user. + def upsert_user(self, body, **kwargs): + # type: (Dict[str, Any], Any) -> UserProxy + """ + Insert or update the specified user. + If the user already exists in the container, it is replaced. If it does not, it is inserted. :param body: A dict-like object representing the user to update or insert. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :returns: A :class:`User` instance representing the upserted user. - :raise `HTTPFailure`: If the given user could not be upserted. - - If the user already exists in the container, it is replaced. If it does not, it is inserted. - + :returns: A `UserProxy` instance representing the upserted user. + :raise `CosmosHttpResponseError`: If the given user could not be upserted. 
+ :rtype: ~azure.cosmos.user.UserProxy """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) user = self.client_connection.UpsertUser( database_link=self.database_link, user=body, options=request_options, **kwargs @@ -619,53 +568,63 @@ def upsert_user(self, body, request_options=None, response_hook=None, **kwargs): if response_hook: response_hook(self.client_connection.last_response_headers, user) - return User( + return UserProxy( client_connection=self.client_connection, id=user["id"], database_link=self.database_link, properties=user ) @distributed_trace - def replace_user(self, user, body, request_options=None, response_hook=None, **kwargs): - # type: (Union[str, User, Dict[str, Any]], Dict[str, Any], Dict[str, Any], Optional[Callable]) -> User - """ Replaces the specified user if it exists in the container. + def replace_user( + self, + user, # type: Union[str, UserProxy, Dict[str, Any]] + body, # type: Dict[str, Any] + **kwargs # type: Any + ): + # type: (...) -> UserProxy + """ + Replaces the specified user if it exists in the container. - :param user: The ID (name), dict representing the properties or :class:`User` + :param user: The ID (name), dict representing the properties or :class:`UserProxy` instance of the user to be replaced. :param body: A dict-like object representing the user to replace. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :returns: A :class:`User` instance representing the user after replace went through. - :raise `HTTPFailure`: If the replace failed or the user with given id does not exist. - + :returns: A `UserProxy` instance representing the user after replace went through. + :raise `CosmosHttpResponseError`: If the replace failed or the user with given id does not exist. 
+ :rtype: ~azure.cosmos.user.UserProxy """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) - user = self.client_connection.ReplaceUser( + replaced_user = self.client_connection.ReplaceUser( user_link=self._get_user_link(user), user=body, options=request_options, **kwargs - ) + ) # type: Dict[str, str] if response_hook: - response_hook(self.client_connection.last_response_headers, user) + response_hook(self.client_connection.last_response_headers, replaced_user) - return User( - client_connection=self.client_connection, id=user["id"], database_link=self.database_link, properties=user + return UserProxy( + client_connection=self.client_connection, + id=replaced_user["id"], + database_link=self.database_link, + properties=replaced_user ) @distributed_trace - def delete_user(self, user, request_options=None, response_hook=None, **kwargs): - # type: (Union[str, User, Dict[str, Any]], Dict[str, Any], Optional[Callable]) -> None - """ Delete the specified user from the container. + def delete_user(self, user, **kwargs): + # type: (Union[str, UserProxy, Dict[str, Any]], Any) -> None + """ + Delete the specified user from the container. - :param user: The ID (name), dict representing the properties or :class:`User` + :param user: The ID (name), dict representing the properties or :class:`UserProxy` instance of the user to be deleted. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :raises `HTTPFailure`: The user wasn't deleted successfully. If the user does not + :raises `CosmosHttpResponseError`: The user wasn't deleted successfully. If the user does not exist in the container, a `404` error is returned. 
- + :rtype: None """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) result = self.client_connection.DeleteUser( user_link=self._get_user_link(user), options=request_options, **kwargs @@ -674,15 +633,17 @@ def delete_user(self, user, request_options=None, response_hook=None, **kwargs): response_hook(self.client_connection.last_response_headers, result) @distributed_trace - def read_offer(self, response_hook=None, **kwargs): - # type: (Optional[Callable]) -> Offer - """ Read the Offer object for this database. + def read_offer(self, **kwargs): + # type: (Any) -> Offer + """ + Read the Offer object for this database. :param response_hook: a callable invoked with the response metadata :returns: Offer for the database. - :raise HTTPFailure: If no offer exists for the database or if the offer could not be retrieved. - + :raise CosmosHttpResponseError: If no offer exists for the database or if the offer could not be retrieved. 
+ :rtype: ~azure.cosmos.offer.Offer """ + response_hook = kwargs.pop('response_hook', None) properties = self._get_properties() link = properties["_self"] query_spec = { @@ -691,7 +652,9 @@ def read_offer(self, response_hook=None, **kwargs): } offers = list(self.client_connection.QueryOffers(query_spec, **kwargs)) if not offers: - raise HTTPFailure(StatusCodes.NOT_FOUND, "Could not find Offer for database " + self.database_link) + raise CosmosResourceNotFoundError( + status_code=StatusCodes.NOT_FOUND, + message="Could not find Offer for database " + self.database_link) if response_hook: response_hook(self.client_connection.last_response_headers, offers) @@ -699,16 +662,18 @@ def read_offer(self, response_hook=None, **kwargs): return Offer(offer_throughput=offers[0]["content"]["offerThroughput"], properties=offers[0]) @distributed_trace - def replace_throughput(self, throughput, response_hook=None, **kwargs): - # type: (int, Optional[Callable]) -> Offer - """ Replace the database level throughput. + def replace_throughput(self, throughput, **kwargs): + # type: (Optional[int], Any) -> Offer + """ + Replace the database level throughput. :param throughput: The throughput to be set (an integer). :param response_hook: a callable invoked with the response metadata :returns: Offer for the database, updated with new throughput. - :raise HTTPFailure: If no offer exists for the database or if the offer could not be updated. - + :raise CosmosHttpResponseError: If no offer exists for the database or if the offer could not be updated. 
+ :rtype: ~azure.cosmos.offer.Offer """ + response_hook = kwargs.pop('response_hook', None) properties = self._get_properties() link = properties["_self"] query_spec = { @@ -717,7 +682,9 @@ def replace_throughput(self, throughput, response_hook=None, **kwargs): } offers = list(self.client_connection.QueryOffers(query_spec)) if not offers: - raise HTTPFailure(StatusCodes.NOT_FOUND, "Could not find Offer for collection " + self.database_link) + raise CosmosResourceNotFoundError( + status_code=StatusCodes.NOT_FOUND, + message="Could not find Offer for collection " + self.database_link) new_offer = offers[0].copy() new_offer["content"]["offerThroughput"] = throughput data = self.client_connection.ReplaceOffer(offer_link=offers[0]["_self"], offer=offers[0], **kwargs) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/errors.py b/sdk/cosmos/azure-cosmos/azure/cosmos/errors.py index 181136eb93f3..5fcb514959c1 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/errors.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/errors.py @@ -21,45 +21,45 @@ """PyCosmos Exceptions in the Azure Cosmos database service. """ +from azure.core.exceptions import ( # type: ignore # pylint: disable=unused-import + AzureError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError +) from . import http_constants -class CosmosError(Exception): - """Base class for all Azure Cosmos errors. - """ +class CosmosHttpResponseError(HttpResponseError): + """Raised when a HTTP request to the Azure Cosmos has failed.""" - -class HTTPFailure(CosmosError): - """Raised when a HTTP request to the Azure Cosmos has failed. - """ - - def __init__(self, status_code, message="", headers=None): + def __init__(self, status_code=None, message=None, response=None, **kwargs): """ - :param int status_code: - :param str message: - + :param int status_code: HTTP response code. + :param str message: Error message. 
""" - if headers is None: - headers = {} - - self.status_code = status_code - self.headers = headers + self.headers = response.headers if response else {} self.sub_status = None - self._http_error_message = message + self.http_error_message = message + status = status_code or (int(response.status_code) if response else 0) + if http_constants.HttpHeaders.SubStatus in self.headers: self.sub_status = int(self.headers[http_constants.HttpHeaders.SubStatus]) - CosmosError.__init__( - self, "Status code: %d Sub-status: %d\n%s" % (self.status_code, self.sub_status, message) - ) + formatted_message = "Status code: %d Sub-status: %d\n%s" % (status, self.sub_status, str(message)) else: - CosmosError.__init__(self, "Status code: %d\n%s" % (self.status_code, message)) + formatted_message = "Status code: %d\n%s" % (status, str(message)) + + super(CosmosHttpResponseError, self).__init__(message=formatted_message, response=response, **kwargs) + self.status_code = status + + +class CosmosResourceNotFoundError(ResourceNotFoundError, CosmosHttpResponseError): + """An error response with status code 404.""" -class JSONParseFailure(CosmosError): - """Raised when fails to parse JSON message. - """ +class CosmosResourceExistsError(ResourceExistsError, CosmosHttpResponseError): + """An error response with status code 409.""" -class UnexpectedDataType(CosmosError): - """Raised when unexpected data type is provided as parameter. - """ +class CosmosAccessConditionFailedError(CosmosHttpResponseError): + """An error response with status code 412.""" diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/http_constants.py b/sdk/cosmos/azure-cosmos/azure/cosmos/http_constants.py index b6ea40ee7f45..da327a708548 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/http_constants.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/http_constants.py @@ -23,7 +23,7 @@ """ -class HttpMethods: +class HttpMethods(object): """Constants of http methods. 
""" @@ -35,7 +35,7 @@ class HttpMethods: Options = "OPTIONS" -class HttpHeaders: +class HttpHeaders(object): """Constants of http headers. """ @@ -197,14 +197,14 @@ class HttpHeaders: AllowTentativeWrites = "x-ms-cosmos-allow-tentative-writes" -class HttpHeaderPreferenceTokens: +class HttpHeaderPreferenceTokens(object): """Constants of http header preference tokens. """ PreferUnfilteredQueryResponse = "PreferUnfilteredQueryResponse" -class HttpStatusDescriptions: +class HttpStatusDescriptions(object): """Constants of http status descriptions. """ @@ -234,7 +234,7 @@ class HttpStatusDescriptions: RetryWith = "Retry the request" -class QueryStrings: +class QueryStrings(object): """Constants of query strings. """ @@ -252,23 +252,22 @@ class QueryStrings: Generic = "generic" -class CookieHeaders: +class CookieHeaders(object): """Constants of cookie headers. """ SessionToken = "x-ms-session-token" -class Versions: +class Versions(object): """Constants of versions. """ CurrentVersion = "2018-12-31" SDKName = "azure-cosmos" - SDKVersion = "4.0.0a1" -class Delimiters: +class Delimiters(object): """Constants of delimiters. """ @@ -276,7 +275,7 @@ class Delimiters: ClientContinuationFormat = "{0}!!{1}" -class HttpListenerErrorCodes: +class HttpListenerErrorCodes(object): """Constants of http listener error codes. """ @@ -284,14 +283,14 @@ class HttpListenerErrorCodes: ERROR_CONNECTION_INVALID = 1229 -class HttpContextProperties: +class HttpContextProperties(object): """Constants of http context properties. 
""" SubscriptionId = "SubscriptionId" -class _ErrorCodes: +class _ErrorCodes(object): """Windows Socket Error Codes """ @@ -316,7 +315,7 @@ class _ErrorCodes: LinuxConnectionReset = 131 -class StatusCodes: +class StatusCodes(object): """HTTP status codes returned by the REST operations """ @@ -350,7 +349,7 @@ class StatusCodes: OPERATION_CANCELLED = 1201 -class SubStatusCodes: +class SubStatusCodes(object): """Sub status codes returned by the REST operations specifying the details of the operation """ @@ -385,7 +384,7 @@ class SubStatusCodes: INSUFFICIENT_BINDABLE_PARTITIONS = 1007 -class ResourceType: +class ResourceType(object): """Types of resources in Azure Cosmos """ diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/offer.py b/sdk/cosmos/azure-cosmos/azure/cosmos/offer.py index 4e7b240d1c8f..c4087542f003 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/offer.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/offer.py @@ -21,6 +21,7 @@ """Represents an offer in the Azure Cosmos DB SQL API service. """ +from typing import Dict, Any class Offer(dict): diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/permission.py b/sdk/cosmos/azure-cosmos/azure/cosmos/permission.py index 44a5a7ee2fc3..3432e741de8c 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/permission.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/permission.py @@ -21,13 +21,14 @@ """Represents a Permission object in the Azure Cosmos DB SQL API service. 
""" +from typing import Dict, Any, Union from .documents import PermissionMode -class Permission: +class Permission(object): def __init__(self, id, user_link, permission_mode, resource_link, properties): # pylint: disable=redefined-builtin - # type: (str, str, PermissionMode, str, Dict[str, Any]) -> None + # type: (str, str, Union[str, PermissionMode], str, Dict[str, Any]) -> None self.id = id self.user_link = user_link self.permission_mode = permission_mode diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/scripts.py b/sdk/cosmos/azure-cosmos/azure/cosmos/scripts.py index a0e889e8b8ae..d7208f918c4c 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/scripts.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/scripts.py @@ -22,24 +22,30 @@ """Create, read, update and delete and execute scripts in the Azure Cosmos DB SQL API service. """ -from typing import Any, List, Dict, Union +from typing import Any, List, Dict, Union, Iterable, Optional import six from azure.cosmos._cosmos_client_connection import CosmosClientConnection +from ._base import build_options from .partition_key import NonePartitionKeyValue -from ._query_iterable import QueryIterable # pylint: disable=protected-access +# pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs -class ScriptType: +class ScriptType(object): StoredProcedure = "sprocs" Trigger = "triggers" UserDefinedFunction = "udfs" -class Scripts: +class ScriptsProxy(object): + """ + An interface to interact with stored procedures. + This class should not be instantiated directly, use :func:`ContainerProxy.scripts` attribute. 
+ """ + def __init__(self, client_connection, container_link, is_system_key): # type: (CosmosClientConnection, str, bool) -> None self.client_connection = client_connection @@ -52,35 +58,37 @@ def _get_resource_link(self, script_or_id, typ): return u"{}/{}/{}".format(self.container_link, typ, script_or_id) return script_or_id["_self"] - def list_stored_procedures(self, max_item_count=None, feed_options=None): - # type: (int, Dict[str, Any]) -> QueryIterable - """ List all stored procedures in the container. + def list_stored_procedures(self, max_item_count=None, **kwargs): + # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + """ + List all stored procedures in the container. - :param max_item_count: Max number of items to be returned in the enumeration operation. + :param int max_item_count: Max number of items to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. :returns: An Iterable of stored procedures (dicts). - + :rtype: Iterable[dict[str, Any]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - return self.client_connection.ReadStoredProcedures(collection_link=self.container_link, options=feed_options) + return self.client_connection.ReadStoredProcedures( + collection_link=self.container_link, options=feed_options, **kwargs + ) - def query_stored_procedures(self, query, parameters=None, max_item_count=None, feed_options=None): - # type: (str, List, int, Dict[str, Any]) -> QueryIterable - """Return all stored procedures matching the given `query`. + def query_stored_procedures(self, query, parameters=None, max_item_count=None, **kwargs): + # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] + """ + Return all stored procedures matching the given `query`. :param query: The Azure Cosmos DB SQL query to execute. 
:param parameters: Optional array of parameters to the query. Ignored if no query is provided. :param max_item_count: Max number of items to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. :returns: An Iterable of stored procedures (dicts). - + :rtype: Iterable[dict[str, Any]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -88,99 +96,106 @@ def query_stored_procedures(self, query, parameters=None, max_item_count=None, f collection_link=self.container_link, query=query if parameters is None else dict(query=query, parameters=parameters), options=feed_options, + **kwargs ) - def get_stored_procedure(self, sproc, request_options=None): - # type: (Union[str, Dict[str, Any]], Dict[str, Any]) -> Dict[str, Any] + def get_stored_procedure(self, sproc, **kwargs): + # type: (Union[str, Dict[str, Any]], Any) -> Dict[str, Any] """ Get the stored procedure identified by `id`. :param sproc: The ID (name) or dict representing stored procedure to retrieve. :param request_options: Dictionary of additional properties to be used for the request. :returns: A dict representing the retrieved stored procedure. - :raise `HTTPFailure`: If the given stored procedure couldn't be retrieved. - + :raise `CosmosHttpResponseError`: If the given stored procedure couldn't be retrieved. 
+ :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) return self.client_connection.ReadStoredProcedure( - sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), options=request_options + sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), options=request_options, **kwargs ) - def create_stored_procedure(self, body, request_options=None): - # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] - """ Create a stored procedure in the container. + def create_stored_procedure(self, body, **kwargs): + # type: (Dict[str, Any], Any) -> Dict[str, Any] + """ + Create a stored procedure in the container. + To replace an existing sproc, use the :func:`Container.scripts.replace_stored_procedure` method. :param body: A dict-like object representing the sproc to create. :param request_options: Dictionary of additional properties to be used for the request. :returns: A dict representing the new stored procedure. - :raise `HTTPFailure`: If the given stored procedure couldn't be created. - - To replace an existing sproc, use the :func:`Container.scripts.replace_stored_procedure` method. - + :raise `CosmosHttpResponseError`: If the given stored procedure couldn't be created. + :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) return self.client_connection.CreateStoredProcedure( - collection_link=self.container_link, sproc=body, options=request_options + collection_link=self.container_link, sproc=body, options=request_options, **kwargs ) - def replace_stored_procedure(self, sproc, body, request_options=None): - # type: (Union[str, Dict[str, Any]], Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] - """ Replaces the specified stored procedure if it exists in the container. 
+ def replace_stored_procedure(self, sproc, body, **kwargs): + # type: (Union[str, Dict[str, Any]], Dict[str, Any], Any) -> Dict[str, Any] + """ + Replaces the specified stored procedure if it exists in the container. :param sproc: The ID (name) or dict representing stored procedure to be replaced. :param body: A dict-like object representing the sproc to replace. :param request_options: Dictionary of additional properties to be used for the request. :returns: A dict representing the stored procedure after replace went through. - :raise `HTTPFailure`: If the replace failed or the stored procedure with given id does not exist. - + :raise `CosmosHttpResponseError`: If the replace failed or the stored procedure with given id does not exist. + :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) return self.client_connection.ReplaceStoredProcedure( - sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), sproc=body, options=request_options + sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), + sproc=body, + options=request_options, + **kwargs ) - def delete_stored_procedure(self, sproc, request_options=None): - # type: (Union[str, Dict[str, Any]], Dict[str, Any]) -> None - """ Delete the specified stored procedure from the container. + def delete_stored_procedure(self, sproc, **kwargs): + # type: (Union[str, Dict[str, Any]], Any) -> None + """ + Delete the specified stored procedure from the container. :param sproc: The ID (name) or dict representing stored procedure to be deleted. :param request_options: Dictionary of additional properties to be used for the request. - :raises `HTTPFailure`: The sproc wasn't deleted successfully. If the sproc does not + :raises `CosmosHttpResponseError`: The sproc wasn't deleted successfully. If the sproc does not exist in the container, a `404` error is returned. 
- + :rtype: None """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) self.client_connection.DeleteStoredProcedure( - sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), options=request_options + sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), options=request_options, **kwargs ) def execute_stored_procedure( - self, sproc, partition_key=None, params=None, enable_script_logging=None, request_options=None + self, + sproc, # type: Union[str, Dict[str, Any]] + partition_key=None, # type: Optional[str] + params=None, # type: Optional[List[Any]] + enable_script_logging=None, # type: Optional[bool] + **kwargs # type: Any ): - # type: (Union[str, Dict[str, Any]], str, List[Any], bool, Dict[str, Any]) -> Any - """ execute the specified stored procedure. + # type: (...) -> Any + """ + Execute the specified stored procedure. :param sproc: The ID (name) or dict representing stored procedure to be executed. :param params: List of parameters to be passed to the stored procedure to be executed. - :param enable_script_logging: Enables or disables script logging for the current request. + :param bool enable_script_logging: Enables or disables script logging for the current request. :param partition_key: Specifies the partition key to indicate which partition the sproc should execute on. :param request_options: Dictionary of additional properties to be used for the request. - :returns: result of the executed stored procedure for the given parameters. - :raise `HTTPFailure`: If the stored procedure execution failed or if the stored procedure with + :returns: Result of the executed stored procedure for the given parameters. + :raise `CosmosHttpResponseError`: If the stored procedure execution failed or if the stored procedure with given id does not exists in the container. 
- + :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) if partition_key is not None: request_options["partitionKey"] = ( CosmosClientConnection._return_undefined_or_empty_partition_key(self.is_system_key) @@ -194,37 +209,40 @@ def execute_stored_procedure( sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), params=params, options=request_options, + **kwargs ) - def list_triggers(self, max_item_count=None, feed_options=None): - # type: (int, Dict[str, Any]) -> QueryIterable - """ List all triggers in the container. + def list_triggers(self, max_item_count=None, **kwargs): + # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + """ + List all triggers in the container. :param max_item_count: Max number of items to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. :returns: An Iterable of triggers (dicts). - + :rtype: Iterable[dict[str, Any]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - return self.client_connection.ReadTriggers(collection_link=self.container_link, options=feed_options) + return self.client_connection.ReadTriggers( + collection_link=self.container_link, options=feed_options, **kwargs + ) - def query_triggers(self, query, parameters=None, max_item_count=None, feed_options=None): - # type: (str, List, int, Dict[str, Any]) -> QueryIterable - """Return all triggers matching the given `query`. + def query_triggers(self, query, parameters=None, max_item_count=None, **kwargs): + # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] + """ + Return all triggers matching the given `query`. :param query: The Azure Cosmos DB SQL query to execute. :param parameters: Optional array of parameters to the query. 
Ignored if no query is provided. :param max_item_count: Max number of items to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. :returns: An Iterable of triggers (dicts). - + :rtype: Iterable[dict[str, Any]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -232,111 +250,113 @@ def query_triggers(self, query, parameters=None, max_item_count=None, feed_optio collection_link=self.container_link, query=query if parameters is None else dict(query=query, parameters=parameters), options=feed_options, + **kwargs ) - def get_trigger(self, trigger, request_options=None): - # type: (Union[str, Dict[str, Any]], Dict[str, Any]) -> Dict[str, Any] + def get_trigger(self, trigger, **kwargs): + # type: (Union[str, Dict[str, Any]], Any) -> Dict[str, Any] """ Get the trigger identified by `id`. :param trigger: The ID (name) or dict representing trigger to retrieve. :param request_options: Dictionary of additional properties to be used for the request. :returns: A dict representing the retrieved trigger. - :raise `HTTPFailure`: If the given trigger couldn't be retrieved. - + :raise `CosmosHttpResponseError`: If the given trigger couldn't be retrieved. + :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) return self.client_connection.ReadTrigger( - trigger_link=self._get_resource_link(trigger, ScriptType.Trigger), options=request_options + trigger_link=self._get_resource_link(trigger, ScriptType.Trigger), options=request_options, **kwargs ) - def create_trigger(self, body, request_options=None): - # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] - """ Create a trigger in the container. 
+ def create_trigger(self, body, **kwargs): + # type: (Dict[str, Any], Any) -> Dict[str, Any] + """ + Create a trigger in the container. + To replace an existing trigger, use the :func:`ContainerProxy.scripts.replace_trigger` method. :param body: A dict-like object representing the trigger to create. :param request_options: Dictionary of additional properties to be used for the request. :returns: A dict representing the new trigger. - :raise `HTTPFailure`: If the given trigger couldn't be created. - - To replace an existing trigger, use the :func:`Container.scripts.replace_trigger` method. - + :raise `CosmosHttpResponseError`: If the given trigger couldn't be created. + :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) return self.client_connection.CreateTrigger( - collection_link=self.container_link, trigger=body, options=request_options + collection_link=self.container_link, trigger=body, options=request_options, **kwargs ) - def replace_trigger(self, trigger, body, request_options=None): - # type: (Union[str, Dict[str, Any]], Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] - """ Replaces the specified tigger if it exists in the container. + def replace_trigger(self, trigger, body, **kwargs): + # type: (Union[str, Dict[str, Any]], Dict[str, Any], Any) -> Dict[str, Any] + """ + Replaces the specified tigger if it exists in the container. :param trigger: The ID (name) or dict representing trigger to be replaced. :param body: A dict-like object representing the trigger to replace. :param request_options: Dictionary of additional properties to be used for the request. :returns: A dict representing the trigger after replace went through. - :raise `HTTPFailure`: If the replace failed or the trigger with given id does not exist. - + :raise `CosmosHttpResponseError`: If the replace failed or the trigger with given id does not exist. 
+ :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) return self.client_connection.ReplaceTrigger( - trigger_link=self._get_resource_link(trigger, ScriptType.Trigger), trigger=body, options=request_options + trigger_link=self._get_resource_link(trigger, ScriptType.Trigger), + trigger=body, + options=request_options, + **kwargs ) - def delete_trigger(self, trigger, request_options=None): - # type: (Union[str, Dict[str, Any]], Dict[str, Any]) -> None - """ Delete the specified trigger from the container. + def delete_trigger(self, trigger, **kwargs): + # type: (Union[str, Dict[str, Any]], Any) -> None + """ + Delete the specified trigger from the container. :param trigger: The ID (name) or dict representing trigger to be deleted. :param request_options: Dictionary of additional properties to be used for the request. - :raises `HTTPFailure`: The trigger wasn't deleted successfully. If the trigger does not + :raises `CosmosHttpResponseError`: The trigger wasn't deleted successfully. If the trigger does not exist in the container, a `404` error is returned. - + :rtype: None """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) self.client_connection.DeleteTrigger( - trigger_link=self._get_resource_link(trigger, ScriptType.Trigger), options=request_options + trigger_link=self._get_resource_link(trigger, ScriptType.Trigger), options=request_options, **kwargs ) - def list_user_defined_functions(self, max_item_count=None, feed_options=None): - # type: (int, Dict[str, Any]) -> QueryIterable - """ List all user defined functions in the container. + def list_user_defined_functions(self, max_item_count=None, **kwargs): + # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + """ + List all user defined functions in the container. :param max_item_count: Max number of items to be returned in the enumeration operation. 
:param feed_options: Dictionary of additional properties to be used for the request. :returns: An Iterable of user defined functions (dicts). - + :rtype: Iterable[dict[str, Any]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count return self.client_connection.ReadUserDefinedFunctions( - collection_link=self.container_link, options=feed_options + collection_link=self.container_link, options=feed_options, **kwargs ) - def query_user_defined_functions(self, query, parameters=None, max_item_count=None, feed_options=None): - # type: (str, List, int, Dict[str, Any]) -> QueryIterable - """Return all user defined functions matching the given `query`. + def query_user_defined_functions(self, query, parameters=None, max_item_count=None, **kwargs): + # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] + """ + Return all user defined functions matching the given `query`. :param query: The Azure Cosmos DB SQL query to execute. :param parameters: Optional array of parameters to the query. Ignored if no query is provided. :param max_item_count: Max number of items to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. :returns: An Iterable of user defined functions (dicts). 
- + :rtype: Iterable[dict[str, Any]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -344,76 +364,79 @@ def query_user_defined_functions(self, query, parameters=None, max_item_count=No collection_link=self.container_link, query=query if parameters is None else dict(query=query, parameters=parameters), options=feed_options, + **kwargs ) - def get_user_defined_function(self, udf, request_options=None): - # type: (Union[str, Dict[str, Any]], Dict[str, Any]) -> Dict[str, Any] + def get_user_defined_function(self, udf, **kwargs): + # type: (Union[str, Dict[str, Any]], Any) -> Dict[str, Any] """ Get the stored procedure identified by `id`. :param udf: The ID (name) or dict representing udf to retrieve. :param request_options: Dictionary of additional properties to be used for the request. :returns: A dict representing the retrieved user defined function. - :raise `HTTPFailure`: If the given user defined function couldn't be retrieved. - + :raise `CosmosHttpResponseError`: If the given user defined function couldn't be retrieved. + :rtype: Iterable[dict[str, Any]] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) return self.client_connection.ReadUserDefinedFunction( - udf_link=self._get_resource_link(udf, ScriptType.UserDefinedFunction), options=request_options + udf_link=self._get_resource_link(udf, ScriptType.UserDefinedFunction), options=request_options, **kwargs ) - def create_user_defined_function(self, body, request_options=None): - # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] - """ Create a user defined function in the container. + def create_user_defined_function(self, body, **kwargs): + # type: (Dict[str, Any], Any) -> Dict[str, Any] + """ + Create a user defined function in the container. 
+ To replace an existing udf, use the :func:`ContainerProxy.scripts.replace_user_defined_function` method. :param body: A dict-like object representing the udf to create. :param request_options: Dictionary of additional properties to be used for the request. :returns: A dict representing the new user defined function. - :raise `HTTPFailure`: If the given user defined function couldn't be created. - - To replace an existing udf, use the :func:`Container.scripts.replace_user_defined_function` method. - + :raise `CosmosHttpResponseError`: If the given user defined function couldn't be created. + :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) return self.client_connection.CreateUserDefinedFunction( - collection_link=self.container_link, udf=body, options=request_options + collection_link=self.container_link, udf=body, options=request_options, **kwargs ) - def replace_user_defined_function(self, udf, body, request_options=None): - # type: (Union[str, Dict[str, Any]], Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] - """ Replaces the specified user defined function if it exists in the container. + def replace_user_defined_function(self, udf, body, **kwargs): + # type: (Union[str, Dict[str, Any]], Dict[str, Any], Any) -> Dict[str, Any] + """ + Replaces the specified user defined function if it exists in the container. :param udf: The ID (name) or dict representing udf to be replaced. :param body: A dict-like object representing the udf to replace. :param request_options: Dictionary of additional properties to be used for the request. :returns: A dict representing the user defined function after replace went through. - :raise `HTTPFailure`: If the replace failed or the user defined function with given id does not exist. - + :raise `CosmosHttpResponseError`: If the replace failed or the user defined function with + given id does not exist. 
+ :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) return self.client_connection.ReplaceUserDefinedFunction( - udf_link=self._get_resource_link(udf, ScriptType.UserDefinedFunction), udf=body, options=request_options + udf_link=self._get_resource_link(udf, ScriptType.UserDefinedFunction), + udf=body, + options=request_options, + **kwargs ) - def delete_user_defined_function(self, udf, request_options=None): - # type: (Union[str, Dict[str, Any]], Dict[str, Any]) -> None - """ Delete the specified user defined function from the container. + def delete_user_defined_function(self, udf, **kwargs): + # type: (Union[str, Dict[str, Any]], Any) -> None + """ + Delete the specified user defined function from the container. :param udf: The ID (name) or dict representing udf to be deleted. :param request_options: Dictionary of additional properties to be used for the request. - :raises `HTTPFailure`: The udf wasn't deleted successfully. If the udf does not + :raises `CosmosHttpResponseError`: The udf wasn't deleted successfully. If the udf does not exist in the container, a `404` error is returned. - + :rtype: None """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) self.client_connection.DeleteUserDefinedFunction( - udf_link=self._get_resource_link(udf, ScriptType.UserDefinedFunction), options=request_options + udf_link=self._get_resource_link(udf, ScriptType.UserDefinedFunction), options=request_options, **kwargs ) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/user.py b/sdk/cosmos/azure-cosmos/azure/cosmos/user.py index b33a36fc81eb..9328548206b8 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/user.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/user.py @@ -19,19 +19,27 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
+# pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs + """Create, read, update and delete permissions in the Azure Cosmos DB SQL API service. """ -from typing import Any, List, Dict, Union, cast +from typing import Any, List, Dict, Union, cast, Iterable, Optional import six -from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator import distributed_trace # type: ignore from ._cosmos_client_connection import CosmosClientConnection +from ._base import build_options from .permission import Permission -class User: +class UserProxy(object): + """ + An interface to interact with a specific user. + This class should not be instantiated directly, use :func:`DatabaseProxy.get_user_client` method. + """ + def __init__(self, client_connection, id, database_link, properties=None): # pylint: disable=redefined-builtin # type: (CosmosClientConnection, str, str, Dict[str, Any]) -> None self.client_connection = client_connection @@ -52,44 +60,45 @@ def _get_permission_link(self, permission_or_id): def _get_properties(self): # type: () -> Dict[str, Any] if self._properties is None: - self.read() + self._properties = self.read() return self._properties @distributed_trace - def read(self, request_options=None, response_hook=None, **kwargs): - # type: (Dict[str, Any], Optional[Callable]) -> User + def read(self, **kwargs): + # type: (Any) -> Dict[str, Any] """ Read user propertes. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :returns: A :class:`User` instance representing the retrieved user. - :raise `HTTPFailure`: If the given user couldn't be retrieved. - + :returns: A :class:`UserProxy` instance representing the retrieved user. + :raise `CosmosHttpResponseError`: If the given user couldn't be retrieved. 
+ :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) self._properties = self.client_connection.ReadUser(user_link=self.user_link, options=request_options, **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers, self._properties) - return self._properties + return cast('Dict[str, Any]', self._properties) @distributed_trace - def read_all_permissions(self, max_item_count=None, feed_options=None, response_hook=None, **kwargs): - # type: (int, Dict[str, Any], Optional[Callable]) -> QueryIterable - """ List all permission for the user. + def list_permissions(self, max_item_count=None, **kwargs): + # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + """ + List all permission for the user. :param max_item_count: Max number of permissions to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: An Iterable of permissions (dicts). - + :rtype: Iterable[dict[str, Any]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -106,12 +115,11 @@ def query_permissions( query, parameters=None, max_item_count=None, - feed_options=None, - response_hook=None, **kwargs ): - # type: (str, List, int, Dict[str, Any], Optional[Callable]) -> QueryIterable - """Return all permissions matching the given `query`. + # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] + """ + Return all permissions matching the given `query`. :param query: The Azure Cosmos DB SQL query to execute. :param parameters: Optional array of parameters to the query. 
Ignored if no query is provided. @@ -119,10 +127,10 @@ def query_permissions( :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: An Iterable of permissions (dicts). - + :rtype: Iterable[dict[str, Any]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -139,8 +147,8 @@ def query_permissions( return result @distributed_trace - def get_permission(self, permission, request_options=None, response_hook=None, **kwargs): - # type: (str, Dict[str, Any], Optional[Callable]) -> Permission + def get_permission(self, permission, **kwargs): + # type: (str, Any) -> Permission """ Get the permission identified by `id`. @@ -149,43 +157,43 @@ def get_permission(self, permission, request_options=None, response_hook=None, * :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A dict representing the retrieved permission. - :raise `HTTPFailure`: If the given permission couldn't be retrieved. - + :raise `CosmosHttpResponseError`: If the given permission couldn't be retrieved. 
+ :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) - permission = self.client_connection.ReadPermission( + permission_resp = self.client_connection.ReadPermission( permission_link=self._get_permission_link(permission), options=request_options, **kwargs - ) + ) # type: Dict[str, str] if response_hook: - response_hook(self.client_connection.last_response_headers, permission) + response_hook(self.client_connection.last_response_headers, permission_resp) return Permission( - id=permission["id"], + id=permission_resp["id"], user_link=self.user_link, - permission_mode=permission["permissionMode"], - resource_link=permission["resource"], - properties=permission, + permission_mode=permission_resp["permissionMode"], + resource_link=permission_resp["resource"], + properties=permission_resp, ) @distributed_trace - def create_permission(self, body, request_options=None, response_hook=None, **kwargs): - # type: (Dict[str, Any], Dict[str, Any], Optional[Callable]) -> Permission - """ Create a permission for the user. + def create_permission(self, body, **kwargs): + # type: (Dict[str, Any], Any) -> Permission + """ + Create a permission for the user. + To update or replace an existing permision, use the :func:`UserProxy.upsert_permission` method. :param body: A dict-like object representing the permission to create. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A dict representing the new permission. - :raise `HTTPFailure`: If the given permission couldn't be created. - - To update or replace an existing permision, use the :func:`User.upsert_permission` method. - + :raise `CosmosHttpResponseError`: If the given permission couldn't be created. 
+ :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) permission = self.client_connection.CreatePermission( user_link=self.user_link, permission=body, options=request_options, **kwargs @@ -203,21 +211,21 @@ def create_permission(self, body, request_options=None, response_hook=None, **kw ) @distributed_trace - def upsert_permission(self, body, request_options=None, response_hook=None, **kwargs): - # type: (Dict[str, Any], Dict[str, Any], Optional[Callable]) -> Permission - """ Insert or update the specified permission. + def upsert_permission(self, body, **kwargs): + # type: (Dict[str, Any], Any) -> Permission + """ + Insert or update the specified permission. + If the permission already exists in the container, it is replaced. If it does not, it is inserted. :param body: A dict-like object representing the permission to update or insert. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A dict representing the upserted permission. - :raise `HTTPFailure`: If the given permission could not be upserted. - - If the permission already exists in the container, it is replaced. If it does not, it is inserted. + :raise `CosmosHttpResponseError`: If the given permission could not be upserted. 
+ :rtype: dict[str, Any] """ - - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) permission = self.client_connection.UpsertPermission( user_link=self.user_link, permission=body, options=request_options, **kwargs @@ -235,9 +243,10 @@ def upsert_permission(self, body, request_options=None, response_hook=None, **kw ) @distributed_trace - def replace_permission(self, permission, body, request_options=None, response_hook=None, **kwargs): - # type: (str, Dict[str, Any], Dict[str, Any], Optional[Callable]) -> Permission - """ Replaces the specified permission if it exists for the user. + def replace_permission(self, permission, body, **kwargs): + # type: (str, Dict[str, Any], Any) -> Permission + """ + Replaces the specified permission if it exists for the user. :param permission: The ID (name), dict representing the properties or :class:`Permission` instance of the permission to be replaced. @@ -245,43 +254,43 @@ def replace_permission(self, permission, body, request_options=None, response_ho :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A dict representing the permission after replace went through. - :raise `HTTPFailure`: If the replace failed or the permission with given id does not exist. - + :raise `CosmosHttpResponseError`: If the replace failed or the permission with given id does not exist. 
+ :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) - permission = self.client_connection.ReplacePermission( + permission_resp = self.client_connection.ReplacePermission( permission_link=self._get_permission_link(permission), permission=body, options=request_options, **kwargs - ) + ) # type: Dict[str, str] if response_hook: - response_hook(self.client_connection.last_response_headers, permission) + response_hook(self.client_connection.last_response_headers, permission_resp) return Permission( - id=permission["id"], + id=permission_resp["id"], user_link=self.user_link, - permission_mode=permission["permissionMode"], - resource_link=permission["resource"], - properties=permission, + permission_mode=permission_resp["permissionMode"], + resource_link=permission_resp["resource"], + properties=permission_resp, ) @distributed_trace - def delete_permission(self, permission, request_options=None, response_hook=None, **kwargs): - # type: (str, Dict[str, Any], Optional[Callable]) -> None - """ Delete the specified permission from the user. + def delete_permission(self, permission, **kwargs): + # type: (str, Any) -> None + """ + Delete the specified permission from the user. :param permission: The ID (name), dict representing the properties or :class:`Permission` instance of the permission to be replaced. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :raises `HTTPFailure`: The permission wasn't deleted successfully. If the permission does + :raises `CosmosHttpResponseError`: The permission wasn't deleted successfully. If the permission does not exist for the user, a `404` error is returned. 
- + :rtype: None """ - - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) result = self.client_connection.DeletePermission( permission_link=self._get_permission_link(permission), options=request_options, **kwargs diff --git a/sdk/cosmos/azure-cosmos/samples/ChangeFeedManagement/Program.py b/sdk/cosmos/azure-cosmos/samples/ChangeFeedManagement/Program.py index b800f818104e..1056c146bdbd 100644 --- a/sdk/cosmos/azure-cosmos/samples/ChangeFeedManagement/Program.py +++ b/sdk/cosmos/azure-cosmos/samples/ChangeFeedManagement/Program.py @@ -74,12 +74,8 @@ def run_sample(): # setup database for this sample try: db = client.create_database(id=DATABASE_ID) - - except errors.HTTPFailure as e: - if e.status_code == 409: - pass - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceExistsError: + pass # setup container for this sample try: @@ -89,11 +85,8 @@ def run_sample(): ) print('Container with id \'{0}\' created'.format(CONTAINER_ID)) - except errors.HTTPFailure as e: - if e.status_code == 409: - print('Container with id \'{0}\' was found'.format(CONTAINER_ID)) - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceExistsError: + print('Container with id \'{0}\' was found'.format(CONTAINER_ID)) ChangeFeedManagement.CreateItems(container, 100) ChangeFeedManagement.ReadChangeFeed(container) @@ -101,14 +94,10 @@ def run_sample(): # cleanup database after sample try: client.delete_database(db) + except errors.CosmosResourceNotFoundError: + pass - except errors.CosmosError as e: - if e.status_code == 404: - pass - else: - raise errors.HTTPFailure(e.status_code) - - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: print('\nrun_sample has caught an error. 
{0}'.format(e.message)) finally: diff --git a/sdk/cosmos/azure-cosmos/samples/CollectionManagement/Program.py b/sdk/cosmos/azure-cosmos/samples/CollectionManagement/Program.py index 9bad40d4b0df..fa877125c4e0 100644 --- a/sdk/cosmos/azure-cosmos/samples/CollectionManagement/Program.py +++ b/sdk/cosmos/azure-cosmos/samples/CollectionManagement/Program.py @@ -92,11 +92,8 @@ def create_Container(db, id): db.create_container(id=id, partition_key=partition_key) print('Container with id \'{0}\' created'.format(id)) - except errors.HTTPFailure as e: - if e.status_code == 409: - print('A container with id \'{0}\' already exists'.format(id)) - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceExistsError: + print('A container with id \'{0}\' already exists'.format(id)) print("\n2.2 Create Container - With custom index policy") @@ -114,15 +111,13 @@ def create_Container(db, id): partition_key=partition_key, indexing_policy=coll['indexingPolicy'] ) + properties = container.read() print('Container with id \'{0}\' created'.format(container.id)) - print('IndexPolicy Mode - \'{0}\''.format(container.properties['indexingPolicy']['indexingMode'])) - print('IndexPolicy Automatic - \'{0}\''.format(container.properties['indexingPolicy']['automatic'])) + print('IndexPolicy Mode - \'{0}\''.format(properties['indexingPolicy']['indexingMode'])) + print('IndexPolicy Automatic - \'{0}\''.format(properties['indexingPolicy']['automatic'])) - except errors.CosmosError as e: - if e.status_code == 409: - print('A container with id \'{0}\' already exists'.format(container['id'])) - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceExistsError: + print('A container with id \'{0}\' already exists'.format(coll['id'])) print("\n2.3 Create Container - With custom offer throughput") @@ -135,11 +130,8 @@ def create_Container(db, id): ) print('Container with id \'{0}\' created'.format(container.id)) - except errors.HTTPFailure as e: - if 
e.status_code == 409: - print('A container with id \'{0}\' already exists'.format(container.id)) - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceExistsError: + print('A container with id \'{0}\' already exists'.format(coll['id'])) print("\n2.4 Create Container - With Unique keys") @@ -149,15 +141,13 @@ def create_Container(db, id): partition_key=partition_key, unique_key_policy={'uniqueKeys': [{'paths': ['/field1/field2', '/field3']}]} ) - unique_key_paths = container.properties['uniqueKeyPolicy']['uniqueKeys'][0]['paths'] + properties = container.read() + unique_key_paths = properties['uniqueKeyPolicy']['uniqueKeys'][0]['paths'] print('Container with id \'{0}\' created'.format(container.id)) print('Unique Key Paths - \'{0}\', \'{1}\''.format(unique_key_paths[0], unique_key_paths[1])) - except errors.HTTPFailure as e: - if e.status_code == 409: - print('A container with id \'{0}\' already exists'.format(container.id)) - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceExistsError: + print('A container with id \'container_unique_keys\' already exists') print("\n2.5 Create Collection - With Partition key V2 (Default)") @@ -166,15 +156,12 @@ def create_Container(db, id): id="collection_partition_key_v2", partition_key=PartitionKey(path='/id', kind='Hash') ) - + properties = container.read() print('Container with id \'{0}\' created'.format(container.id)) - print('Partition Key - \'{0}\''.format(container.properties['partitionKey'])) + print('Partition Key - \'{0}\''.format(properties['partitionKey'])) - except errors.CosmosError as e: - if e.status_code == 409: - print('A container with id \'{0}\' already exists'.format(container.id)) - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceExistsError: + print('A container with id \'collection_partition_key_v2\' already exists') print("\n2.6 Create Collection - With Partition key V1") @@ -183,15 +170,12 @@ def create_Container(db, id): 
id="collection_partition_key_v1", partition_key=PartitionKey(path='/id', kind='Hash', version=1) ) - + properties = container.read() print('Container with id \'{0}\' created'.format(container.id)) - print('Partition Key - \'{0}\''.format(container.properties['partitionKey'])) + print('Partition Key - \'{0}\''.format(properties['partitionKey'])) - except errors.CosmosError as e: - if e.status_code == 409: - print('A container with id \'{0}\' already exists'.format(container.id)) - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceExistsError: + print('A container with id \'collection_partition_key_v1\' already exists') @staticmethod def manage_offer_throughput(db, id): @@ -211,11 +195,8 @@ def manage_offer_throughput(db, id): print('Found Offer \'{0}\' for Container \'{1}\' and its throughput is \'{2}\''.format(offer.properties['id'], container.id, offer.properties['content']['offerThroughput'])) - except errors.HTTPFailure as e: - if e.status_code == 404: - print('A container with id \'{0}\' does not exist'.format(id)) - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceExistsError: + print('A container with id \'{0}\' does not exist'.format(id)) print("\n3.2 Change Offer Throughput of Container") @@ -233,11 +214,8 @@ def read_Container(db, id): container = db.get_container_client(id) print('Container with id \'{0}\' was found, it\'s link is {1}'.format(container.id, container.container_link)) - except errors.HTTPFailure as e: - if e.status_code == 404: - print('A container with id \'{0}\' does not exist'.format(id)) - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceNotFoundError: + print('A container with id \'{0}\' does not exist'.format(id)) @staticmethod def list_Containers(db): @@ -245,7 +223,7 @@ def list_Containers(db): print('Containers:') - containers = list(db.read_all_containers()) + containers = list(db.list_containers()) if not containers: return @@ -262,11 +240,8 @@ 
def delete_Container(db, id): print('Container with id \'{0}\' was deleted'.format(id)) - except errors.HTTPFailure as e: - if e.status_code == 404: - print('A container with id \'{0}\' does not exist'.format(id)) - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceNotFoundError: + print('A container with id \'{0}\' does not exist'.format(id)) def run_sample(): @@ -276,11 +251,8 @@ def run_sample(): try: db = client.create_database(id=DATABASE_ID) - except errors.HTTPFailure as e: - if e.status_code == 409: - pass - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceExistsError: + db = client.get_database_client(DATABASE_ID) # query for a container ContainerManagement.find_container(db, CONTAINER_ID) @@ -304,13 +276,10 @@ def run_sample(): try: client.delete_database(db) - except errors.CosmosError as e: - if e.status_code == 404: - pass - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceNotFoundError: + pass - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: print('\nrun_sample has caught an error. 
{0}'.format(e.message)) finally: diff --git a/sdk/cosmos/azure-cosmos/samples/DatabaseManagement/Program.py b/sdk/cosmos/azure-cosmos/samples/DatabaseManagement/Program.py index f424c98ffa3e..f1535c93961b 100644 --- a/sdk/cosmos/azure-cosmos/samples/DatabaseManagement/Program.py +++ b/sdk/cosmos/azure-cosmos/samples/DatabaseManagement/Program.py @@ -68,11 +68,8 @@ def create_database(client, id): client.create_database(id=id) print('Database with id \'{0}\' created'.format(id)) - except errors.HTTPFailure as e: - if e.status_code == 409: - print('A database with id \'{0}\' already exists'.format(id)) - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceExistsError: + print('A database with id \'{0}\' already exists'.format(id)) @staticmethod def read_database(client, id): @@ -82,11 +79,8 @@ def read_database(client, id): database = client.get_database_client(id) print('Database with id \'{0}\' was found, it\'s link is {1}'.format(id, database.database_link)) - except errors.HTTPFailure as e: - if e.status_code == 404: - print('A database with id \'{0}\' does not exist'.format(id)) - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceNotFoundError: + print('A database with id \'{0}\' does not exist'.format(id)) @staticmethod def list_databases(client): @@ -94,7 +88,7 @@ def list_databases(client): print('Databases:') - databases = list(client.read_all_databases()) + databases = list(client.list_databases()) if not databases: return @@ -111,11 +105,8 @@ def delete_database(client, id): print('Database with id \'{0}\' was deleted'.format(id)) - except errors.HTTPFailure as e: - if e.status_code == 404: - print('A database with id \'{0}\' does not exist'.format(id)) - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceNotFoundError: + print('A database with id \'{0}\' does not exist'.format(id)) def run_sample(): with IDisposable(cosmos_client.CosmosClient(HOST, {'masterKey': MASTER_KEY} 
)) as client: @@ -135,7 +126,7 @@ def run_sample(): # delete database by id DatabaseManagement.delete_database(client, DATABASE_ID) - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: print('\nrun_sample has caught an error. {0}'.format(e.message)) finally: diff --git a/sdk/cosmos/azure-cosmos/samples/DocumentManagement/Program.py b/sdk/cosmos/azure-cosmos/samples/DocumentManagement/Program.py index 51754fe8dbf9..31f2953f3dae 100644 --- a/sdk/cosmos/azure-cosmos/samples/DocumentManagement/Program.py +++ b/sdk/cosmos/azure-cosmos/samples/DocumentManagement/Program.py @@ -179,22 +179,16 @@ def run_sample(): try: db = client.create_database(id=DATABASE_ID) - except errors.HTTPFailure as e: - if e.status_code == 409: - pass - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceExistsError: + pass # setup container for this sample try: container = db.create_container(id=CONTAINER_ID, partition_key=PartitionKey(path='/id', kind='Hash')) print('Container with id \'{0}\' created'.format(CONTAINER_ID)) - except errors.HTTPFailure as e: - if e.status_code == 409: - print('Container with id \'{0}\' was found'.format(CONTAINER_ID)) - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceExistsError: + print('Container with id \'{0}\' was found'.format(CONTAINER_ID)) ItemManagement.CreateItems(container) ItemManagement.ReadItem(container, 'SalesOrder1') @@ -208,13 +202,10 @@ def run_sample(): try: client.delete_database(db) - except errors.CosmosError as e: - if e.status_code == 404: - pass - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceNotFoundError: + pass - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: print('\nrun_sample has caught an error. 
{0}'.format(e.message)) finally: diff --git a/sdk/cosmos/azure-cosmos/samples/IndexManagement/Program.py b/sdk/cosmos/azure-cosmos/samples/IndexManagement/Program.py index b50b5c684a96..d4a945ac699b 100644 --- a/sdk/cosmos/azure-cosmos/samples/IndexManagement/Program.py +++ b/sdk/cosmos/azure-cosmos/samples/IndexManagement/Program.py @@ -42,7 +42,8 @@ def ObtainClient(): urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) connection_policy.SSLConfiguration.SSLCaCerts = False - return cosmos_client.CosmosClient(HOST, {'masterKey': MASTER_KEY}, "Session", connection_policy) + return cosmos_client.CosmosClient(HOST, MASTER_KEY, "Session", connection_policy=connection_policy) + # Query for Entity / Entities def Query_Entities(parent, entity_type, id = None): @@ -56,13 +57,13 @@ def Query_Entities(parent, entity_type, id = None): try: if entity_type == 'database': if id == None: - entities = list(parent.read_all_databases()) + entities = list(parent.list_databases()) else: entities = list(parent.query_databases(find_entity_by_id_query)) elif entity_type == 'collection': if id == None: - entities = list(parent.read_all_containers()) + entities = list(parent.list_containers()) else: entities = list(parent.query_containers(find_entity_by_id_query)) @@ -71,7 +72,7 @@ def Query_Entities(parent, entity_type, id = None): entities = list(parent.read_all_items()) else: entities = list(parent.query_items(find_entity_by_id_query)) - except errors.CosmosError as e: + except errors.AzureError as e: print("The following error occured while querying for the entity / entities ", entity_type, id if id != None else "") print(e) raise @@ -81,36 +82,36 @@ def Query_Entities(parent, entity_type, id = None): return entities[0] return None + def CreateDatabaseIfNotExists(client, database_id): try: database = Query_Entities(client, 'database', id = database_id) if database == None: - database = client.create_database(id=database_id) - return 
client.get_database_client(database['id']) - except errors.HTTPFailure as e: - if e.status_code == 409: # Move these constants to an enum - pass - else: - raise errors.HTTPFailure(e.status_code) + return client.create_database(id=database_id) + else: + return client.get_database_client(database_id) + except errors.CosmosResourceExistsError: + pass + def DeleteContainerIfExists(db, collection_id): try: db.delete_container(collection_id) print('Collection with id \'{0}\' was deleted'.format(collection_id)) - except errors.HTTPFailure as e: - if e.status_code == 404: - pass - elif e.status_code == 400: + except errors.CosmosResourceNotFoundError: + pass + except errors.CosmosHttpResponseError as e: + if e.status_code == 400: print("Bad request for collection link", collection_id) - raise - else: - raise + raise + def print_dictionary_items(dict): for k, v in dict.items(): print("{:<15}".format(k), v) print() + def FetchAllDatabases(client): databases = Query_Entities(client, 'database') print("-" * 41) @@ -119,6 +120,7 @@ def FetchAllDatabases(client): print_dictionary_items(db) print("-" * 41) + def QueryDocumentsWithCustomQuery(container, query_with_optional_parameters, message = "Document(s) found by query: "): try: results = list(container.query_items(query_with_optional_parameters, enable_cross_partition_query=True)) @@ -126,10 +128,10 @@ def QueryDocumentsWithCustomQuery(container, query_with_optional_parameters, mes for doc in results: print(doc) return results - except errors.HTTPFailure as e: - if e.status_code == 404: - print("Document doesn't exist") - elif e.status_code == 400: + except errors.CosmosResourceNotFoundError: + print("Document doesn't exist") + except errors.CosmosHttpResponseError as e: + if e.status_code == 400: # Can occur when we are trying to query on excluded paths print("Bad Request exception occured: ", e) pass @@ -138,6 +140,7 @@ def QueryDocumentsWithCustomQuery(container, query_with_optional_parameters, mes finally: print() + def 
ExplicitlyExcludeFromIndex(db): """ The default index policy on a DocumentContainer will AUTOMATICALLY index ALL documents added. There may be scenarios where you want to exclude a specific doc from the index even though all other @@ -153,7 +156,8 @@ def ExplicitlyExcludeFromIndex(db): print(created_Container) print("\n" + "-" * 25 + "\n1. Collection created with index policy") - print_dictionary_items(created_Container.properties["indexingPolicy"]) + properties = created_Container.read() + print_dictionary_items(properties["indexingPolicy"]) # Create a document and query on it immediately. # Will work as automatic indexing is still True @@ -190,14 +194,11 @@ def ExplicitlyExcludeFromIndex(db): # Cleanup db.delete_container(created_Container) print("\n") - - except errors.HTTPFailure as e: - if e.status_code == 409: - print("Entity already exists") - elif e.status_code == 404: - print("Entity doesn't exist") - else: - raise + except errors.CosmosResourceExistsError: + print("Entity already exists") + except errors.CosmosResourceNotFoundError: + print("Entity doesn't exist") + def UseManualIndexing(db): """The default index policy on a DocumentContainer will AUTOMATICALLY index ALL documents added. @@ -214,10 +215,11 @@ def UseManualIndexing(db): indexing_policy={"automatic" : False}, partition_key=PARTITION_KEY ) + properties = created_Container.read() print(created_Container) print("\n" + "-" * 25 + "\n2. 
Collection created with index policy") - print_dictionary_items(created_Container.properties["indexingPolicy"]) + print_dictionary_items(properties["indexingPolicy"]) # Create a document # Then query for that document @@ -254,14 +256,11 @@ def UseManualIndexing(db): # Cleanup db.delete_container(created_Container) print("\n") + except errors.CosmosResourceExistsError: + print("Entity already exists") + except errors.CosmosResourceNotFoundError: + print("Entity doesn't exist") - except errors.HTTPFailure as e: - if e.status_code == 409: - print("Entity already exists") - elif e.status_code == 404: - print("Entity doesn't exist") - else: - raise def ExcludePathsFromIndex(db): """The default behavior is for Cosmos to index every attribute in every document automatically. @@ -300,9 +299,10 @@ def ExcludePathsFromIndex(db): indexing_policy=collection_to_create['indexingPolicy'], partition_key=PARTITION_KEY ) + properties = created_Container.read() print(created_Container) print("\n" + "-" * 25 + "\n4. Collection created with index policy") - print_dictionary_items(created_Container.properties["indexingPolicy"]) + print_dictionary_items(properties["indexingPolicy"]) # The effect of the above IndexingPolicy is that only id, foo, and the subDoc/searchable are indexed doc = created_Container.create_item(body=doc_with_nested_structures) @@ -329,14 +329,11 @@ def ExcludePathsFromIndex(db): # Cleanup db.delete_container(created_Container) print("\n") + except errors.CosmosResourceExistsError: + print("Entity already exists") + except errors.CosmosResourceNotFoundError: + print("Entity doesn't exist") - except errors.HTTPFailure as e: - if e.status_code == 409: - print("Entity already exists") - elif e.status_code == 404: - print("Entity doesn't exist") - else: - raise def RangeScanOnHashIndex(db): """When a range index is not available (i.e. 
Only hash or no index found on the path), comparisons queries can still @@ -365,9 +362,10 @@ def RangeScanOnHashIndex(db): indexing_policy=collection_to_create['indexingPolicy'], partition_key=PARTITION_KEY ) + properties = created_Container.read() print(created_Container) print("\n" + "-" * 25 + "\n5. Collection created with index policy") - print_dictionary_items(created_Container.properties["indexingPolicy"]) + print_dictionary_items(properties["indexingPolicy"]) doc1 = created_Container.create_item(body={ "id" : "dyn1", "length" : 10, "width" : 5, "height" : 15 }) doc2 = created_Container.create_item(body={ "id" : "dyn2", "length" : 7, "width" : 15 }) @@ -393,13 +391,11 @@ def RangeScanOnHashIndex(db): # Cleanup db.delete_container(created_Container) print("\n") - except errors.HTTPFailure as e: - if e.status_code == 409: - print("Entity already exists") - elif e.status_code == 404: - print("Entity doesn't exist") - else: - raise + except errors.CosmosResourceExistsError: + print("Entity already exists") + except errors.CosmosResourceNotFoundError: + print("Entity doesn't exist") + def UseRangeIndexesOnStrings(db): """Showing how range queries can be performed even on strings. @@ -458,9 +454,10 @@ def UseRangeIndexesOnStrings(db): indexing_policy=collection_definition['indexingPolicy'], partition_key=PARTITION_KEY ) + properties = created_Container.read() print(created_Container) print("\n" + "-" * 25 + "\n6. 
Collection created with index policy") - print_dictionary_items(created_Container.properties["indexingPolicy"]) + print_dictionary_items(properties["indexingPolicy"]) created_Container.create_item(body={ "id" : "doc1", "region" : "USA" }) created_Container.create_item(body={ "id" : "doc2", "region" : "UK" }) @@ -481,13 +478,11 @@ def UseRangeIndexesOnStrings(db): # Cleanup db.delete_container(created_Container) print("\n") - except errors.HTTPFailure as e: - if e.status_code == 409: - print("Entity already exists") - elif e.status_code == 404: - print("Entity doesn't exist") - else: - raise + except errors.CosmosResourceExistsError: + print("Entity already exists") + except errors.CosmosResourceNotFoundError: + print("Entity doesn't exist") + def PerformIndexTransformations(db): try: @@ -495,21 +490,22 @@ def PerformIndexTransformations(db): # Create a collection with default indexing policy created_Container = db.create_container(id=CONTAINER_ID, partition_key=PARTITION_KEY) + properties = created_Container.read() print(created_Container) print("\n" + "-" * 25 + "\n7. Collection created with index policy") - print_dictionary_items(created_Container.properties["indexingPolicy"]) + print_dictionary_items(properties["indexingPolicy"]) # Insert some documents doc1 = created_Container.create_item(body={ "id" : "dyn1", "length" : 10, "width" : 5, "height" : 15 }) doc2 = created_Container.create_item(body={ "id" : "dyn2", "length" : 7, "width" : 15 }) doc3 = created_Container.create_item(body={ "id" : "dyn3", "length" : 2 }) - print("Three docs created with ids : ", doc1["id"], doc2["id"], doc3["id"], " with indexing mode", created_Container.properties['indexingPolicy']['indexingMode']) + print("Three docs created with ids : ", doc1["id"], doc2["id"], doc3["id"], " with indexing mode", properties['indexingPolicy']['indexingMode']) # Switch to use string & number range indexing with maximum precision. 
print("Changing to string & number range indexing with maximum precision (needed for Order By).") - created_Container.properties['indexingPolicy']['includedPaths'][0]['indexes'] = [{ + properties['indexingPolicy']['includedPaths'][0]['indexes'] = [{ 'kind': documents.IndexKind.Range, 'dataType': documents.DataType.String, 'precision': -1 @@ -518,34 +514,34 @@ def PerformIndexTransformations(db): created_Container = db.replace_container( container=created_Container.id, partition_key=PARTITION_KEY, - indexing_policy=created_Container.properties['indexingPolicy'] + indexing_policy=properties['indexingPolicy'] ) + properties = created_Container.read() # Check progress and wait for completion - should be instantaneous since we have only a few documents, but larger # collections will take time. - print_dictionary_items(created_Container.properties["indexingPolicy"]) + print_dictionary_items(properties["indexingPolicy"]) # Now exclude a path from indexing to save on storage space. print("Now excluding the path /length/ to save on storage space") - created_Container.properties['indexingPolicy']['excludedPaths'] = [{"path" : "/length/*"}] + properties['indexingPolicy']['excludedPaths'] = [{"path" : "/length/*"}] created_Container = db.replace_container( container=created_Container.id, partition_key=PARTITION_KEY, - indexing_policy=created_Container.properties['indexingPolicy'] + indexing_policy=properties['indexingPolicy'] ) - print_dictionary_items(created_Container.properties["indexingPolicy"]) + properties = created_Container.read() + print_dictionary_items(properties["indexingPolicy"]) # Cleanup db.delete_container(created_Container) print("\n") - except errors.HTTPFailure as e: - if e.status_code == 409: - print("Entity already exists") - elif e.status_code == 404: - print("Entity doesn't exist") - else: - raise + except errors.CosmosResourceExistsError: + print("Entity already exists") + except errors.CosmosResourceNotFoundError: + print("Entity doesn't exist") + def 
PerformMultiOrderbyQuery(db): try: @@ -590,11 +586,11 @@ def PerformMultiOrderbyQuery(db): indexing_policy=indexing_policy, partition_key=PARTITION_KEY ) - + properties = created_container.read() print(created_container) print("\n" + "-" * 25 + "\n8. Collection created with index policy") - print_dictionary_items(created_container.properties["indexingPolicy"]) + print_dictionary_items(properties["indexingPolicy"]) # Insert some documents doc1 = created_container.create_item(body={"id": "doc1", "numberField": 1, "stringField": "1", "numberField2": 1, "stringField2": "1"}) @@ -632,109 +628,11 @@ def PerformMultiOrderbyQuery(db): # Cleanup db.delete_container(created_container) print("\n") - except errors.HTTPFailure as e: - if e.status_code == 409: - print("Entity already exists") - elif e.status_code == 404: - print("Entity doesn't exist") - else: - raise - -def PerformMultiOrderbyQuery(client, database_id): - try: - DeleteContainerIfExists(client, database_id, COLLECTION_ID) - database_link = GetDatabaseLink(database_id) - - # Create a collection with composite indexes - indexingPolicy = { - "compositeIndexes": [ - [ - { - "path": "/numberField", - "order": "ascending" - }, - { - "path": "/stringField", - "order": "descending" - } - ], - [ - { - "path": "/numberField", - "order": "descending" - }, - { - "path": "/stringField", - "order": "ascending" - }, - { - "path": "/numberField2", - "order": "descending" - }, - { - "path": "/stringField2", - "order": "ascending" - } - ] - ] - } - - container_definition = { - 'id': COLLECTION_ID, - 'indexingPolicy': indexingPolicy - } - - created_container = client.CreateContainer(database_link, container_definition) - - print(created_container) - - print("\n" + "-" * 25 + "\n8. 
Collection created with index policy") - print_dictionary_items(created_container["indexingPolicy"]) - - # Insert some documents - collection_link = GetContainerLink(database_id, COLLECTION_ID) - doc1 = client.CreateItem(collection_link, {"id": "doc1", "numberField": 1, "stringField": "1", "numberField2": 1, "stringField2": "1"}) - doc2 = client.CreateItem(collection_link, {"id": "doc2", "numberField": 1, "stringField": "1", "numberField2": 1, "stringField2": "2"}) - doc3 = client.CreateItem(collection_link, {"id": "doc3", "numberField": 1, "stringField": "1", "numberField2": 2, "stringField2": "1"}) - doc4 = client.CreateItem(collection_link, {"id": "doc4", "numberField": 1, "stringField": "1", "numberField2": 2, "stringField2": "2"}) - doc5 = client.CreateItem(collection_link, {"id": "doc5", "numberField": 1, "stringField": "2", "numberField2": 1, "stringField2": "1"}) - doc6 = client.CreateItem(collection_link, {"id": "doc6", "numberField": 1, "stringField": "2", "numberField2": 1, "stringField2": "2"}) - doc7 = client.CreateItem(collection_link, {"id": "doc7", "numberField": 1, "stringField": "2", "numberField2": 2, "stringField2": "1"}) - doc8 = client.CreateItem(collection_link, {"id": "doc8", "numberField": 1, "stringField": "2", "numberField2": 2, "stringField2": "2"}) - doc9 = client.CreateItem(collection_link, {"id": "doc9", "numberField": 2, "stringField": "1", "numberField2": 1, "stringField2": "1"}) - doc10 = client.CreateItem(collection_link, {"id": "doc10", "numberField": 2, "stringField": "1", "numberField2": 1, "stringField2": "2"}) - doc11 = client.CreateItem(collection_link, {"id": "doc11", "numberField": 2, "stringField": "1", "numberField2": 2, "stringField2": "1"}) - doc12 = client.CreateItem(collection_link, {"id": "doc12", "numberField": 2, "stringField": "1", "numberField2": 2, "stringField2": "2"}) - doc13 = client.CreateItem(collection_link, {"id": "doc13", "numberField": 2, "stringField": "2", "numberField2": 1, "stringField2": "1"}) - 
doc14 = client.CreateItem(collection_link, {"id": "doc14", "numberField": 2, "stringField": "2", "numberField2": 1, "stringField2": "2"}) - doc15 = client.CreateItem(collection_link, {"id": "doc15", "numberField": 2, "stringField": "2", "numberField2": 2, "stringField2": "1"}) - doc16 = client.CreateItem(collection_link, {"id": "doc16", "numberField": 2, "stringField": "2", "numberField2": 2, "stringField2": "2"}) - - print("Query documents and Order by 1st composite index: Ascending numberField and Descending stringField:") - - query = { - "query": "SELECT * FROM r ORDER BY r.numberField ASC, r.stringField DESC", - } - QueryDocumentsWithCustomQuery(client, collection_link, query) - - print("Query documents and Order by inverted 2nd composite index -") - print("Ascending numberField, Descending stringField, Ascending numberField2, Descending stringField2") - - query = { - "query": "SELECT * FROM r ORDER BY r.numberField ASC, r.stringField DESC, r.numberField2 ASC, r.stringField2 DESC", - } - QueryDocumentsWithCustomQuery(client, collection_link, query) + except errors.CosmosResourceExistsError: + print("Entity already exists") + except errors.CosmosResourceNotFoundError: + print("Entity doesn't exist") - # Cleanup - client.DeleteContainer(collection_link) - print("\n") - except errors.HTTPFailure as e: - if e.status_code == 409: - print("Entity already exists") - elif e.status_code == 404: - print("Entity doesn't exist") - else: - raise def RunIndexDemo(): try: @@ -766,10 +664,7 @@ def RunIndexDemo(): # 8. Perform Multi Orderby queries using composite indexes PerformMultiOrderbyQuery(created_db) - # 8. 
Perform Multi Orderby queries using composite indexes - PerformMultiOrderbyQuery(client, DATABASE_ID) - - except errors.CosmosError as e: + except errors.AzureError as e: raise e if __name__ == '__main__': @@ -777,4 +672,4 @@ def RunIndexDemo(): RunIndexDemo() except Exception as e: - print("Top level Error: args:{0}, message:N/A".format(e.args)) + print("Top level Error: args:{0}, message:N/A".format(e.args)) diff --git a/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/ConflictWorker.py b/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/ConflictWorker.py index a714278dc71d..40f64026ebec 100644 --- a/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/ConflictWorker.py +++ b/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/ConflictWorker.py @@ -27,11 +27,8 @@ def initialize_async(self): database = None try: database = create_client.ReadDatabase("dbs/" + self.database_name) - except errors.CosmosError as e: - if e.status_code == StatusCodes.NOT_FOUND: - print("database not found, needs to be created.") - else: - raise e + except errors.CosmosResourceNotFoundError: + print("database not found, needs to be created.") if not database: database = {'id': self.database_name} @@ -122,20 +119,15 @@ def initialize_async(self): } try: lww_sproc = create_client.CreateStoredProcedure("dbs/" + self.database_name+ "/colls/" + self.udp_collection_name, lww_sproc) - except errors.CosmosError as e: - if e.status_code == StatusCodes.CONFLICT: - return - raise e + except errors.CosmosResourceExistsError: + return def try_create_document_collection (self, client, database, collection): read_collection = None try: read_collection = client.ReadContainer("dbs/" + database['id'] + "/colls/" + collection['id']) - except errors.CosmosError as e: - if e.status_code == StatusCodes.NOT_FOUND: - print("collection not found, needs to be created.") - else: - raise errors + except errors.CosmosResourceNotFoundError: + print("collection not found, needs to be created.") if 
read_collection == None: collection['partitionKey'] = {'paths': ['/id'],'kind': 'Hash'} @@ -481,33 +473,25 @@ def run_delete_conflict_on_UDP_async(self): def try_insert_document(self, client, collection_uri, document): try: return client.CreateItem(collection_uri, document) - except errors.CosmosError as e: - if e.status_code == StatusCodes.CONFLICT: - return None - raise e + except errors.CosmosResourceExistsError: + return None def try_update_document(self, client, collection_uri, document, options): try: options['partitionKey'] = document['id'] return client.ReplaceItem(collection_uri + "/docs/" + document['id'], document, options); - except errors.CosmosError as e: - if (e.status_code == StatusCodes.PRECONDITION_FAILED or - e.status_code == StatusCodes.NOT_FOUND): - # Lost synchronously or no document yet. No conflict is induced. - return None - raise e + except (errors.CosmosResourceNotFoundError, errors.CosmosAccessConditionFailedError): + # Lost synchronously or no document yet. No conflict is induced. + return None def try_delete_document(self, client, collection_uri, document, options): try: options['partitionKey'] = document['id'] client.DeleteItem(collection_uri + "/docs/" + document['id'], options) return document - except errors.CosmosError as e: - if (e.status_code == StatusCodes.PRECONDITION_FAILED or - e.status_code == StatusCodes.NOT_FOUND): - #Lost synchronously. No conflict is induced. - return None - raise e + except (errors.CosmosResourceNotFoundError, errors.CosmosAccessConditionFailedError): + #Lost synchronously. No conflict is induced. 
+ return None def try_update_or_delete_document(self, client, collection_uri, conflict_document, options): if int(conflict_document['regionId']) % 2 == 1: @@ -607,16 +591,14 @@ def validate_LWW_async_internal(self, client, conflict_document, has_delete_conf (conflict_document[0]['id'], client.ReadEndpoint)) time.sleep(0.5) - except errors.CosmosError as e: - if e.status_code == StatusCodes.NOT_FOUND: - print("Delete conflict won @ %s" % client.ReadEndpoint) - return - else: - - self.trace_error("Delete conflict for document %s didnt win @ %s" % - (conflict_document[0]['id'], client.ReadEndpoint)) + except errors.CosmosResourceNotFoundError: + print("Delete conflict won @ %s" % client.ReadEndpoint) + return + except errors.CosmosHttpResponseError: + self.trace_error("Delete conflict for document %s didnt win @ %s" % + (conflict_document[0]['id'], client.ReadEndpoint)) - time.sleep(0.5) + time.sleep(0.5) winner_document = None @@ -640,7 +622,7 @@ def validate_LWW_async_internal(self, client, conflict_document, has_delete_conf (int(winner_document["regionId"]), client.WriteEndpoint)) time.sleep(0.5) - except errors.CosmosError as e: + except errors.AzureError as e: self.trace_error("Winner document from region %d is not found @ %s, retrying..." 
% (int(winner_document["regionId"]), client.WriteEndpoint)) @@ -673,15 +655,13 @@ def validate_UDP_async_internal(self, client, conflict_document, has_delete_conf (conflict_document[0]['id'], client.ReadEndpoint)) time.sleep(0.5) - except errors.CosmosError as e: - if e.status_code == StatusCodes.NOT_FOUND: - print("Delete conflict won @ %s" % client.ReadEndpoint) - return - else: - self.trace_error("Delete conflict for document %s didnt win @ %s" % - (conflict_document[0]['id'], client.ReadEndpoint)) - - time.sleep(0.5) + except errors.CosmosResourceNotFoundError: + print("Delete conflict won @ %s" % client.ReadEndpoint) + return + except errors.CosmosHttpResponseError: + self.trace_error("Delete conflict for document %s didnt win @ %s" % + (conflict_document[0]['id'], client.ReadEndpoint)) + time.sleep(0.5) winner_document = None @@ -705,10 +685,9 @@ def validate_UDP_async_internal(self, client, conflict_document, has_delete_conf (int(winner_document['regionId']), client.WriteEndpoint)) time.sleep(0.5) - except errors.CosmosError as e: + except errors.AzureError: self.trace_error("Winner document from region %d is not found @ %s, retrying..." 
% (int(winner_document['regionId']), client.WriteEndpoint)) - time.sleep(0.5) def trace_error(self, message): diff --git a/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/MultiMasterScenario.py b/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/MultiMasterScenario.py index 8b97f0899838..453f8caa9fe9 100644 --- a/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/MultiMasterScenario.py +++ b/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/MultiMasterScenario.py @@ -27,7 +27,12 @@ def __init__(self): connection_policy.UseMultipleWriteLocations = True connection_policy.PreferredLocations = [region] - client = cosmos_client_connection.CosmosClientConnection(self.account_endpoint, {'masterKey': self.account_key}, connection_policy, documents.ConsistencyLevel.Session) + client = cosmos_client_connection.CosmosClientConnection( + self.account_endpoint, + {'masterKey': self.account_key}, + connection_policy, + documents.ConsistencyLevel.Session + ) self.workers.append(Worker(client, self.database_name, self.basic_collection_name)) diff --git a/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/Worker.py b/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/Worker.py index 28eeaefe5291..38af9a920314 100644 --- a/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/Worker.py +++ b/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/Worker.py @@ -62,10 +62,10 @@ def delete_all_async(self): while doc: try: self.client.DeleteItem(doc['_self'], {'partitionKey': doc['id']}) - except errors.CosmosError as e: - if e.status_code != StatusCodes.NOT_FOUND: - print("Error occurred while deleting document from %s" % self.client.WriteEndpoint) - else: - raise e + except errors.CosmosResourceNotFoundError: + raise + except errors.CosmosHttpResponseError as e: + print("Error occurred while deleting document from %s" % self.client.WriteEndpoint) + doc = next(it, None) print("Deleted all documents from region %s" % self.client.WriteEndpoint) \ No newline at end of 
file diff --git a/sdk/cosmos/azure-cosmos/samples/NonPartitionedCollectionOperations/Program.py b/sdk/cosmos/azure-cosmos/samples/NonPartitionedCollectionOperations/Program.py index baaf8f5e41b8..5be4ad1345a9 100644 --- a/sdk/cosmos/azure-cosmos/samples/NonPartitionedCollectionOperations/Program.py +++ b/sdk/cosmos/azure-cosmos/samples/NonPartitionedCollectionOperations/Program.py @@ -97,7 +97,7 @@ def CreateNonPartitionedCollection(db): # python 3 compatible: convert data from byte to unicode string data = data.decode('utf-8') data = json.loads(data) - created_collection = db.get_container_client(data['id']) + created_collection = db.get_container_client("mycoll") # Create a document in the non partitioned collection using the rest API and older version resource_url = base_url_split[0] + ":" + base_url_split[1] + ":" + base_url_split[2].split("/")[0] \ @@ -122,7 +122,7 @@ def CreateNonPartitionedCollection(db): data = data.decode('utf-8') data = json.loads(data) created_document = data - return created_collection, created_document + return created_collection, "SaledOrder0" @staticmethod def get_authorization(client, verb, resource_id_or_fullname, resource_type, headers): @@ -162,7 +162,7 @@ def ReadItem(container, doc_id): print('\n1.2 Reading Item by Id\n') # Note that Reads require a partition key to be spcified. 
- response = container.read_item(id=doc_id, partition_key=partition_key.NonePartitionKeyValue) + response = container.read_item(doc_id, partition_key=partition_key.NonePartitionKeyValue) print('Item read by Id {0}'.format(doc_id)) print('Account Number: {0}'.format(response.get('account_number'))) @@ -175,7 +175,7 @@ def ReadItems(container): # NOTE: Use MaxItemCount on Options to control how many items come back per trip to the server # Important to handle throttles whenever you are doing operations such as this that might # result in a 429 (throttled request) - item_list = list(container.list_items(max_item_count=10)) + item_list = list(container.read_all_items(max_item_count=10)) print('Found {0} items'.format(item_list.__len__())) @@ -201,7 +201,7 @@ def QueryItems(container, doc_id): def ReplaceItem(container, doc_id): print('\n1.5 Replace an Item\n') - read_item = container.read_item(id=doc_id, partition_key=partition_key.NonePartitionKeyValue) + read_item = container.read_item(doc_id, partition_key=partition_key.NonePartitionKeyValue) read_item['subtotal'] = read_item['subtotal'] + 1 response = container.replace_item(item=read_item, body=read_item) @@ -211,7 +211,7 @@ def ReplaceItem(container, doc_id): def UpsertItem(container, doc_id): print('\n1.6 Upserting an item\n') - read_item = container.read_item(id=doc_id, partition_key=partition_key.NonePartitionKeyValue) + read_item = container.read_item(doc_id, partition_key=partition_key.NonePartitionKeyValue) read_item['subtotal'] = read_item['subtotal'] + 1 response = container.upsert_item(body=read_item) @@ -285,26 +285,19 @@ def run_sample(): # setup database for this sample try: db = client.create_database(id=DATABASE_ID) - - except errors.HTTPFailure as e: - if e.status_code == 409: - pass - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceExistsError: + db = client.get_database_client(DATABASE_ID) # setup container for this sample try: container, document = 
ItemManagement.CreateNonPartitionedCollection(db) print('Container with id \'{0}\' created'.format(CONTAINER_ID)) - except errors.HTTPFailure as e: - if e.status_code == 409: - print('Container with id \'{0}\' was found'.format(CONTAINER_ID)) - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceExistsError: + print('Container with id \'{0}\' was found'.format(CONTAINER_ID)) # Read Item created in non partitioned collection using older API version - ItemManagement.ReadItem(container, document['id']) + ItemManagement.ReadItem(container, document) ItemManagement.CreateItems(container) ItemManagement.ReadItems(container) ItemManagement.QueryItems(container, 'SalesOrder1') @@ -315,14 +308,10 @@ def run_sample(): # cleanup database after sample try: client.delete_database(db) + except errors.CosmosResourceNotFoundError: + pass - except errors.CosmosError as e: - if e.status_code == 404: - pass - else: - raise errors.HTTPFailure(e.status_code) - - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: print('\nrun_sample has caught an error. 
{0}'.format(e.message)) finally: diff --git a/sdk/cosmos/azure-cosmos/test/aggregate_tests.py b/sdk/cosmos/azure-cosmos/test/aggregate_tests.py index c8dbdf131109..a10343334234 100644 --- a/sdk/cosmos/azure-cosmos/test/aggregate_tests.py +++ b/sdk/cosmos/azure-cosmos/test/aggregate_tests.py @@ -31,7 +31,7 @@ import azure.cosmos.cosmos_client as cosmos_client import azure.cosmos.documents as documents import test_config -from azure.cosmos.errors import HTTPFailure +from azure.cosmos.errors import CosmosHttpResponseError from azure.cosmos.partition_key import PartitionKey pytestmark = pytest.mark.cosmosEmulator @@ -64,8 +64,8 @@ def _setup(): "'masterKey' and 'host' at the top of this class to run the " "tests.") - mcs.client = cosmos_client.CosmosClient(_config.host, - {'masterKey': _config.master_key}, "Session", _config.connection_policy) + mcs.client = cosmos_client.CosmosClient( + _config.host, _config.master_key, "Session", connection_policy=_config.connection_policy) created_db = test_config._test_config.create_database_if_not_exist(mcs.client) mcs.created_collection = _create_collection(created_db) @@ -221,19 +221,21 @@ def invokeNext(): self.assertRaises(StopIteration, invokeNext) ###################################### - # test fetch_next_block() behavior + # test by_page() behavior ###################################### - fetched_res = result_iterable.fetch_next_block() + page_iter = result_iterable.by_page() + fetched_res = list(next(page_iter)) fetched_size = len(fetched_res) self.assertEqual(fetched_size, 1) self.assertEqual(fetched_res[0], expected) # no more results will be returned - self.assertEqual(result_iterable.fetch_next_block(), []) + with self.assertRaises(StopIteration): + next(page_iter) if isinstance(expected, Exception): - self.assertRaises(HTTPFailure, _verify_result) + self.assertRaises(CosmosHttpResponseError, _verify_result) else: _verify_result() diff --git a/sdk/cosmos/azure-cosmos/test/conftest.py 
b/sdk/cosmos/azure-cosmos/test/conftest.py index e0548dcc6449..cbb2191bc775 100644 --- a/sdk/cosmos/azure-cosmos/test/conftest.py +++ b/sdk/cosmos/azure-cosmos/test/conftest.py @@ -39,7 +39,7 @@ def delete_database(): masterKey = config.masterKey connectionPolicy = config.connectionPolicy try: - client = cosmos_client.CosmosClient(host, {'masterKey': masterKey}, "Session", connectionPolicy) + client = cosmos_client.CosmosClient(host, masterKey, "Session", connection_policy=connectionPolicy) # This is to soft-fail the teardown while cosmos tests are not running automatically except Exception: pass @@ -48,9 +48,8 @@ def delete_database(): for database_id in database_ids_to_delete: try: client.delete_database(database_id) - except errors.HTTPFailure as e: - if e.status_code != StatusCodes.NOT_FOUND: - raise e + except errors.CosmosResourceNotFoundError: + pass del database_ids_to_delete[:] print("Clean up completed!") diff --git a/sdk/cosmos/azure-cosmos/test/crud_tests.py b/sdk/cosmos/azure-cosmos/test/crud_tests.py index 0f093271d7c3..14529aede445 100644 --- a/sdk/cosmos/azure-cosmos/test/crud_tests.py +++ b/sdk/cosmos/azure-cosmos/test/crud_tests.py @@ -84,7 +84,7 @@ def __AssertHTTPFailureWithStatus(self, status_code, func, *args, **kwargs): try: func(*args, **kwargs) self.assertFalse(True, 'function should fail.') - except errors.HTTPFailure as inst: + except errors.CosmosHttpResponseError as inst: self.assertEqual(inst.status_code, status_code) @classmethod @@ -95,22 +95,22 @@ def setUpClass(cls): "You must specify your Azure Cosmos account values for " "'masterKey' and 'host' at the top of this class to run the " "tests.") - cls.client = cosmos_client.CosmosClient(cls.host, {'masterKey': cls.masterKey}, connection_policy=cls.connectionPolicy) + cls.client = cosmos_client.CosmosClient(cls.host, cls.masterKey, connection_policy=cls.connectionPolicy) cls.databaseForTest = cls.configs.create_database_if_not_exist(cls.client) def setUp(self): - self.client = 
cosmos_client.CosmosClient(self.host, {'masterKey':self.masterKey}, "Session", - self.connectionPolicy) + self.client = cosmos_client.CosmosClient(self.host, self.masterKey, "Session", + connection_policy=self.connectionPolicy) def test_database_crud(self): # read databases. - databases = list(self.client.read_all_databases()) + databases = list(self.client.list_databases()) # create a database. before_create_databases_count = len(databases) database_id = str(uuid.uuid4()) created_db = self.client.create_database(database_id) self.assertEqual(created_db.id, database_id) # Read databases after creation. - databases = list(self.client.read_all_databases()) + databases = list(self.client.list_databases()) self.assertEqual(len(databases), before_create_databases_count + 1, 'create should increase the number of databases') @@ -121,8 +121,7 @@ def test_database_crud(self): {'name': '@id', 'value': database_id} ] })) - self.assert_(databases, - 'number of results for the query should be > 0') + self.assertTrue(databases, 'number of results for the query should be > 0') # read database. self.client.get_database_client(created_db.id) @@ -149,12 +148,12 @@ def test_database_level_offer_throughput(self): # Verify offer throughput for database offer = created_db.read_offer() - self.assertEquals(offer.offer_throughput, offer_throughput) + self.assertEqual(offer.offer_throughput, offer_throughput) # Update database offer throughput new_offer_throughput = 2000 offer = created_db.replace_throughput(new_offer_throughput) - self.assertEquals(offer.offer_throughput, new_offer_throughput) + self.assertEqual(offer.offer_throughput, new_offer_throughput) def test_sql_query_crud(self): # create two databases. 
@@ -185,7 +184,7 @@ def test_sql_query_crud(self): def test_collection_crud(self): created_db = self.databaseForTest - collections = list(created_db.read_all_containers()) + collections = list(created_db.list_containers()) # create a collection before_create_collections_count = len(collections) collection_id = 'test_collection_crud ' + str(uuid.uuid4()) @@ -205,7 +204,7 @@ def test_collection_crud(self): self.assertEqual('consistent', created_properties['indexingPolicy']['indexingMode']) # read collections after creation - collections = list(created_db.read_all_containers()) + collections = list(created_db.list_containers()) self.assertEqual(len(collections), before_create_collections_count + 1, 'create should increase the number of collections') @@ -303,7 +302,7 @@ def test_partitioned_collection_partition_key_extraction(self): # create document without partition key being specified created_document = created_collection.create_item(body=document_definition) _retry_utility.ExecuteFunction = self.OriginalExecuteFunction - self.assertEquals(self.last_headers[1], '["WA"]') + self.assertEqual(self.last_headers[1], '["WA"]') del self.last_headers[:] self.assertEqual(created_document.get('id'), document_definition.get('id')) @@ -320,7 +319,7 @@ def test_partitioned_collection_partition_key_extraction(self): # Create document with partitionkey not present as a leaf level property but a dict created_document = created_collection1.create_item(document_definition) _retry_utility.ExecuteFunction = self.OriginalExecuteFunction - self.assertEquals(self.last_headers[1], [{}]) + self.assertEqual(self.last_headers[1], [{}]) del self.last_headers[:] #self.assertEqual(options['partitionKey'], documents.Undefined) @@ -336,7 +335,7 @@ def test_partitioned_collection_partition_key_extraction(self): # Create document with partitionkey not present in the document created_document = created_collection2.create_item(document_definition) _retry_utility.ExecuteFunction = 
self.OriginalExecuteFunction - self.assertEquals(self.last_headers[1], [{}]) + self.assertEqual(self.last_headers[1], [{}]) del self.last_headers[:] #self.assertEqual(options['partitionKey'], documents.Undefined) @@ -362,7 +361,7 @@ def test_partitioned_collection_partition_key_extraction_special_chars(self): _retry_utility.ExecuteFunction = self._MockExecuteFunction created_document = created_collection1.create_item(body=document_definition) _retry_utility.ExecuteFunction = self.OriginalExecuteFunction - self.assertEquals(self.last_headers[1], '["val1"]') + self.assertEqual(self.last_headers[1], '["val1"]') del self.last_headers[:] collection_definition2 = { @@ -390,7 +389,7 @@ def test_partitioned_collection_partition_key_extraction_special_chars(self): # create document without partition key being specified created_document = created_collection2.create_item(body=document_definition) _retry_utility.ExecuteFunction = self.OriginalExecuteFunction - self.assertEquals(self.last_headers[1], '["val2"]') + self.assertEqual(self.last_headers[1], '["val2"]') del self.last_headers[:] created_db.delete_container(created_collection1.id) @@ -540,7 +539,7 @@ def test_partitioned_collection_permissions(self): resource_tokens[urllib.quote(read_collection.id)] = (read_permission.properties['_token']) restricted_client = cosmos_client.CosmosClient( - CRUDTests.host, {'resourceTokens': resource_tokens}, "Session", CRUDTests.connectionPolicy) + CRUDTests.host, resource_tokens, "Session", connection_policy=CRUDTests.connectionPolicy) document_definition = {'id': 'document1', 'key': 1 @@ -693,7 +692,7 @@ def test_partitioned_collection_conflict_crud_and_query(self): ) # Read conflict feed doesn't requires partitionKey to be specified as it's a cross partition thing - conflictlist = list(created_collection.read_all_conflicts()) + conflictlist = list(created_collection.list_conflicts()) self.assertEqual(0, len(conflictlist)) # delete conflict here will return resource not found(404) 
since there is no conflict here @@ -809,9 +808,7 @@ def test_document_crud(self): created_collection.replace_item, replaced_document['id'], replaced_document, - None, - None, - {'type': 'IfMatch', 'condition': old_etag}, + if_match=old_etag, ) # should pass for most recent etag @@ -1043,14 +1040,14 @@ def test_user_crud(self): # create database db = self.databaseForTest # list users - users = list(db.read_all_users()) + users = list(db.list_users()) before_create_count = len(users) # create user user_id = 'new user' + str(uuid.uuid4()) user = db.create_user(body={'id': user_id}) self.assertEqual(user.id, user_id, 'user id error') # list users after creation - users = list(db.read_all_users()) + users = list(db.list_users()) self.assertEqual(len(users), before_create_count + 1) # query users results = list(db.query_users( @@ -1087,7 +1084,7 @@ def test_user_upsert(self): db = self.databaseForTest # read users and check count - users = list(db.read_all_users()) + users = list(db.list_users()) before_create_count = len(users) # create user using Upsert API @@ -1098,7 +1095,7 @@ def test_user_upsert(self): self.assertEqual(user.id, user_id, 'user id error') # read users after creation and verify updated count - users = list(db.read_all_users()) + users = list(db.list_users()) self.assertEqual(len(users), before_create_count + 1) # Should replace the user since it already exists, there is no public property to change here @@ -1111,7 +1108,7 @@ def test_user_upsert(self): 'user id should remain same') # read users after upsert and verify count doesn't increases again - users = list(db.read_all_users()) + users = list(db.list_users()) self.assertEqual(len(users), before_create_count + 1) user_properties = user.read() @@ -1125,7 +1122,7 @@ def test_user_upsert(self): self.assertEqual(new_user.id, user.id, 'user id error') # read users after upsert and verify count increases - users = list(db.read_all_users()) + users = list(db.list_users()) self.assertEqual(len(users), 
before_create_count + 2) # delete users @@ -1133,7 +1130,7 @@ def test_user_upsert(self): db.delete_user(new_user.id) # read users after delete and verify count remains the same - users = list(db.read_all_users()) + users = list(db.list_users()) self.assertEqual(len(users), before_create_count) def test_permission_crud(self): @@ -1143,7 +1140,7 @@ def test_permission_crud(self): # create user user = db.create_user(body={'id': 'new user' + str(uuid.uuid4())}) # list permissions - permissions = list(user.read_all_permissions()) + permissions = list(user.list_permissions()) before_create_count = len(permissions) permission = { 'id': 'new permission', @@ -1156,7 +1153,7 @@ def test_permission_crud(self): 'new permission', 'permission id error') # list permissions after creation - permissions = list(user.read_all_permissions()) + permissions = list(user.list_permissions()) self.assertEqual(len(permissions), before_create_count + 1) # query permissions results = list(user.query_permissions( @@ -1165,7 +1162,7 @@ def test_permission_crud(self): {'name': '@id', 'value': permission.id} ] )) - self.assert_(results) + self.assertTrue(results) # replace permission change_permission = permission.properties.copy() @@ -1196,7 +1193,7 @@ def test_permission_upsert(self): user = db.create_user(body={'id': 'new user' + str(uuid.uuid4())}) # read permissions and check count - permissions = list(user.read_all_permissions()) + permissions = list(user.list_permissions()) before_create_count = len(permissions) permission_definition = { @@ -1214,7 +1211,7 @@ def test_permission_upsert(self): 'permission id error') # read permissions after creation and verify updated count - permissions = list(user.read_all_permissions()) + permissions = list(user.list_permissions()) self.assertEqual(len(permissions), before_create_count + 1) # update permission mode @@ -1233,7 +1230,7 @@ def test_permission_upsert(self): 'permissionMode should change') # read permissions and verify count doesn't increases 
again - permissions = list(user.read_all_permissions()) + permissions = list(user.list_permissions()) self.assertEqual(len(permissions), before_create_count + 1) # update permission id @@ -1256,7 +1253,7 @@ def test_permission_upsert(self): 'permission resource should be same') # read permissions and verify count increases - permissions = list(user.read_all_permissions()) + permissions = list(user.list_permissions()) self.assertEqual(len(permissions), before_create_count + 2) # delete permissions @@ -1264,7 +1261,7 @@ def test_permission_upsert(self): user.delete_permission(new_permission.id) # read permissions and verify count remains the same - permissions = list(user.read_all_permissions()) + permissions = list(user.list_permissions()) self.assertEqual(len(permissions), before_create_count) def test_authorization(self): @@ -1350,15 +1347,15 @@ def __SetupEntities(client): return entities # Client without any authorization will fail. - client = cosmos_client.CosmosClient(CRUDTests.host, {}, "Session", CRUDTests.connectionPolicy) + client = cosmos_client.CosmosClient(CRUDTests.host, {}, "Session", connection_policy=CRUDTests.connectionPolicy) self.__AssertHTTPFailureWithStatus(StatusCodes.UNAUTHORIZED, list, - client.read_all_databases()) + client.list_databases()) # Client with master key. 
client = cosmos_client.CosmosClient(CRUDTests.host, - {'masterKey': CRUDTests.masterKey}, + CRUDTests.masterKey, "Session", - CRUDTests.connectionPolicy) + connection_policy=CRUDTests.connectionPolicy) # setup entities entities = __SetupEntities(client) resource_tokens = {} @@ -1367,7 +1364,7 @@ def __SetupEntities(client): resource_tokens[entities['doc1']['id']]= ( entities['permissionOnColl1'].properties['_token']) col1_client = cosmos_client.CosmosClient( - CRUDTests.host, {'resourceTokens': resource_tokens},"Session", CRUDTests.connectionPolicy) + CRUDTests.host, resource_tokens,"Session", connection_policy=CRUDTests.connectionPolicy) db = entities['db'] old_client_connection = db.client_connection @@ -1397,7 +1394,9 @@ def __SetupEntities(client): 'Expected to read children using parent permissions') col2_client = cosmos_client.CosmosClient( CRUDTests.host, - {'permissionFeed': [entities['permissionOnColl2'].properties]}, "Session", CRUDTests.connectionPolicy) + [entities['permissionOnColl2'].properties], + "Session", + connection_policy=CRUDTests.connectionPolicy) doc = { 'CustomProperty1': 'BBBBBB', 'customProperty2': 1000, @@ -1452,7 +1451,7 @@ def test_trigger_crud(self): {'name': '@id', 'value': trigger_definition['id']} ] )) - self.assert_(triggers) + self.assertTrue(triggers) # replace trigger change_trigger = trigger.copy() @@ -1510,7 +1509,7 @@ def test_udf_crud(self): {'name': '@id', 'value': udf_definition['id']} ] )) - self.assert_(results) + self.assertTrue(results) # replace udf change_udf = udf.copy() udf['body'] = 'function() {var x = 20;}' @@ -1855,7 +1854,11 @@ def test_create_indexing_policy_with_composite_and_spatial_indexes(self): created_container = db.create_container( id='composite_index_spatial_index' + str(uuid.uuid4()), indexing_policy=indexing_policy, - partition_key=PartitionKey(path='/id', kind='Hash') + partition_key=PartitionKey(path='/id', kind='Hash'), + headers={"Foo":"bar"}, + user_agent="blah", + user_agent_overwrite=True, 
+ logging_enable=True ) created_properties = created_container.read() read_indexing_policy = created_properties['indexingPolicy'] @@ -1954,7 +1957,7 @@ def test_client_request_timeout(self): connection_policy.RequestTimeout = 0 with self.assertRaises(Exception): # client does a getDatabaseAccount on initialization, which will time out - cosmos_client.CosmosClient(CRUDTests.host, {'masterKey': CRUDTests.masterKey}, "Session", connection_policy) + cosmos_client.CosmosClient(CRUDTests.host, CRUDTests.masterKey, "Session", connection_policy=connection_policy) def test_query_iterable_functionality(self): def __create_resources(client): @@ -2013,18 +2016,15 @@ def __create_resources(client): # Get query results page by page. results = resources['coll'].read_all_items(max_item_count=2) - first_block = results.fetch_next_block() - self.assertEqual(2, - len(first_block), - 'First block should have 2 entries.') + + page_iter = results.by_page() + first_block = list(next(page_iter)) + self.assertEqual(2, len(first_block), 'First block should have 2 entries.') self.assertEqual(resources['doc1']['id'], first_block[0]['id']) self.assertEqual(resources['doc2']['id'], first_block[1]['id']) - self.assertEqual(1, - len(results.fetch_next_block()), - 'Second block should have 1 entry.') - self.assertEqual(0, - len(results.fetch_next_block()), - 'Then its empty.') + self.assertEqual(1, len(list(next(page_iter))), 'Second block should have 1 entry.') + with self.assertRaises(StopIteration): + next(page_iter) def test_trigger_functionality(self): triggers_in_collection1 = [ @@ -2388,7 +2388,7 @@ def test_id_case_validation(self): collection_id2 = 'SampleCollection ' + uuid_string # Verify that no collections exist - collections = list(created_db.read_all_containers()) + collections = list(created_db.list_containers()) number_of_existing_collections = len(collections) # create 2 collections with different casing of IDs @@ -2404,7 +2404,7 @@ def test_id_case_validation(self): 
partition_key=PartitionKey(path='/id', kind='Hash') ) - collections = list(created_db.read_all_containers()) + collections = list(created_db.list_containers()) # verify if a total of 2 collections got created self.assertEqual(len(collections), number_of_existing_collections + 2) @@ -2452,40 +2452,40 @@ def test_get_resource_with_dictionary_and_object(self): # read database with id read_db = self.client.get_database_client(created_db.id) - self.assertEquals(read_db.id, created_db.id) + self.assertEqual(read_db.id, created_db.id) # read database with instance read_db = self.client.get_database_client(created_db) - self.assertEquals(read_db.id, created_db.id) + self.assertEqual(read_db.id, created_db.id) # read database with properties read_db = self.client.get_database_client(created_db.read()) - self.assertEquals(read_db.id, created_db.id) + self.assertEqual(read_db.id, created_db.id) created_container = self.configs.create_multi_partition_collection_if_not_exist(self.client) # read container with id read_container = created_db.get_container_client(created_container.id) - self.assertEquals(read_container.id, created_container.id) + self.assertEqual(read_container.id, created_container.id) # read container with instance read_container = created_db.get_container_client(created_container) - self.assertEquals(read_container.id, created_container.id) + self.assertEqual(read_container.id, created_container.id) # read container with properties created_properties = created_container.read() read_container = created_db.get_container_client(created_properties) - self.assertEquals(read_container.id, created_container.id) + self.assertEqual(read_container.id, created_container.id) created_item = created_container.create_item({'id':'1' + str(uuid.uuid4())}) # read item with id read_item = created_container.read_item(item=created_item['id'], partition_key=created_item['id']) - self.assertEquals(read_item['id'], created_item['id']) + self.assertEqual(read_item['id'], 
created_item['id']) # read item with properties read_item = created_container.read_item(item=created_item, partition_key=created_item['id']) - self.assertEquals(read_item['id'], created_item['id']) + self.assertEqual(read_item['id'], created_item['id']) created_sproc = created_container.scripts.create_stored_procedure({ 'id': 'storedProcedure' + str(uuid.uuid4()), @@ -2494,11 +2494,11 @@ def test_get_resource_with_dictionary_and_object(self): # read sproc with id read_sproc = created_container.scripts.get_stored_procedure(created_sproc['id']) - self.assertEquals(read_sproc['id'], created_sproc['id']) + self.assertEqual(read_sproc['id'], created_sproc['id']) # read sproc with properties read_sproc = created_container.scripts.get_stored_procedure(created_sproc) - self.assertEquals(read_sproc['id'], created_sproc['id']) + self.assertEqual(read_sproc['id'], created_sproc['id']) created_trigger = created_container.scripts.create_trigger({ 'id': 'sample trigger' + str(uuid.uuid4()), @@ -2509,11 +2509,11 @@ def test_get_resource_with_dictionary_and_object(self): # read trigger with id read_trigger = created_container.scripts.get_trigger(created_trigger['id']) - self.assertEquals(read_trigger['id'], created_trigger['id']) + self.assertEqual(read_trigger['id'], created_trigger['id']) # read trigger with properties read_trigger = created_container.scripts.get_trigger(created_trigger) - self.assertEquals(read_trigger['id'], created_trigger['id']) + self.assertEqual(read_trigger['id'], created_trigger['id']) created_udf = created_container.scripts.create_user_defined_function({ 'id': 'sample udf' + str(uuid.uuid4()), @@ -2522,11 +2522,11 @@ def test_get_resource_with_dictionary_and_object(self): # read udf with id read_udf = created_container.scripts.get_user_defined_function(created_udf['id']) - self.assertEquals(created_udf['id'], read_udf['id']) + self.assertEqual(created_udf['id'], read_udf['id']) # read udf with properties read_udf = 
created_container.scripts.get_user_defined_function(created_udf) - self.assertEquals(created_udf['id'], read_udf['id']) + self.assertEqual(created_udf['id'], read_udf['id']) created_user = created_db.create_user({ 'id': 'user' + str(uuid.uuid4()) @@ -2534,16 +2534,16 @@ def test_get_resource_with_dictionary_and_object(self): # read user with id read_user = created_db.get_user_client(created_user.id) - self.assertEquals(read_user.id, created_user.id) + self.assertEqual(read_user.id, created_user.id) # read user with instance read_user = created_db.get_user_client(created_user) - self.assertEquals(read_user.id, created_user.id) + self.assertEqual(read_user.id, created_user.id) # read user with properties created_user_properties = created_user.read() read_user = created_db.get_user_client(created_user_properties) - self.assertEquals(read_user.id, created_user.id) + self.assertEqual(read_user.id, created_user.id) created_permission = created_user.create_permission({ 'id': 'all permission' + str(uuid.uuid4()), @@ -2554,15 +2554,15 @@ def test_get_resource_with_dictionary_and_object(self): # read permission with id read_permission = created_user.get_permission(created_permission.id) - self.assertEquals(read_permission.id, created_permission.id) + self.assertEqual(read_permission.id, created_permission.id) # read permission with instance read_permission = created_user.get_permission(created_permission) - self.assertEquals(read_permission.id, created_permission.id) + self.assertEqual(read_permission.id, created_permission.id) # read permission with properties read_permission = created_user.get_permission(created_permission.properties) - self.assertEquals(read_permission.id, created_permission.id) + self.assertEqual(read_permission.id, created_permission.id) def _MockExecuteFunction(self, function, *args, **kwargs): self.last_headers.append(args[4].headers[HttpHeaders.PartitionKey] diff --git a/sdk/cosmos/azure-cosmos/test/encoding_tests.py 
b/sdk/cosmos/azure-cosmos/test/encoding_tests.py index d48f8ef9e58e..1f0c23e7a334 100644 --- a/sdk/cosmos/azure-cosmos/test/encoding_tests.py +++ b/sdk/cosmos/azure-cosmos/test/encoding_tests.py @@ -25,7 +25,7 @@ def setUpClass(cls): "'masterKey' and 'host' at the top of this class to run the " "tests.") - cls.client = cosmos_client.CosmosClient(cls.host, {'masterKey': cls.masterKey}, connection_policy=cls.connectionPolicy) + cls.client = cosmos_client.CosmosClient(cls.host, cls.masterKey, connection_policy=cls.connectionPolicy) cls.created_collection = test_config._test_config.create_multi_partition_collection_with_custom_pk_if_not_exist(cls.client) diff --git a/sdk/cosmos/azure-cosmos/test/env_test.py b/sdk/cosmos/azure-cosmos/test/env_test.py index 5210a0bc7ce2..62c0f81e665d 100644 --- a/sdk/cosmos/azure-cosmos/test/env_test.py +++ b/sdk/cosmos/azure-cosmos/test/env_test.py @@ -60,7 +60,7 @@ def setUpClass(cls): os.environ["COSMOS_ENDPOINT"] = cls.host os.environ["COSMOS_KEY"] = cls.masterKey - cls.client = cosmos_client.CosmosClient(url=cls.host, auth={'masterKey': cls.masterKey }, connection_policy=cls.connectionPolicy) + cls.client = cosmos_client.CosmosClient(url=cls.host, credential=cls.masterKey, connection_policy=cls.connectionPolicy) cls.created_db = test_config._test_config.create_database_if_not_exist(cls.client) cls.created_collection = test_config._test_config.create_single_partition_collection_if_not_exist(cls.client) diff --git a/sdk/cosmos/azure-cosmos/test/globaldb_mock_tests.py b/sdk/cosmos/azure-cosmos/test/globaldb_mock_tests.py index 5c07812625bf..886e842be63c 100644 --- a/sdk/cosmos/azure-cosmos/test/globaldb_mock_tests.py +++ b/sdk/cosmos/azure-cosmos/test/globaldb_mock_tests.py @@ -152,10 +152,14 @@ def MockExecuteFunction(self, function, *args, **kwargs): else: self.endpoint_discovery_retry_count += 1 location_changed = True - raise errors.HTTPFailure(StatusCodes.FORBIDDEN, "Forbidden", {'x-ms-substatus' : 3}) + raise 
errors.CosmosHttpResponseError( + status_code=StatusCodes.FORBIDDEN, + message="Forbidden", + response=test_config.FakeResponse({'x-ms-substatus' : 3})) def MockGetDatabaseAccountStub(self, endpoint): - raise errors.HTTPFailure(StatusCodes.SERVICE_UNAVAILABLE, "Service unavailable") + raise errors.CosmosHttpResponseError( + status_code=StatusCodes.SERVICE_UNAVAILABLE, message="Service unavailable") def MockCreateDatabase(self, client, database): self.OriginalExecuteFunction = _retry_utility.ExecuteFunction @@ -166,7 +170,7 @@ def test_globaldb_endpoint_discovery_retry_policy(self): connection_policy = documents.ConnectionPolicy() connection_policy.EnableEndpointDiscovery = True - write_location_client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_mock_tests.write_location_host, {'masterKey': Test_globaldb_mock_tests.masterKey}, connection_policy) + write_location_client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_mock_tests.write_location_host, Test_globaldb_mock_tests.masterKey, connection_policy) self.assertEqual(write_location_client._global_endpoint_manager.WriteEndpoint, Test_globaldb_mock_tests.write_location_host) self.MockCreateDatabase(write_location_client, { 'id': 'mock database' }) @@ -177,7 +181,7 @@ def test_globaldb_database_account_unavailable(self): connection_policy = documents.ConnectionPolicy() connection_policy.EnableEndpointDiscovery = True - client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_mock_tests.host, {'masterKey': Test_globaldb_mock_tests.masterKey}, connection_policy) + client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_mock_tests.host, Test_globaldb_mock_tests.masterKey, connection_policy) self.assertEqual(client._global_endpoint_manager.WriteEndpoint, Test_globaldb_mock_tests.write_location_host) self.assertEqual(client._global_endpoint_manager.ReadEndpoint, Test_globaldb_mock_tests.write_location_host) diff --git 
a/sdk/cosmos/azure-cosmos/test/globaldb_tests.py b/sdk/cosmos/azure-cosmos/test/globaldb_tests.py index e0e8eb41db7e..fc9e5d194f38 100644 --- a/sdk/cosmos/azure-cosmos/test/globaldb_tests.py +++ b/sdk/cosmos/azure-cosmos/test/globaldb_tests.py @@ -72,7 +72,7 @@ def __AssertHTTPFailureWithStatus(self, status_code, sub_status, func, *args, ** try: func(*args, **kwargs) self.assertFalse(True, 'function should fail.') - except errors.HTTPFailure as inst: + except errors.CosmosHttpResponseError as inst: self.assertEqual(inst.status_code, status_code) self.assertEqual(inst.sub_status, sub_status) @@ -86,7 +86,7 @@ def setUpClass(cls): "tests.") def setUp(self): - self.client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}) + self.client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey) # Create the test database only when it's not already present query_iterable = self.client.QueryDatabases('SELECT * FROM root r WHERE r.id=\'' + Test_globaldb_tests.test_database_id + '\'') @@ -114,7 +114,7 @@ def test_globaldb_read_write_endpoints(self): connection_policy = documents.ConnectionPolicy() connection_policy.EnableEndpointDiscovery = False - client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy) + client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey, connection_policy) document_definition = { 'id': 'doc', 'name': 'sample document', @@ -140,7 +140,7 @@ def test_globaldb_read_write_endpoints(self): connection_policy.EnableEndpointDiscovery = True document_definition['id'] = 'doc2' - client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy) + client = 
cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey, connection_policy) # When EnableEndpointDiscovery is True, WriteEndpoint is set to the write endpoint created_document = client.CreateItem(self.test_coll['_self'], document_definition) @@ -163,7 +163,7 @@ def test_globaldb_endpoint_discovery(self): connection_policy = documents.ConnectionPolicy() connection_policy.EnableEndpointDiscovery = False - read_location_client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.read_location_host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy) + read_location_client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.read_location_host, Test_globaldb_tests.masterKey, connection_policy) document_definition = { 'id': 'doc', 'name': 'sample document', @@ -187,7 +187,7 @@ def test_globaldb_endpoint_discovery(self): })) connection_policy.EnableEndpointDiscovery = True - read_location_client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.read_location_host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy) + read_location_client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.read_location_host, Test_globaldb_tests.masterKey, connection_policy) # CreateDocument call will go to the WriteEndpoint as EnableEndpointDiscovery is set to True and client will resolve the right endpoint based on the operation created_document = read_location_client.CreateItem(self.test_coll['_self'], document_definition) @@ -197,7 +197,7 @@ def test_globaldb_preferred_locations(self): connection_policy = documents.ConnectionPolicy() connection_policy.EnableEndpointDiscovery = True - client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy) + client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, 
Test_globaldb_tests.masterKey, connection_policy) document_definition = { 'id': 'doc', 'name': 'sample document', @@ -220,7 +220,7 @@ def test_globaldb_preferred_locations(self): self.assertEqual(client.ReadEndpoint, Test_globaldb_tests.write_location_host) connection_policy.PreferredLocations = [Test_globaldb_tests.read_location2] - client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy) + client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey, connection_policy) document_definition['id'] = 'doc2' created_document = client.CreateItem(self.test_coll['_self'], document_definition) @@ -242,28 +242,28 @@ def test_globaldb_endpoint_assignments(self): connection_policy = documents.ConnectionPolicy() connection_policy.EnableEndpointDiscovery = False - client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy) + client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey, connection_policy) # When EnableEndpointDiscovery is set to False, both Read and Write Endpoints point to endpoint passed while creating the client instance self.assertEqual(client._global_endpoint_manager.WriteEndpoint, Test_globaldb_tests.host) self.assertEqual(client._global_endpoint_manager.ReadEndpoint, Test_globaldb_tests.host) connection_policy.EnableEndpointDiscovery = True - client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy) + client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey, connection_policy) # If no preferred locations is set, we return the write endpoint as ReadEndpoint for better latency performance, write endpoint is set as expected 
self.assertEqual(client._global_endpoint_manager.WriteEndpoint, Test_globaldb_tests.write_location_host) self.assertEqual(client._global_endpoint_manager.ReadEndpoint, Test_globaldb_tests.write_location_host) connection_policy.PreferredLocations = [Test_globaldb_tests.read_location2] - client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy) + client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey, connection_policy) # Test that the preferred location is set as ReadEndpoint instead of default write endpoint when no preference is set self.assertEqual(client._global_endpoint_manager.WriteEndpoint, Test_globaldb_tests.write_location_host) self.assertEqual(client._global_endpoint_manager.ReadEndpoint, Test_globaldb_tests.read_location2_host) def test_globaldb_update_locations_cache(self): - client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}) + client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey) writable_locations = [{'name' : Test_globaldb_tests.write_location, 'databaseAccountEndpoint' : Test_globaldb_tests.write_location_host}] readable_locations = [{'name' : Test_globaldb_tests.read_location, 'databaseAccountEndpoint' : Test_globaldb_tests.read_location_host}, {'name' : Test_globaldb_tests.read_location2, 'databaseAccountEndpoint' : Test_globaldb_tests.read_location2_host}] @@ -307,7 +307,7 @@ def test_globaldb_update_locations_cache(self): connection_policy = documents.ConnectionPolicy() connection_policy.PreferredLocations = [Test_globaldb_tests.read_location2] - client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy) + client = 
cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey, connection_policy) write_endpoint, read_endpoint = client._global_endpoint_manager.UpdateLocationsCache(writable_locations, readable_locations) @@ -321,7 +321,7 @@ def test_globaldb_update_locations_cache(self): connection_policy = documents.ConnectionPolicy() connection_policy.PreferredLocations = [Test_globaldb_tests.read_location2] - client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy) + client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey, connection_policy) write_endpoint, read_endpoint = client._global_endpoint_manager.UpdateLocationsCache(writable_locations, readable_locations) @@ -333,7 +333,7 @@ def test_globaldb_update_locations_cache(self): readable_locations = [{'name' : Test_globaldb_tests.read_location, 'databaseAccountEndpoint' : Test_globaldb_tests.read_location_host}, {'name' : Test_globaldb_tests.read_location2, 'databaseAccountEndpoint' : Test_globaldb_tests.read_location2_host}] connection_policy.EnableEndpointDiscovery = False - client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy) + client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey, connection_policy) write_endpoint, read_endpoint = client._global_endpoint_manager.UpdateLocationsCache(writable_locations, readable_locations) @@ -357,7 +357,7 @@ def test_globaldb_locational_endpoint_parser(self): self.assertEqual(locational_endpoint, 'https://contoso-EastUS.documents.azure.com:443/') def test_globaldb_endpoint_discovery_retry_policy_mock(self): - client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}) + client = 
cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey) self.OriginalExecuteFunction = _retry_utility.ExecuteFunction _retry_utility.ExecuteFunction = self._MockExecuteFunction @@ -385,7 +385,11 @@ def test_globaldb_endpoint_discovery_retry_policy_mock(self): _retry_utility.ExecuteFunction = self.OriginalExecuteFunction def _MockExecuteFunction(self, function, *args, **kwargs): - raise errors.HTTPFailure(StatusCodes.FORBIDDEN, "Write Forbidden", {'x-ms-substatus' : SubStatusCodes.WRITE_FORBIDDEN}) + response = test_config.FakeResponse({'x-ms-substatus' : SubStatusCodes.WRITE_FORBIDDEN}) + raise errors.CosmosHttpResponseError( + status_code=StatusCodes.FORBIDDEN, + message="Write Forbidden", + response=response) def _MockGetDatabaseAccount(self, url_conection): database_account = documents.DatabaseAccount() diff --git a/sdk/cosmos/azure-cosmos/test/location_cache_tests.py b/sdk/cosmos/azure-cosmos/test/location_cache_tests.py index a2772ce270e8..7b5479bf22bf 100644 --- a/sdk/cosmos/azure-cosmos/test/location_cache_tests.py +++ b/sdk/cosmos/azure-cosmos/test/location_cache_tests.py @@ -11,10 +11,12 @@ import azure.cosmos.errors as errors from azure.cosmos.http_constants import StatusCodes, SubStatusCodes, HttpHeaders from azure.cosmos import _retry_utility +import test_config import six pytestmark = pytest.mark.cosmosEmulator + class RefreshThread(threading.Thread): def __init__(self, group=None, target=None, name=None, args=(), kwargs=None, verbose=None): @@ -87,7 +89,7 @@ def validate_retry_on_session_not_availabe_with_endpoint_discovery_disabled(self else: client.CreateItem("dbs/mydb/colls/mycoll/", {'id':'1'}) self.fail() - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: # not retried self.assertEqual(self.counter, 1) self.counter = 0 @@ -99,7 +101,11 @@ def validate_retry_on_session_not_availabe_with_endpoint_discovery_disabled(self def _MockExecuteFunctionSessionReadFailureOnce(self, 
function, *args, **kwargs): self.counter += 1 - raise errors.HTTPFailure(StatusCodes.NOT_FOUND, "Read Session not available", {HttpHeaders.SubStatus: SubStatusCodes.READ_SESSION_NOTAVAILABLE}) + response = test_config.FakeResponse({HttpHeaders.SubStatus: SubStatusCodes.READ_SESSION_NOTAVAILABLE}) + raise errors.CosmosHttpResponseError( + status_code=StatusCodes.NOT_FOUND, + message="Read Session not available", + response=response) def test_validate_retry_on_session_not_availabe_with_endpoint_discovery_enabled(self): # sequence of chosen endpoints: @@ -128,7 +134,7 @@ def validate_retry_on_session_not_availabe(self, is_preferred_locations_list_emp try: client.ReadItem("dbs/mydb/colls/mycoll/docs/1") - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: # not retried self.assertEqual(self.counter, 4 if use_multiple_write_locations else 2) self.counter = 0 @@ -160,7 +166,11 @@ def _MockExecuteFunctionSessionReadFailureTwice(self, function, *args, **kwargs) self.assertTrue(request.should_clear_session_token_on_session_read_failure) self.assertEqual(expected_endpoint, request.location_endpoint_to_route) self.counter += 1 - raise errors.HTTPFailure(StatusCodes.NOT_FOUND, "Read Session not available", {HttpHeaders.SubStatus: SubStatusCodes.READ_SESSION_NOTAVAILABLE}) + response = test_config.FakeResponse({HttpHeaders.SubStatus: SubStatusCodes.READ_SESSION_NOTAVAILABLE}) + raise errors.CosmosHttpResponseError( + status_code=StatusCodes.NOT_FOUND, + message="Read Session not available", + response=response) def test_validate_location_cache(self): self.original_get_database_account = cosmos_client_connection.CosmosClientConnection.GetDatabaseAccount diff --git a/sdk/cosmos/azure-cosmos/test/multiOrderbyTests.py b/sdk/cosmos/azure-cosmos/test/multiOrderbyTests.py index f10493281e99..b68db27b520e 100644 --- a/sdk/cosmos/azure-cosmos/test/multiOrderbyTests.py +++ b/sdk/cosmos/azure-cosmos/test/multiOrderbyTests.py @@ -61,7 +61,7 @@ class 
MultiOrderbyTests(unittest.TestCase): @classmethod def setUpClass(cls): - cls.client = cosmos_client.CosmosClient(cls.host, {'masterKey': cls.masterKey}, "Session", cls.connectionPolicy) + cls.client = cosmos_client.CosmosClient(cls.host, cls.masterKey, "Session", connection_policy=cls.connectionPolicy) cls.database = test_config._test_config.create_database_if_not_exist(cls.client) def generate_multi_orderby_item(self): diff --git a/sdk/cosmos/azure-cosmos/test/multimaster_tests.py b/sdk/cosmos/azure-cosmos/test/multimaster_tests.py index e58dff33a4ae..efe8c3f7619d 100644 --- a/sdk/cosmos/azure-cosmos/test/multimaster_tests.py +++ b/sdk/cosmos/azure-cosmos/test/multimaster_tests.py @@ -36,8 +36,8 @@ def _validate_tentative_write_headers(self): connectionPolicy = MultiMasterTests.connectionPolicy connectionPolicy.UseMultipleWriteLocations = True - client = cosmos_client.CosmosClient(MultiMasterTests.host, {'masterKey': MultiMasterTests.masterKey}, "Session", - connectionPolicy) + client = cosmos_client.CosmosClient(MultiMasterTests.host, MultiMasterTests.masterKey, "Session", + connection_policy=connectionPolicy) created_db = client.create_database(id='multi_master_tests ' + str(uuid.uuid4())) diff --git a/sdk/cosmos/azure-cosmos/test/orderby_tests.py b/sdk/cosmos/azure-cosmos/test/orderby_tests.py index 7b84ed856aaa..ba1eb99b3c17 100644 --- a/sdk/cosmos/azure-cosmos/test/orderby_tests.py +++ b/sdk/cosmos/azure-cosmos/test/orderby_tests.py @@ -22,6 +22,7 @@ import unittest import uuid import pytest +from azure.core.paging import ItemPaged import azure.cosmos.documents as documents from azure.cosmos.partition_key import PartitionKey import azure.cosmos.cosmos_client as cosmos_client @@ -61,7 +62,7 @@ def setUpClass(cls): "'masterKey' and 'host' at the top of this class to run the " "tests.") - cls.client = cosmos_client.CosmosClient(cls.host, {'masterKey': cls.masterKey}, "Session", cls.connectionPolicy) + cls.client = cosmos_client.CosmosClient(cls.host, 
cls.masterKey, "Session", connection_policy=cls.connectionPolicy) cls.created_db = test_config._test_config.create_database_if_not_exist(cls.client) cls.created_collection = CrossPartitionTopOrderByTest.create_collection(cls.client, cls.created_db) cls.collection_link = cls.GetDocumentCollectionLink(cls.created_db, cls.created_collection) @@ -431,8 +432,15 @@ def find_docs_by_partition_key_range_id(self): collection_id = base.GetResourceIdOrFullNameFromLink(self.collection_link) def fetch_fn(options): return self.client.client_connection.QueryFeed(path, collection_id, query, options, r['id']) - docResultsIterable = query_iterable.QueryIterable(self.client.client_connection, query, options, fetch_fn, self.collection_link) - + docResultsIterable = ItemPaged( + self.client.client_connection, + query, + options, + fetch_function=fetch_fn, + collection_link=self.collection_link, + page_iterator_class=query_iterable.QueryIterable + ) + docs = list(docResultsIterable) self.assertFalse(r['id'] in docs_by_partition_key_range_id) docs_by_partition_key_range_id[r['id']] = docs @@ -448,7 +456,8 @@ def execute_query_and_validate_results(self, query, expected_ordered_ids): max_item_count=page_size ) - self.assertTrue(isinstance(result_iterable, query_iterable.QueryIterable)) + self.assertTrue(isinstance(result_iterable, ItemPaged)) + self.assertEqual(result_iterable._page_iterator_class, query_iterable.QueryIterable) ###################################### # test next() behavior @@ -466,14 +475,15 @@ def invokeNext(): self.assertRaises(StopIteration, invokeNext) ###################################### - # test fetch_next_block() behavior + # test by_page() behavior ###################################### results = {} cnt = 0 - while True: - fetched_res = result_iterable.fetch_next_block() + page_iter = result_iterable.by_page() + for page in page_iter: + fetched_res = list(page) fetched_size = len(fetched_res) - + for item in fetched_res: self.assertEqual(item['id'], 
expected_ordered_ids[cnt]) results[cnt] = item @@ -487,12 +497,14 @@ def invokeNext(): else: #cnt > expected_number_of_results self.fail("more results than expected") + # validate the number of collected results self.assertEqual(len(results), len(expected_ordered_ids)) # no more results will be returned - self.assertEqual(result_iterable.fetch_next_block(), []) + with self.assertRaises(StopIteration): + next(page_iter) @classmethod def create_collection(self, client, created_db): diff --git a/sdk/cosmos/azure-cosmos/test/partition_key_tests.py b/sdk/cosmos/azure-cosmos/test/partition_key_tests.py index cccd4ca3ae97..026b1f30ae56 100644 --- a/sdk/cosmos/azure-cosmos/test/partition_key_tests.py +++ b/sdk/cosmos/azure-cosmos/test/partition_key_tests.py @@ -49,7 +49,7 @@ def tearDownClass(cls): @classmethod def setUpClass(cls): - cls.client = cosmos_client.CosmosClient(cls.host, {'masterKey': cls.masterKey}, "Session", cls.connectionPolicy) + cls.client = cosmos_client.CosmosClient(cls.host, cls.masterKey, "Session", connection_policy=cls.connectionPolicy) cls.created_db = test_config._test_config.create_database_if_not_exist(cls.client) cls.created_collection = test_config._test_config.create_multi_partition_collection_with_custom_pk_if_not_exist(cls.client) @@ -128,25 +128,25 @@ def test_non_partitioned_collection_operations(self): # Pass partitionKey.Empty as partition key to access documents from a single partition collection with v 2018-12-31 SDK read_item = created_container.read_item(self.created_document['id'], partition_key=partition_key.NonePartitionKeyValue) - self.assertEquals(read_item['id'], self.created_document['id']) + self.assertEqual(read_item['id'], self.created_document['id']) document_definition = {'id': str(uuid.uuid4())} created_item = created_container.create_item(body=document_definition) - self.assertEquals(created_item['id'], document_definition['id']) + self.assertEqual(created_item['id'], document_definition['id']) read_item = 
created_container.read_item(created_item['id'], partition_key=partition_key.NonePartitionKeyValue) - self.assertEquals(read_item['id'], created_item['id']) + self.assertEqual(read_item['id'], created_item['id']) document_definition_for_replace = {'id': str(uuid.uuid4())} replaced_item = created_container.replace_item(created_item['id'], body=document_definition_for_replace) - self.assertEquals(replaced_item['id'], document_definition_for_replace['id']) + self.assertEqual(replaced_item['id'], document_definition_for_replace['id']) upserted_item = created_container.upsert_item(body=document_definition) - self.assertEquals(upserted_item['id'], document_definition['id']) + self.assertEqual(upserted_item['id'], document_definition['id']) # one document was created during setup, one with create (which was replaced) and one with upsert items = list(created_container.query_items("SELECT * from c", partition_key=partition_key.NonePartitionKeyValue)) - self.assertEquals(len(items), 3) + self.assertEqual(len(items), 3) document_created_by_sproc_id = 'testDoc' sproc = { @@ -170,7 +170,7 @@ def test_non_partitioned_collection_operations(self): # 3 previous items + 1 created from the sproc items = list(created_container.read_all_items()) - self.assertEquals(len(items), 4) + self.assertEqual(len(items), 4) created_container.delete_item(upserted_item['id'], partition_key=partition_key.NonePartitionKeyValue) created_container.delete_item(replaced_item['id'], partition_key=partition_key.NonePartitionKeyValue) @@ -178,13 +178,13 @@ def test_non_partitioned_collection_operations(self): created_container.delete_item(self.created_document['id'], partition_key=partition_key.NonePartitionKeyValue) items = list(created_container.read_all_items()) - self.assertEquals(len(items), 0) + self.assertEqual(len(items), 0) def test_multi_partition_collection_read_document_with_no_pk(self): document_definition = {'id': str(uuid.uuid4())} self.created_collection.create_item(body=document_definition) 
read_item = self.created_collection.read_item(item=document_definition['id'], partition_key=partition_key.NonePartitionKeyValue) - self.assertEquals(read_item['id'], document_definition['id']) + self.assertEqual(read_item['id'], document_definition['id']) self.created_collection.delete_item(item=document_definition['id'], partition_key=partition_key.NonePartitionKeyValue) def test_hash_v2_partition_key_definition(self): @@ -193,7 +193,7 @@ def test_hash_v2_partition_key_definition(self): partition_key=partition_key.PartitionKey(path="/id", kind="Hash") ) created_container_properties = created_container.read() - self.assertEquals(created_container_properties['partitionKey']['version'], 2) + self.assertEqual(created_container_properties['partitionKey']['version'], 2) self.created_db.delete_container(created_container) created_container = self.created_db.create_container( @@ -201,7 +201,7 @@ def test_hash_v2_partition_key_definition(self): partition_key=partition_key.PartitionKey(path="/id", kind="Hash", version=2) ) created_container_properties = created_container.read() - self.assertEquals(created_container_properties['partitionKey']['version'], 2) + self.assertEqual(created_container_properties['partitionKey']['version'], 2) self.created_db.delete_container(created_container) def test_hash_v1_partition_key_definition(self): @@ -210,5 +210,5 @@ def test_hash_v1_partition_key_definition(self): partition_key=partition_key.PartitionKey(path="/id", kind="Hash", version=1) ) created_container_properties = created_container.read() - self.assertEquals(created_container_properties['partitionKey']['version'], 1) + self.assertEqual(created_container_properties['partitionKey']['version'], 1) self.created_db.delete_container(created_container) diff --git a/sdk/cosmos/azure-cosmos/test/query_execution_context_tests.py b/sdk/cosmos/azure-cosmos/test/query_execution_context_tests.py index f8138b6d5a8a..9b73a4f738d9 100644 --- 
a/sdk/cosmos/azure-cosmos/test/query_execution_context_tests.py +++ b/sdk/cosmos/azure-cosmos/test/query_execution_context_tests.py @@ -58,9 +58,9 @@ def setUpClass(cls): "tests.") cls.client = cosmos_client.CosmosClient(QueryExecutionContextEndToEndTests.host, - {'masterKey': QueryExecutionContextEndToEndTests.masterKey}, - "Session", - QueryExecutionContextEndToEndTests.connectionPolicy) + QueryExecutionContextEndToEndTests.masterKey, + "Session", + connection_policy=QueryExecutionContextEndToEndTests.connectionPolicy) cls.created_db = test_config._test_config.create_database_if_not_exist(cls.client) cls.created_collection = cls.create_collection(cls.created_db) cls.document_definitions = [] diff --git a/sdk/cosmos/azure-cosmos/test/query_tests.py b/sdk/cosmos/azure-cosmos/test/query_tests.py index ca6c8a377fc0..862263f07d59 100644 --- a/sdk/cosmos/azure-cosmos/test/query_tests.py +++ b/sdk/cosmos/azure-cosmos/test/query_tests.py @@ -25,7 +25,7 @@ def setUpClass(cls): "'masterKey' and 'host' at the top of this class to run the " "tests.") - cls.client = cosmos_client.CosmosClient(cls.host, {'masterKey': cls.masterKey}, connection_policy=cls.connectionPolicy) + cls.client = cosmos_client.CosmosClient(cls.host, cls.masterKey, connection_policy=cls.connectionPolicy) cls.created_db = cls.config.create_database_if_not_exist(cls.client) def test_first_and_last_slashes_trimmed_for_query_string (self): @@ -56,7 +56,7 @@ def test_query_change_feed(self): iter_list = list(query_iterable) self.assertEqual(len(iter_list), 0) self.assertTrue('etag' in created_collection.client_connection.last_response_headers) - self.assertNotEquals(created_collection.client_connection.last_response_headers['etag'], '') + self.assertNotEqual(created_collection.client_connection.last_response_headers['etag'], '') # Read change feed from beginning should return an empty list query_iterable = created_collection.query_items_change_feed( @@ -67,7 +67,7 @@ def test_query_change_feed(self): 
self.assertEqual(len(iter_list), 0) self.assertTrue('etag' in created_collection.client_connection.last_response_headers) continuation1 = created_collection.client_connection.last_response_headers['etag'] - self.assertNotEquals(continuation1, '') + self.assertNotEqual(continuation1, '') # Create a document. Read change feed should return be able to read that document document_definition = {'pk': 'pk', 'id':'doc1'} @@ -81,8 +81,8 @@ def test_query_change_feed(self): self.assertEqual(iter_list[0]['id'], 'doc1') self.assertTrue('etag' in created_collection.client_connection.last_response_headers) continuation2 = created_collection.client_connection.last_response_headers['etag'] - self.assertNotEquals(continuation2, '') - self.assertNotEquals(continuation2, continuation1) + self.assertNotEqual(continuation2, '') + self.assertNotEqual(continuation2, continuation1) # Create two new documents. Verify that change feed contains the 2 new documents # with page size 1 and page size 100 @@ -105,7 +105,7 @@ def test_query_change_feed(self): actual_ids += item['id'] + '.' self.assertEqual(actual_ids, expected_ids) - # verify fetch_next_block + # verify by_page # the options is not copied, therefore it need to be restored query_iterable = created_collection.query_items_change_feed( partition_key_range_id=pkRangeId, @@ -115,19 +115,16 @@ def test_query_change_feed(self): count = 0 expected_count = 2 all_fetched_res = [] - while (True): - fetched_res = query_iterable.fetch_next_block() - self.assertEquals(len(fetched_res), min(pageSize, expected_count - count)) + for page in query_iterable.by_page(): + fetched_res = list(page) + self.assertEqual(len(fetched_res), min(pageSize, expected_count - count)) count += len(fetched_res) all_fetched_res.extend(fetched_res) - if len(fetched_res) == 0: - break + actual_ids = '' for item in all_fetched_res: actual_ids += item['id'] + '.' 
self.assertEqual(actual_ids, expected_ids) - # verify there's no more results - self.assertEquals(query_iterable.fetch_next_block(), []) # verify reading change feed from the beginning query_iterable = created_collection.query_items_change_feed( @@ -138,7 +135,7 @@ def test_query_change_feed(self): it = query_iterable.__iter__() for i in range(0, len(expected_ids)): doc = next(it) - self.assertEquals(doc['id'], expected_ids[i]) + self.assertEqual(doc['id'], expected_ids[i]) self.assertTrue('etag' in created_collection.client_connection.last_response_headers) continuation3 = created_collection.client_connection.last_response_headers['etag'] @@ -203,11 +200,10 @@ def validate_query_requests_count(self, query_iterable, expected_count): self.count = 0 self.OriginalExecuteFunction = retry_utility.ExecuteFunction retry_utility.ExecuteFunction = self._MockExecuteFunction - block = query_iterable.fetch_next_block() - while block: - block = query_iterable.fetch_next_block() + for block in query_iterable.by_page(): + assert len(list(block)) != 0 retry_utility.ExecuteFunction = self.OriginalExecuteFunction - self.assertEquals(self.count, expected_count) + self.assertEqual(self.count, expected_count) self.count = 0 def _MockExecuteFunction(self, function, *args, **kwargs): diff --git a/sdk/cosmos/azure-cosmos/test/retry_policy_tests.py b/sdk/cosmos/azure-cosmos/test/retry_policy_tests.py index f25dfce77e78..77b50ea7e3d7 100644 --- a/sdk/cosmos/azure-cosmos/test/retry_policy_tests.py +++ b/sdk/cosmos/azure-cosmos/test/retry_policy_tests.py @@ -58,7 +58,7 @@ def __AssertHTTPFailureWithStatus(self, status_code, func, *args, **kwargs): try: func(*args, **kwargs) self.assertFalse(True, 'function should fail.') - except errors.HTTPFailure as inst: + except errors.CosmosHttpResponseError as inst: self.assertEqual(inst.status_code, status_code) @classmethod @@ -70,7 +70,7 @@ def setUpClass(cls): "'masterKey' and 'host' at the top of this class to run the " "tests.") - cls.client = 
cosmos_client.CosmosClient(cls.host, {'masterKey': cls.masterKey}, "Session", cls.connectionPolicy) + cls.client = cosmos_client.CosmosClient(cls.host, cls.masterKey, "Session", connection_policy=cls.connectionPolicy) cls.created_collection = test_config._test_config.create_single_partition_collection_if_not_exist(cls.client) cls.retry_after_in_milliseconds = 1000 @@ -88,7 +88,7 @@ def test_resource_throttle_retry_policy_default_retry_after(self): try: self.created_collection.create_item(body=document_definition) - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: self.assertEqual(e.status_code, StatusCodes.TOO_MANY_REQUESTS) self.assertEqual(connection_policy.RetryOptions.MaxRetryAttemptCount, self.created_collection.client_connection.last_response_headers[HttpHeaders.ThrottleRetryCount]) self.assertGreaterEqual( self.created_collection.client_connection.last_response_headers[HttpHeaders.ThrottleRetryWaitTimeInMs], @@ -110,7 +110,7 @@ def test_resource_throttle_retry_policy_fixed_retry_after(self): try: self.created_collection.create_item(body=document_definition) - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: self.assertEqual(e.status_code, StatusCodes.TOO_MANY_REQUESTS) self.assertEqual(connection_policy.RetryOptions.MaxRetryAttemptCount, self.created_collection.client_connection.last_response_headers[HttpHeaders.ThrottleRetryCount]) self.assertGreaterEqual(self.created_collection.client_connection.last_response_headers[HttpHeaders.ThrottleRetryWaitTimeInMs], @@ -133,7 +133,7 @@ def test_resource_throttle_retry_policy_max_wait_time(self): try: self.created_collection.create_item(body=document_definition) - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: self.assertEqual(e.status_code, StatusCodes.TOO_MANY_REQUESTS) self.assertGreaterEqual(self.created_collection.client_connection.last_response_headers[HttpHeaders.ThrottleRetryWaitTimeInMs], 
connection_policy.RetryOptions.MaxWaitTimeInSeconds * 1000) @@ -162,7 +162,7 @@ def test_resource_throttle_retry_policy_query(self): { 'name':'@id', 'value':document_definition['id'] } ] })) - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: self.assertEqual(e.status_code, StatusCodes.TOO_MANY_REQUESTS) self.assertEqual(connection_policy.RetryOptions.MaxRetryAttemptCount, self.created_collection.client_connection.last_response_headers[HttpHeaders.ThrottleRetryCount]) @@ -233,7 +233,7 @@ def test_default_retry_policy_for_create(self): created_document = {} try : created_document = self.created_collection.create_item(body=document_definition) - except errors.HTTPFailure as err: + except errors.CosmosHttpResponseError as err: self.assertEqual(err.status_code, 10054) self.assertDictEqual(created_document, {}) @@ -244,7 +244,12 @@ def test_default_retry_policy_for_create(self): _retry_utility.ExecuteFunction = original_execute_function def _MockExecuteFunction(self, function, *args, **kwargs): - raise errors.HTTPFailure(StatusCodes.TOO_MANY_REQUESTS, "Request rate is too large", {HttpHeaders.RetryAfterInMilliseconds: self.retry_after_in_milliseconds}) + response = test_config.FakeResponse({HttpHeaders.RetryAfterInMilliseconds: self.retry_after_in_milliseconds}) + raise errors.CosmosHttpResponseError( + status_code=StatusCodes.TOO_MANY_REQUESTS, + message="Request rate is too large", + response=response) + class MockExecuteFunctionConnectionReset(object): @@ -257,7 +262,10 @@ def __call__(self, func, *args, **kwargs): if self.counter % 3 == 0: return self.org_func(func, *args, **kwargs) else: - raise errors.HTTPFailure(10054, "Connection was reset", {}) + raise errors.CosmosHttpResponseError( + status_code=10054, + message="Connection was reset", + response=test_config.FakeResponse({})) if __name__ == '__main__': diff --git a/sdk/cosmos/azure-cosmos/test/routing_map_tests.py b/sdk/cosmos/azure-cosmos/test/routing_map_tests.py index 
f2f396700be2..ac1fc549d175 100644 --- a/sdk/cosmos/azure-cosmos/test/routing_map_tests.py +++ b/sdk/cosmos/azure-cosmos/test/routing_map_tests.py @@ -55,7 +55,7 @@ def setUpClass(cls): "'masterKey' and 'host' at the top of this class to run the " "tests.") - cls.client = cosmos_client.CosmosClient(cls.host, {'masterKey': cls.masterKey}, connection_policy=cls.connectionPolicy) + cls.client = cosmos_client.CosmosClient(cls.host, cls.masterKey, connection_policy=cls.connectionPolicy) cls.collection_link = test_config._test_config.create_multi_partition_collection_with_custom_pk_if_not_exist(cls.client).container_link def test_read_partition_key_ranges(self): diff --git a/sdk/cosmos/azure-cosmos/test/session_container_tests.py b/sdk/cosmos/azure-cosmos/test/session_container_tests.py index 035df68e94ac..4b2ff5513976 100644 --- a/sdk/cosmos/azure-cosmos/test/session_container_tests.py +++ b/sdk/cosmos/azure-cosmos/test/session_container_tests.py @@ -37,7 +37,7 @@ class Test_session_container(unittest.TestCase): connectionPolicy = test_config._test_config.connectionPolicy def setUp(self): - self.client = cosmos_client.CosmosClient(self.host, {'masterKey': self.masterkey}, "Session", connection_policy=self.connectionPolicy) + self.client = cosmos_client.CosmosClient(self.host, self.masterkey, "Session", connection_policy=self.connectionPolicy) self.session = self.client.client_connection.Session def tearDown(self): diff --git a/sdk/cosmos/azure-cosmos/test/session_tests.py b/sdk/cosmos/azure-cosmos/test/session_tests.py index f3e7e3e9fe93..c74506248f6f 100644 --- a/sdk/cosmos/azure-cosmos/test/session_tests.py +++ b/sdk/cosmos/azure-cosmos/test/session_tests.py @@ -33,7 +33,7 @@ def setUpClass(cls): "'masterKey' and 'host' at the top of this class to run the " "tests.") - cls.client = cosmos_client.CosmosClient(cls.host, {'masterKey': cls.masterKey}, connection_policy=cls.connectionPolicy) + cls.client = cosmos_client.CosmosClient(cls.host, cls.masterKey, 
connection_policy=cls.connectionPolicy) cls.created_db = test_config._test_config.create_database_if_not_exist(cls.client) cls.created_collection = test_config._test_config.create_multi_partition_collection_with_custom_pk_if_not_exist(cls.client) @@ -57,7 +57,11 @@ def test_session_token_not_sent_for_master_resource_ops (self): synchronized_request._Request = self._OriginalRequest def _MockExecuteFunctionSessionReadFailureOnce(self, function, *args, **kwargs): - raise errors.HTTPFailure(StatusCodes.NOT_FOUND, "Read Session not available", {HttpHeaders.SubStatus: SubStatusCodes.READ_SESSION_NOTAVAILABLE}) + response = test_config.FakeResponse({HttpHeaders.SubStatus: SubStatusCodes.READ_SESSION_NOTAVAILABLE}) + raise errors.CosmosHttpResponseError( + status_code=StatusCodes.NOT_FOUND, + message="Read Session not available", + response=response) def test_clear_session_token(self): created_document = self.created_collection.create_item(body={'id': '1' + str(uuid.uuid4()), 'pk': 'mypk'}) @@ -66,7 +70,7 @@ def test_clear_session_token(self): _retry_utility.ExecuteFunction = self._MockExecuteFunctionSessionReadFailureOnce try: self.created_collection.read_item(item=created_document['id'], partition_key='mypk') - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: self.assertEqual(self.client.client_connection.session.get_session_token( 'dbs/' + self.created_db.id + '/colls/' + self.created_collection.id), "") self.assertEqual(e.status_code, StatusCodes.NOT_FOUND) @@ -84,7 +88,7 @@ def test_internal_server_error_raised_for_invalid_session_token_received_from_se try: self.created_collection.create_item(body={'id': '1' + str(uuid.uuid4()), 'pk': 'mypk'}) self.fail() - except errors.HTTPFailure as e: - self.assertEqual(e._http_error_message, "Could not parse the received session token: 2") + except errors.CosmosHttpResponseError as e: + self.assertEqual(e.http_error_message, "Could not parse the received session token: 2") 
self.assertEqual(e.status_code, StatusCodes.INTERNAL_SERVER_ERROR) _retry_utility.ExecuteFunction = self.OriginalExecuteFunction diff --git a/sdk/cosmos/azure-cosmos/test/session_token_unit_tests.py b/sdk/cosmos/azure-cosmos/test/session_token_unit_tests.py index 76e9482ade4f..91fec45ade57 100644 --- a/sdk/cosmos/azure-cosmos/test/session_token_unit_tests.py +++ b/sdk/cosmos/azure-cosmos/test/session_token_unit_tests.py @@ -1,7 +1,7 @@ import unittest import pytest from azure.cosmos._vector_session_token import VectorSessionToken -from azure.cosmos.errors import CosmosError +from azure.cosmos.errors import CosmosHttpResponseError pytestmark = pytest.mark.cosmosEmulator @@ -11,7 +11,7 @@ class SessionTokenUnitTest(unittest.TestCase): def test_validate_successful_session_token_parsing(self): #valid session token session_token = "1#100#1=20#2=5#3=30" - self.assertEquals(VectorSessionToken.create(session_token).convert_to_string(), "1#100#1=20#2=5#3=30") + self.assertEqual(VectorSessionToken.create(session_token).convert_to_string(), "1#100#1=20#2=5#3=30") def test_validate_session_token_parsing_with_invalid_version(self): session_token = "foo#100#1=20#2=5#3=30" @@ -76,5 +76,5 @@ def test_validate_session_token_comparison(self): try: session_token1.merge(session_token2) self.fail("Region progress can not be different when version is same") - except CosmosError as e: - self.assertEquals(str(e), "Status Code: 500. 
Compared session tokens '1#101#1=20#2=5#3=30' and '1#100#1=20#2=5#3=30#4=40' have unexpected regions.") + except CosmosHttpResponseError as e: + self.assertEqual(str(e), "Status code: 500\nCompared session tokens '1#101#1=20#2=5#3=30' and '1#100#1=20#2=5#3=30#4=40' have unexpected regions.") diff --git a/sdk/cosmos/azure-cosmos/test/streaming_failover_test.py b/sdk/cosmos/azure-cosmos/test/streaming_failover_test.py index 73ab656627b2..2f92dd8a92f5 100644 --- a/sdk/cosmos/azure-cosmos/test/streaming_failover_test.py +++ b/sdk/cosmos/azure-cosmos/test/streaming_failover_test.py @@ -3,6 +3,7 @@ import pytest import azure.cosmos.documents as documents import azure.cosmos.errors as errors +import test_config from azure.cosmos.http_constants import HttpHeaders, StatusCodes, SubStatusCodes from azure.cosmos import _retry_utility from azure.cosmos import _endpoint_discovery_retry_policy @@ -83,7 +84,11 @@ def _MockExecuteFunctionEndpointDiscover(self, function, *args, **kwargs): return ({}, {}) else: self.endpoint_sequence.append(args[1].location_endpoint_to_route) - raise errors.HTTPFailure(StatusCodes.FORBIDDEN, "Request is not permitted in this region", {HttpHeaders.SubStatus: SubStatusCodes.WRITE_FORBIDDEN}) + response = test_config.FakeResponse({HttpHeaders.SubStatus: SubStatusCodes.WRITE_FORBIDDEN}) + raise errors.CosmosHttpResponseError( + status_code=StatusCodes.FORBIDDEN, + message="Request is not permitted in this region", + response=response) def test_retry_policy_does_not_mark_null_locations_unavailable(self): self.original_get_database_account = cosmos_client_connection.CosmosClientConnection.GetDatabaseAccount @@ -107,7 +112,8 @@ def test_retry_policy_does_not_mark_null_locations_unavailable(self): self._write_counter = 0 request = RequestObject(http_constants.ResourceType.Document, documents._OperationType.Read) endpointDiscovery_retry_policy = _endpoint_discovery_retry_policy.EndpointDiscoveryRetryPolicy(documents.ConnectionPolicy(), endpoint_manager, 
request) - endpointDiscovery_retry_policy.ShouldRetry(errors.HTTPFailure(http_constants.StatusCodes.FORBIDDEN)) + endpointDiscovery_retry_policy.ShouldRetry(errors.CosmosHttpResponseError( + status_code=http_constants.StatusCodes.FORBIDDEN)) self.assertEqual(self._read_counter, 0) self.assertEqual(self._write_counter, 0) @@ -115,7 +121,8 @@ def test_retry_policy_does_not_mark_null_locations_unavailable(self): self._write_counter = 0 request = RequestObject(http_constants.ResourceType.Document, documents._OperationType.Create) endpointDiscovery_retry_policy = _endpoint_discovery_retry_policy.EndpointDiscoveryRetryPolicy(documents.ConnectionPolicy(), endpoint_manager, request) - endpointDiscovery_retry_policy.ShouldRetry(errors.HTTPFailure(http_constants.StatusCodes.FORBIDDEN)) + endpointDiscovery_retry_policy.ShouldRetry(errors.CosmosHttpResponseError( + status_code=http_constants.StatusCodes.FORBIDDEN)) self.assertEqual(self._read_counter, 0) self.assertEqual(self._write_counter, 0) diff --git a/sdk/cosmos/azure-cosmos/test/test_config.py b/sdk/cosmos/azure-cosmos/test/test_config.py index cb3d053b524a..82dcafff145a 100644 --- a/sdk/cosmos/azure-cosmos/test/test_config.py +++ b/sdk/cosmos/azure-cosmos/test/test_config.py @@ -25,7 +25,7 @@ import azure.cosmos.documents as documents import azure.cosmos.errors as errors from azure.cosmos.http_constants import StatusCodes -from azure.cosmos.database import Database +from azure.cosmos.database import DatabaseProxy from azure.cosmos.cosmos_client import CosmosClient from azure.cosmos.partition_key import PartitionKey from azure.cosmos.partition_key import NonePartitionKeyValue @@ -40,6 +40,7 @@ class _test_config(object): #[SuppressMessage("Microsoft.Security", "CS002:SecretInNextLine", Justification="Cosmos DB Emulator Key")] masterKey = os.getenv('ACCOUNT_KEY', 'C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==') host = os.getenv('ACCOUNT_HOST', 'https://localhost:443/') + 
connection_str = os.getenv('ACCOUNT_CONNECTION_STR', 'AccountEndpoint={};AccountKey={};'.format(host, masterKey)) connectionPolicy = documents.ConnectionPolicy() connectionPolicy.DisableSSLVerification = True @@ -86,7 +87,7 @@ def try_delete_database(cls, client): # type: (CosmosClient) -> None try: client.delete_database(cls.TEST_DATABASE_ID) - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: if e.status_code != StatusCodes.NOT_FOUND: raise e @@ -161,5 +162,12 @@ def remove_all_documents(cls, document_collection, use_custom_partition_key): # sleep to ensure deletes are propagated for multimaster enabled accounts time.sleep(2) break - except errors.HTTPFailure as e: - print("Error occurred while deleting documents:" + str(e) + " \nRetrying...") \ No newline at end of file + except errors.CosmosHttpResponseError as e: + print("Error occurred while deleting documents:" + str(e) + " \nRetrying...") + + +class FakeResponse: + def __init__(self, headers): + self.headers = headers + self.reason = "foo" + self.status_code = "bar" diff --git a/sdk/cosmos/azure-cosmos/test/ttl_tests.py b/sdk/cosmos/azure-cosmos/test/ttl_tests.py index 9be94249f36a..d14a189bf08b 100644 --- a/sdk/cosmos/azure-cosmos/test/ttl_tests.py +++ b/sdk/cosmos/azure-cosmos/test/ttl_tests.py @@ -60,7 +60,7 @@ def __AssertHTTPFailureWithStatus(self, status_code, func, *args, **kwargs): try: func(*args, **kwargs) self.assertFalse(True, 'function should fail.') - except errors.HTTPFailure as inst: + except errors.CosmosHttpResponseError as inst: self.assertEqual(inst.status_code, status_code) @classmethod @@ -71,7 +71,7 @@ def setUpClass(cls): "You must specify your Azure Cosmos account values for " "'masterKey' and 'host' at the top of this class to run the " "tests.") - cls.client = cosmos_client.CosmosClient(cls.host, {'masterKey': cls.masterKey}, connection_policy=cls.connectionPolicy) + cls.client = cosmos_client.CosmosClient(cls.host, cls.masterKey, 
connection_policy=cls.connectionPolicy) cls.created_db = test_config._test_config.create_database_if_not_exist(cls.client) def test_collection_and_document_ttl_values(self): diff --git a/sdk/cosmos/azure-cosmos/test/utils_tests.py b/sdk/cosmos/azure-cosmos/test/utils_tests.py index 4349502fe854..589878fee7b2 100644 --- a/sdk/cosmos/azure-cosmos/test/utils_tests.py +++ b/sdk/cosmos/azure-cosmos/test/utils_tests.py @@ -21,9 +21,11 @@ import unittest import pytest +import azure.cosmos import azure.cosmos._utils as _utils import platform import azure.cosmos.http_constants as http_constants +import test_config pytestmark = pytest.mark.cosmosEmulator @@ -35,12 +37,20 @@ class UtilsTests(unittest.TestCase): def test_user_agent(self): user_agent = _utils.get_user_agent() - expected_user_agent = "{}/{} Python/{} azure-cosmos/{}".format( - platform.system(), platform.release(), platform.python_version(), - http_constants.Versions.SDKVersion + expected_user_agent = "azsdk-python-cosmos/{} Python/{} ({})".format( + azure.cosmos.__version__, + platform.python_version(), + platform.platform() ) - self.assertEqual(user_agent, expected_user_agent) + + def test_connection_string(self): + client = azure.cosmos.CosmosClient.from_connection_string(test_config._test_config.connection_str) + databases = list(client.list_databases()) + assert len(databases) > 0 + assert isinstance(databases[0], dict) + assert databases[0].get('_etag') is not None + if __name__ == "__main__": unittest.main() From 577c12286085be3d8b513cf19bd49e66607f9ac9 Mon Sep 17 00:00:00 2001 From: Srinath Narayanan Date: Mon, 9 Sep 2019 15:56:35 -0700 Subject: [PATCH 17/29] Added support for Urllib3 Connection retries Made Offer extend object instead of dict --- .../azure/cosmos/_cosmos_client_connection.py | 5 +++ .../azure-cosmos/azure/cosmos/documents.py | 3 ++ sdk/cosmos/azure-cosmos/azure/cosmos/offer.py | 2 +- sdk/cosmos/azure-cosmos/test/crud_tests.py | 39 +++++++++++++++++++ 4 files changed, 48 insertions(+), 1 
deletion(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py index 64c92d4de0b9..7b6fc126bd66 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py @@ -25,6 +25,7 @@ """Document client class for the Azure Cosmos database service. """ import requests +from requests.adapters import HTTPAdapter import six from . import _base as base @@ -134,6 +135,10 @@ def __init__( # creating a requests session used for connection pooling and re-used by all requests self._requests_session = requests.Session() + if self.connection_policy.ConnectionRetryConfiguration is not None: + adapter = HTTPAdapter(max_retries=self.connection_policy.ConnectionRetryConfiguration) + self._requests_session.mount('http://', adapter) + self._requests_session.mount('https://', adapter) if self.connection_policy.ProxyConfiguration and self.connection_policy.ProxyConfiguration.Host: host = connection_policy.ProxyConfiguration.Host diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py b/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py index 79a558fb4a13..a0a6e51c1e50 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py @@ -372,6 +372,8 @@ class ConnectionPolicy(object): # pylint: disable=too-many-instance-attributes :ivar boolean UseMultipleWriteLocations: Flag to enable writes on any locations (regions) for geo-replicated database accounts in the azure Cosmos service. + :ivar (int or requests.packages.urllib3.util.retry) ConnectionRetryConfiguration: + Retry Configuration to be used for urllib3 connection retries. 
""" __defaultRequestTimeout = 60000 # milliseconds @@ -391,6 +393,7 @@ def __init__(self): self.RetryOptions = _retry_options.RetryOptions() self.DisableSSLVerification = False self.UseMultipleWriteLocations = False + self.ConnectionRetryConfiguration = None class _OperationType(object): diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/offer.py b/sdk/cosmos/azure-cosmos/azure/cosmos/offer.py index 4e7b240d1c8f..10d3f4b4a9f9 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/offer.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/offer.py @@ -23,7 +23,7 @@ """ -class Offer(dict): +class Offer(object): """ Represents a offer in an Azure Cosmos DB SQL API container. To read and update offers use the associated methods on the :class:`Container`. diff --git a/sdk/cosmos/azure-cosmos/test/crud_tests.py b/sdk/cosmos/azure-cosmos/test/crud_tests.py index 43a1d2096d24..10278d76ad2a 100644 --- a/sdk/cosmos/azure-cosmos/test/crud_tests.py +++ b/sdk/cosmos/azure-cosmos/test/crud_tests.py @@ -41,6 +41,8 @@ import urllib.parse as urllib import uuid import pytest +from requests.packages.urllib3.util.retry import Retry +from requests.exceptions import ConnectionError from azure.cosmos import _consistent_hash_ring import azure.cosmos.documents as documents import azure.cosmos.errors as errors @@ -1956,6 +1958,43 @@ def test_client_request_timeout(self): # client does a getDatabaseAccount on initialization, which will time out cosmos_client.CosmosClient(CRUDTests.host, {'masterKey': CRUDTests.masterKey}, "Session", connection_policy) + def test_client_request_timeout_when_connection_retry_configuration_specified(self): + connection_policy = documents.ConnectionPolicy() + # making timeout 0 ms to make sure it will throw + connection_policy.RequestTimeout = 0 + connection_policy.ConnectionRetryConfiguration = Retry( + total=3, + read=3, + connect=3, + backoff_factor=0.3, + status_forcelist=(500, 502, 504) + ) + with self.assertRaises(Exception): + # client does a getDatabaseAccount on 
initialization, which will time out + cosmos_client.CosmosClient(CRUDTests.host, {'masterKey': CRUDTests.masterKey}, "Session", connection_policy) + + def test_client_connection_retry_configuration(self): + total_time_for_two_retries = self.initialize_client_with_connection_retry_config(2) + total_time_for_three_retries = self.initialize_client_with_connection_retry_config(3) + self.assertGreater(total_time_for_three_retries, total_time_for_two_retries) + + def initialize_client_with_connection_retry_config(self, retries): + connection_policy = documents.ConnectionPolicy() + connection_policy.ConnectionRetryConfiguration = Retry( + total=retries, + read=retries, + connect=retries, + backoff_factor=0.3, + status_forcelist=(500, 502, 504) + ) + start_time = time.time() + try: + cosmos_client.CosmosClient("https://localhost:9999", {'masterKey': CRUDTests.masterKey}, "Session", connection_policy) + self.fail() + except ConnectionError as e: + end_time = time.time() + return end_time - start_time + def test_query_iterable_functionality(self): def __create_resources(client): """Creates resources for this test. 
From b5b4f8b3b6715ede76ba1b5296f89d3b5a8ba97a Mon Sep 17 00:00:00 2001 From: annatisch Date: Mon, 9 Sep 2019 17:57:58 -0700 Subject: [PATCH 18/29] [Cosmos] Bumped dependency (#7147) * Bumped dependency * Update reqs --- sdk/cosmos/azure-cosmos/requirements.txt | 4 ++-- sdk/cosmos/azure-cosmos/setup.py | 2 +- shared_requirements.txt | 1 + 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/requirements.txt b/sdk/cosmos/azure-cosmos/requirements.txt index f21e3797be8f..5d92f68f2287 100644 --- a/sdk/cosmos/azure-cosmos/requirements.txt +++ b/sdk/cosmos/azure-cosmos/requirements.txt @@ -1,2 +1,2 @@ -requests >=2.10.0 -six >=1.6 \ No newline at end of file +azure-core<2.0.0,>=1.0.0b3 +six>=1.6 \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/setup.py b/sdk/cosmos/azure-cosmos/setup.py index a2bc049eaf9d..3dab953d7475 100644 --- a/sdk/cosmos/azure-cosmos/setup.py +++ b/sdk/cosmos/azure-cosmos/setup.py @@ -70,7 +70,7 @@ ), install_requires=[ 'six >=1.6', - 'azure-core<2.0.0,>=1.0.0b2' + 'azure-core<2.0.0,>=1.0.0b3' ], extras_require={ ":python_version<'3.0'": ["azure-nspkg"], diff --git a/shared_requirements.txt b/shared_requirements.txt index 329d6a2fb6db..ab3bba2f58c5 100644 --- a/shared_requirements.txt +++ b/shared_requirements.txt @@ -101,3 +101,4 @@ aiohttp>=3.0 aiodns>=2.0 python-dateutil>=2.8.0 six>=1.6 +#override azure-cosmos azure-core<2.0.0,>=1.0.0b3 From 700f5db396eb977a479584d1d58e244de6c034cb Mon Sep 17 00:00:00 2001 From: Srinath Narayanan Date: Mon, 9 Sep 2019 18:19:52 -0700 Subject: [PATCH 19/29] Misc fixes for Cosmos SDK (#7157) * Made offer extend object instead of dict * added support for urllib3 connection retry --- .../azure/cosmos/_cosmos_client_connection.py | 15 ++++++- .../azure-cosmos/azure/cosmos/documents.py | 3 ++ sdk/cosmos/azure-cosmos/azure/cosmos/offer.py | 2 +- sdk/cosmos/azure-cosmos/test/crud_tests.py | 41 +++++++++++++++++++ 4 files changed, 59 insertions(+), 2 deletions(-) diff --git 
a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py index 1fa3d3db9934..cb6cb7e7b0d3 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py @@ -26,8 +26,11 @@ """ from typing import Dict, Any, Optional import six +import requests +from requests.adapters import HTTPAdapter from azure.core.paging import ItemPaged # type: ignore from azure.core import PipelineClient # type: ignore +from azure.core.pipeline.transport import RequestsTransport from azure.core.pipeline.policies import ( # type: ignore ContentDecodePolicy, HeadersPolicy, @@ -148,6 +151,16 @@ def __init__( self._useMultipleWriteLocations = False self._global_endpoint_manager = global_endpoint_manager._GlobalEndpointManager(self) + # creating a requests session used for connection pooling and re-used by all requests + requests_session = requests.Session() + + transport = None + if self.connection_policy.ConnectionRetryConfiguration is not None: + adapter = HTTPAdapter(max_retries=self.connection_policy.ConnectionRetryConfiguration) + requests_session.mount('http://', adapter) + requests_session.mount('https://', adapter) + transport = RequestsTransport(session=requests_session) + proxies = kwargs.pop('proxies', {}) if self.connection_policy.ProxyConfiguration and self.connection_policy.ProxyConfiguration.Host: host = self.connection_policy.ProxyConfiguration.Host @@ -165,7 +178,7 @@ def __init__( NetworkTraceLoggingPolicy(**kwargs), ] - self.pipeline_client = PipelineClient(url_connection, "empty-config", policies=policies) + self.pipeline_client = PipelineClient(url_connection, "empty-config", transport=transport, policies=policies) # Query compatibility mode. # Allows to specify compatibility mode used by client when making query requests. 
Should be removed when diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py b/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py index a2661a71bb20..02b80331b281 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py @@ -372,6 +372,8 @@ class ConnectionPolicy(object): # pylint: disable=too-many-instance-attributes :ivar boolean UseMultipleWriteLocations: Flag to enable writes on any locations (regions) for geo-replicated database accounts in the azure Cosmos service. + :ivar (int or requests.packages.urllib3.util.retry) ConnectionRetryConfiguration: + Retry Configuration to be used for urllib3 connection retries. """ __defaultRequestTimeout = 60000 # milliseconds @@ -391,6 +393,7 @@ def __init__(self): self.RetryOptions = _retry_options.RetryOptions() self.DisableSSLVerification = False self.UseMultipleWriteLocations = False + self.ConnectionRetryConfiguration = None class _OperationType(object): diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/offer.py b/sdk/cosmos/azure-cosmos/azure/cosmos/offer.py index c4087542f003..77b523c35679 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/offer.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/offer.py @@ -24,7 +24,7 @@ from typing import Dict, Any -class Offer(dict): +class Offer(object): """ Represents a offer in an Azure Cosmos DB SQL API container. To read and update offers use the associated methods on the :class:`Container`. 
diff --git a/sdk/cosmos/azure-cosmos/test/crud_tests.py b/sdk/cosmos/azure-cosmos/test/crud_tests.py index 14529aede445..1854b23fae78 100644 --- a/sdk/cosmos/azure-cosmos/test/crud_tests.py +++ b/sdk/cosmos/azure-cosmos/test/crud_tests.py @@ -53,6 +53,9 @@ from azure.cosmos.partition_key import PartitionKey import conftest from azure.cosmos import _retry_utility +from requests.packages.urllib3.util.retry import Retry +from requests.exceptions import ConnectionError + pytestmark = pytest.mark.cosmosEmulator @@ -1959,6 +1962,44 @@ def test_client_request_timeout(self): # client does a getDatabaseAccount on initialization, which will time out cosmos_client.CosmosClient(CRUDTests.host, CRUDTests.masterKey, "Session", connection_policy=connection_policy) + def test_client_request_timeout_when_connection_retry_configuration_specified(self): + connection_policy = documents.ConnectionPolicy() + # making timeout 0 ms to make sure it will throw + connection_policy.RequestTimeout = 0 + connection_policy.ConnectionRetryConfiguration = Retry( + total=3, + read=3, + connect=3, + backoff_factor=0.3, + status_forcelist=(500, 502, 504) + ) + with self.assertRaises(Exception): + # client does a getDatabaseAccount on initialization, which will time out + cosmos_client.CosmosClient(CRUDTests.host, CRUDTests.masterKey, "Session", connection_policy=connection_policy) + + def test_client_connection_retry_configuration(self): + total_time_for_two_retries = self.initialize_client_with_connection_retry_config(2) + total_time_for_three_retries = self.initialize_client_with_connection_retry_config(3) + self.assertGreater(total_time_for_three_retries, total_time_for_two_retries) + + def initialize_client_with_connection_retry_config(self, retries): + from azure.core.exceptions import ServiceRequestError + connection_policy = documents.ConnectionPolicy() + connection_policy.ConnectionRetryConfiguration = Retry( + total=retries, + read=retries, + connect=retries, + backoff_factor=0.3, + 
status_forcelist=(500, 502, 504) + ) + start_time = time.time() + try: + cosmos_client.CosmosClient("https://localhost:9999", CRUDTests.masterKey, "Session", connection_policy=connection_policy) + self.fail() + except ServiceRequestError as e: + end_time = time.time() + return end_time - start_time + def test_query_iterable_functionality(self): def __create_resources(client): """Creates resources for this test. From 962391cefe3d56774a11f5f686a65e125175aedf Mon Sep 17 00:00:00 2001 From: annatisch Date: Fri, 4 Oct 2019 12:14:18 -0700 Subject: [PATCH 20/29] [Cosmos] Reconfigure retry policy (#7544) * Reconfigure retry policy * Review feedback * Fix pylint * Updated tests * Support client-side timeout * Updated timeout logic * Renamed client error * Updated tests * Patch azure-core Needed pending PR 7542 * Fixed status retry tests * Using dev core --- .../core/pipeline/transport/requests_basic.py | 2 +- .../azure-cosmos/azure/cosmos/__init__.py | 2 + .../azure/cosmos/_cosmos_client_connection.py | 37 ++++-- .../azure/cosmos/_global_endpoint_manager.py | 18 +-- .../azure/cosmos/_retry_utility.py | 88 +++++++++++++ .../azure/cosmos/_synchronized_request.py | 11 +- .../azure/cosmos/cosmos_client.py | 26 +++- .../azure-cosmos/azure/cosmos/documents.py | 6 +- .../azure-cosmos/azure/cosmos/errors.py | 10 ++ sdk/cosmos/azure-cosmos/dev_requirements.txt | 1 + sdk/cosmos/azure-cosmos/test/crud_tests.py | 119 +++++++++++++++--- 11 files changed, 273 insertions(+), 47 deletions(-) diff --git a/sdk/core/azure-core/azure/core/pipeline/transport/requests_basic.py b/sdk/core/azure-core/azure/core/pipeline/transport/requests_basic.py index 95757edb63dd..d18d63a436f1 100644 --- a/sdk/core/azure-core/azure/core/pipeline/transport/requests_basic.py +++ b/sdk/core/azure-core/azure/core/pipeline/transport/requests_basic.py @@ -246,7 +246,7 @@ def send(self, request, **kwargs): # type: ignore allow_redirects=False, **kwargs) - except urllib3.exceptions.NewConnectionError as err: + except 
(urllib3.exceptions.NewConnectionError, urllib3.exceptions.ConnectTimeoutError) as err: error = ServiceRequestError(err, error=err) except requests.exceptions.ReadTimeout as err: error = ServiceResponseError(err, error=err) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/__init__.py b/sdk/cosmos/azure-cosmos/azure/cosmos/__init__.py index 93920f280f41..91182d089e7b 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/__init__.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/__init__.py @@ -19,6 +19,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. +from ._retry_utility import ConnectionRetryPolicy from .container import ContainerProxy from .cosmos_client import CosmosClient from .database import DatabaseProxy @@ -56,5 +57,6 @@ "SSLConfiguration", "TriggerOperation", "TriggerType", + "ConnectionRetryPolicy", ) __version__ = VERSION diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py index cb6cb7e7b0d3..de0eeb4ed394 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py @@ -26,12 +26,11 @@ """ from typing import Dict, Any, Optional import six -import requests -from requests.adapters import HTTPAdapter +from requests.packages.urllib3.util.retry import Retry # pylint: disable=import-error from azure.core.paging import ItemPaged # type: ignore from azure.core import PipelineClient # type: ignore -from azure.core.pipeline.transport import RequestsTransport from azure.core.pipeline.policies import ( # type: ignore + HTTPPolicy, ContentDecodePolicy, HeadersPolicy, UserAgentPolicy, @@ -51,6 +50,7 @@ from . import _synchronized_request as synchronized_request from . import _global_endpoint_manager as global_endpoint_manager from ._routing import routing_map_provider +from ._retry_utility import ConnectionRetryPolicy from . import _session from . 
import _utils from .partition_key import _Undefined, _Empty @@ -151,15 +151,24 @@ def __init__( self._useMultipleWriteLocations = False self._global_endpoint_manager = global_endpoint_manager._GlobalEndpointManager(self) - # creating a requests session used for connection pooling and re-used by all requests - requests_session = requests.Session() - - transport = None - if self.connection_policy.ConnectionRetryConfiguration is not None: - adapter = HTTPAdapter(max_retries=self.connection_policy.ConnectionRetryConfiguration) - requests_session.mount('http://', adapter) - requests_session.mount('https://', adapter) - transport = RequestsTransport(session=requests_session) + retry_policy = None + if isinstance(self.connection_policy.ConnectionRetryConfiguration, HTTPPolicy): + retry_policy = self.connection_policy.ConnectionRetryConfiguration + elif isinstance(self.connection_policy.ConnectionRetryConfiguration, int): + retry_policy = ConnectionRetryPolicy(total=self.connection_policy.ConnectionRetryConfiguration) + elif isinstance(self.connection_policy.ConnectionRetryConfiguration, Retry): + # Convert a urllib3 retry policy to a Pipeline policy + retry_policy = ConnectionRetryPolicy( + retry_total=self.connection_policy.ConnectionRetryConfiguration.total, + retry_connect=self.connection_policy.ConnectionRetryConfiguration.connect, + retry_read=self.connection_policy.ConnectionRetryConfiguration.read, + retry_status=self.connection_policy.ConnectionRetryConfiguration.status, + retry_backoff_max=self.connection_policy.ConnectionRetryConfiguration.BACKOFF_MAX, + retry_on_status_codes=list(self.connection_policy.ConnectionRetryConfiguration.status_forcelist), + retry_backoff_factor=self.connection_policy.ConnectionRetryConfiguration.backoff_factor + ) + else: + raise TypeError("Unsupported retry policy. 
Must be an azure.cosmos.ConnectionRetryPolicy, int, or urllib3.Retry") proxies = kwargs.pop('proxies', {}) if self.connection_policy.ProxyConfiguration and self.connection_policy.ProxyConfiguration.Host: @@ -173,11 +182,13 @@ def __init__( ProxyPolicy(proxies=proxies), UserAgentPolicy(base_user_agent=_utils.get_user_agent(), **kwargs), ContentDecodePolicy(), + retry_policy, CustomHookPolicy(**kwargs), DistributedTracingPolicy(), NetworkTraceLoggingPolicy(**kwargs), ] + transport = kwargs.pop("transport", None) self.pipeline_client = PipelineClient(url_connection, "empty-config", transport=transport, policies=policies) # Query compatibility mode. @@ -188,7 +199,7 @@ def __init__( # Routing map provider self._routing_map_provider = routing_map_provider.SmartRoutingMapProvider(self) - database_account = self._global_endpoint_manager._GetDatabaseAccount() + database_account = self._global_endpoint_manager._GetDatabaseAccount(**kwargs) self._global_endpoint_manager.force_refresh(database_account) @property diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_global_endpoint_manager.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_global_endpoint_manager.py index d4dc37ee7533..acfab1059022 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_global_endpoint_manager.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_global_endpoint_manager.py @@ -87,17 +87,17 @@ def force_refresh(self, database_account): self.refresh_needed = True self.refresh_endpoint_list(database_account) - def refresh_endpoint_list(self, database_account): + def refresh_endpoint_list(self, database_account, **kwargs): with self.refresh_lock: # if refresh is not needed or refresh is already taking place, return if not self.refresh_needed: return try: - self._refresh_endpoint_list_private(database_account) + self._refresh_endpoint_list_private(database_account, **kwargs) except Exception as e: raise e - def _refresh_endpoint_list_private(self, database_account=None): + def _refresh_endpoint_list_private(self, 
database_account=None, **kwargs): if database_account: self.location_cache.perform_on_database_account_read(database_account) self.refresh_needed = False @@ -107,18 +107,18 @@ def _refresh_endpoint_list_private(self, database_account=None): and self.location_cache.current_time_millis() - self.last_refresh_time > self.refresh_time_interval_in_ms ): if not database_account: - database_account = self._GetDatabaseAccount() + database_account = self._GetDatabaseAccount(**kwargs) self.location_cache.perform_on_database_account_read(database_account) self.last_refresh_time = self.location_cache.current_time_millis() self.refresh_needed = False - def _GetDatabaseAccount(self): + def _GetDatabaseAccount(self, **kwargs): """Gets the database account first by using the default endpoint, and if that doesn't returns use the endpoints for the preferred locations in the order they are specified to get the database account. """ try: - database_account = self._GetDatabaseAccountStub(self.DefaultEndpoint) + database_account = self._GetDatabaseAccountStub(self.DefaultEndpoint, **kwargs) return database_account # If for any reason(non-globaldb related), we are not able to get the database # account from the above call to GetDatabaseAccount, we would try to get this @@ -130,18 +130,18 @@ def _GetDatabaseAccount(self): for location_name in self.PreferredLocations: locational_endpoint = _GlobalEndpointManager.GetLocationalEndpoint(self.DefaultEndpoint, location_name) try: - database_account = self._GetDatabaseAccountStub(locational_endpoint) + database_account = self._GetDatabaseAccountStub(locational_endpoint, **kwargs) return database_account except errors.CosmosHttpResponseError: pass return None - def _GetDatabaseAccountStub(self, endpoint): + def _GetDatabaseAccountStub(self, endpoint, **kwargs): """Stub for getting database account from the client which can be used for mocking purposes as well. 
""" - return self.Client.GetDatabaseAccount(endpoint) + return self.Client.GetDatabaseAccount(endpoint, **kwargs) @staticmethod def GetLocationalEndpoint(default_endpoint, location_name): diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_retry_utility.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_retry_utility.py index df575cb27d36..20362d09aa32 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_retry_utility.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_retry_utility.py @@ -24,6 +24,9 @@ import time +from azure.core.exceptions import AzureError, ClientAuthenticationError +from azure.core.pipeline.policies import RetryPolicy + from . import errors from . import _endpoint_discovery_retry_policy from . import _resource_throttle_retry_policy @@ -64,6 +67,8 @@ def Execute(client, global_endpoint_manager, function, *args, **kwargs): ) while True: try: + client_timeout = kwargs.get('timeout') + start_time = time.time() if args: result = ExecuteFunction(function, global_endpoint_manager, *args, **kwargs) else: @@ -113,9 +118,92 @@ def Execute(client, global_endpoint_manager, function, *args, **kwargs): # Wait for retry_after_in_milliseconds time before the next retry time.sleep(retry_policy.retry_after_in_milliseconds / 1000.0) + if client_timeout: + kwargs['timeout'] = client_timeout - (time.time() - start_time) + if kwargs['timeout'] <= 0: + raise errors.CosmosClientTimeoutError() def ExecuteFunction(function, *args, **kwargs): """ Stub method so that it can be used for mocking purposes as well. """ return function(*args, **kwargs) + + +def _configure_timeout(request, absolute, per_request): + # type: (azure.core.pipeline.PipelineRequest, Optional[int], int) -> Optional[AzureError] + if absolute is not None: + if absolute <= 0: + raise errors.CosmosClientTimeoutError() + if per_request: + # Both socket timeout and client timeout have been provided - use the shortest value. 
+ request.context.options['connection_timeout'] = min(per_request, absolute) + else: + # Only client timeout provided. + request.context.options['connection_timeout'] = absolute + elif per_request: + # Only socket timeout provided. + request.context.options['connection_timeout'] = per_request + + +class ConnectionRetryPolicy(RetryPolicy): + + def __init__(self, **kwargs): + clean_kwargs = {k: v for k, v in kwargs.items() if v is not None} + super(ConnectionRetryPolicy, self).__init__(**clean_kwargs) + + def send(self, request): + """Sends the PipelineRequest object to the next policy. Uses retry settings if necessary. + Also enforces an absolute client-side timeout that spans multiple retry attempts. + + :param request: The PipelineRequest object + :type request: ~azure.core.pipeline.PipelineRequest + :return: Returns the PipelineResponse or raises error if maximum retries exceeded. + :rtype: ~azure.core.pipeline.PipelineResponse + :raises: ~azure.core.exceptions.AzureError if maximum retries exceeded. + :raises: ~azure.cosmos.CosmosClientTimeoutError if specified timeout exceeded. 
+ :raises: ~azure.core.exceptions.ClientAuthenticationError if authentication + """ + absolute_timeout = request.context.options.pop('timeout', None) + per_request_timeout = request.context.options.pop('connection_timeout', 0) + + retry_error = None + retry_active = True + response = None + retry_settings = self.configure_retries(request.context.options) + while retry_active: + try: + start_time = time.time() + _configure_timeout(request, absolute_timeout, per_request_timeout) + + response = self.next.send(request) + if self.is_retry(retry_settings, response): + retry_active = self.increment(retry_settings, response=response) + if retry_active: + self.sleep(retry_settings, request.context.transport, response=response) + continue + break + except ClientAuthenticationError: # pylint:disable=try-except-raise + # the authentication policy failed such that the client's request can't + # succeed--we'll never have a response to it, so propagate the exception + raise + except errors.CosmosClientTimeoutError as timeout_error: + timeout_error.inner_exception = retry_error + timeout_error.response = response + timeout_error.history = retry_settings['history'] + raise + except AzureError as err: + retry_error = err + if self._is_method_retryable(retry_settings, request.http_request): + retry_active = self.increment(retry_settings, response=request, error=err) + if retry_active: + self.sleep(retry_settings, request.context.transport) + continue + raise err + finally: + end_time = time.time() + if absolute_timeout: + absolute_timeout -= (end_time - start_time) + + self.update_context(response.context, retry_settings) + return response diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_synchronized_request.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_synchronized_request.py index 7f1b900303ca..f697f72f66c8 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_synchronized_request.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_synchronized_request.py @@ -23,6 +23,7 @@ """ import json 
+import time from six.moves.urllib.parse import urlparse import six @@ -96,7 +97,13 @@ def _Request(global_endpoint_manager, request_params, connection_policy, pipelin connection_timeout = kwargs.pop("connection_timeout", connection_timeout / 1000.0) # Every request tries to perform a refresh - global_endpoint_manager.refresh_endpoint_list(None) + client_timeout = kwargs.get('timeout') + start_time = time.time() + global_endpoint_manager.refresh_endpoint_list(None, **kwargs) + if client_timeout is not None: + kwargs['timeout'] = client_timeout - (time.time() - start_time) + if kwargs['timeout'] <= 0: + raise errors.CosmosClientTimeoutError() if request_params.endpoint_override: base_url = request_params.endpoint_override @@ -149,7 +156,7 @@ def _Request(global_endpoint_manager, request_params, connection_policy, pipelin return (response.stream_download(pipeline_client._pipeline), headers) data = response.body() - if not six.PY2: + if data and not six.PY2: # python 3 compatible: convert data from byte to unicode string data = data.decode("utf-8") diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py index 1d2cdcc55fc8..28536884dd4b 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py @@ -22,13 +22,14 @@ """Create, read, and delete databases in the Azure Cosmos DB SQL API service. 
""" -from typing import Any, Dict, Mapping, Optional, Union, cast, Iterable, List +from typing import Any, Dict, Mapping, Optional, Union, cast, Iterable, List # pylint: disable=unused-import import six from azure.core.tracing.decorator import distributed_trace # type: ignore from ._cosmos_client_connection import CosmosClientConnection from ._base import build_options +from ._retry_utility import ConnectionRetryPolicy from .database import DatabaseProxy from .documents import ConnectionPolicy, DatabaseAccount from .errors import CosmosResourceNotFoundError @@ -96,11 +97,25 @@ def _build_connection_policy(kwargs): # Retry config retry = kwargs.pop('retry_options', None) or policy.RetryOptions - retry._max_retry_attempt_count = kwargs.pop('retry_total', None) or retry._max_retry_attempt_count + total_retries = kwargs.pop('retry_total', None) + retry._max_retry_attempt_count = total_retries or retry._max_retry_attempt_count retry._fixed_retry_interval_in_milliseconds = kwargs.pop('retry_fixed_interval', None) or \ retry._fixed_retry_interval_in_milliseconds - retry._max_wait_time_in_seconds = kwargs.pop('retry_backoff_max', None) or retry._max_wait_time_in_seconds + max_backoff = kwargs.pop('retry_backoff_max', None) + retry._max_wait_time_in_seconds = max_backoff or retry._max_wait_time_in_seconds policy.RetryOptions = retry + connection_retry = kwargs.pop('connection_retry_policy', None) or policy.ConnectionRetryConfiguration + if not connection_retry: + connection_retry = ConnectionRetryPolicy( + retry_total=total_retries, + retry_connect=kwargs.pop('retry_connect', None), + retry_read=kwargs.pop('retry_read', None), + retry_status=kwargs.pop('retry_status', None), + retry_backoff_max=max_backoff, + retry_on_status_codes=kwargs.pop('retry_on_status_codes', []), + retry_backoff_factor=kwargs.pop('retry_backoff_factor', 0.8), + ) + policy.ConnectionRetryConfiguration = connection_retry return policy @@ -130,6 +145,11 @@ class CosmosClient(object): *retry_total* - 
Maximum retry attempts. *retry_backoff_max* - Maximum retry wait time in seconds. *retry_fixed_interval* - Fixed retry interval in milliseconds. + *retry_read* - Maximum number of socket read retry attempts. + *retry_connect* - Maximum number of connection error retry attempts. + *retry_status* - Maximum number of retry attempts on error status codes. + *retry_on_status_codes* - A list of specific status codes to retry on. + *retry_backoff_factor* - Factor to calculate wait time between retry attempts. *enable_endpoint_discovery* - Enable endpoint discovery for geo-replicated database accounts. Default is True. *preferred_locations* - The preferred locations for geo-replicated database accounts. When `enable_endpoint_discovery` is true and `preferred_locations` is non-empty, diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py b/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py index 02b80331b281..cfccc00f2ef4 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py @@ -372,8 +372,10 @@ class ConnectionPolicy(object): # pylint: disable=too-many-instance-attributes :ivar boolean UseMultipleWriteLocations: Flag to enable writes on any locations (regions) for geo-replicated database accounts in the azure Cosmos service. - :ivar (int or requests.packages.urllib3.util.retry) ConnectionRetryConfiguration: - Retry Configuration to be used for urllib3 connection retries. + :ivar ConnectionRetryConfiguration: + Retry Configuration to be used for connection retries. 
+ :vartype ConnectionRetryConfiguration: + int or azure.cosmos.ConnectionRetryPolicy or requests.packages.urllib3.util.retry """ __defaultRequestTimeout = 60000 # milliseconds diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/errors.py b/sdk/cosmos/azure-cosmos/azure/cosmos/errors.py index 5fcb514959c1..698924ef3013 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/errors.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/errors.py @@ -63,3 +63,13 @@ class CosmosResourceExistsError(ResourceExistsError, CosmosHttpResponseError): class CosmosAccessConditionFailedError(CosmosHttpResponseError): """An error response with status code 412.""" + + +class CosmosClientTimeoutError(AzureError): + """An operation failed to complete within the specified timeout.""" + + def __init__(self, **kwargs): + message = "Client operation failed to complete within specified timeout." + self.response = None + self.history = None + super(CosmosClientTimeoutError, self).__init__(message, **kwargs) diff --git a/sdk/cosmos/azure-cosmos/dev_requirements.txt b/sdk/cosmos/azure-cosmos/dev_requirements.txt index 6ccb7f031ddd..f3473b3bd4d0 100644 --- a/sdk/cosmos/azure-cosmos/dev_requirements.txt +++ b/sdk/cosmos/azure-cosmos/dev_requirements.txt @@ -1 +1,2 @@ -e ../../../tools/azure-sdk-tools +-e ../../core/azure-core diff --git a/sdk/cosmos/azure-cosmos/test/crud_tests.py b/sdk/cosmos/azure-cosmos/test/crud_tests.py index 1d0212e9cf8a..85aa501cd50b 100644 --- a/sdk/cosmos/azure-cosmos/test/crud_tests.py +++ b/sdk/cosmos/azure-cosmos/test/crud_tests.py @@ -41,6 +41,8 @@ import urllib.parse as urllib import uuid import pytest +from azure.core.exceptions import AzureError, ServiceResponseError +from azure.core.pipeline.transport import RequestsTransport, RequestsTransportResponse from azure.cosmos import _consistent_hash_ring import azure.cosmos.documents as documents import azure.cosmos.errors as errors @@ -53,6 +55,7 @@ from azure.cosmos.partition_key import PartitionKey import conftest from 
azure.cosmos import _retry_utility +import requests from requests.packages.urllib3.util.retry import Retry from requests.exceptions import ConnectionError @@ -66,6 +69,26 @@ # To Run the test, replace the two member fields (masterKey and host) with values # associated with your Azure Cosmos account. + +class TimeoutTransport(RequestsTransport): + + def __init__(self, response): + self._response = response + super(TimeoutTransport, self).__init__() + + def send(self, *args, **kwargs): + if kwargs.pop("passthrough", False): + return super(TimeoutTransport, self).send(*args, **kwargs) + + time.sleep(5) + if isinstance(self._response, Exception): + raise self._response + output = requests.Response() + output.status_code = self._response + response = RequestsTransportResponse(None, output) + return response + + @pytest.mark.usefixtures("teardown") class CRUDTests(unittest.TestCase): """Python CRUD Tests. @@ -1977,7 +2000,7 @@ def __get_first(array): def test_client_request_timeout(self): connection_policy = documents.ConnectionPolicy() # making timeout 0 ms to make sure it will throw - connection_policy.RequestTimeout = 0 + connection_policy.RequestTimeout = 0.000000000001 with self.assertRaises(Exception): # client does a getDatabaseAccount on initialization, which will time out cosmos_client.CosmosClient(CRUDTests.host, CRUDTests.masterKey, "Session", connection_policy=connection_policy) @@ -1985,7 +2008,7 @@ def test_client_request_timeout(self): def test_client_request_timeout_when_connection_retry_configuration_specified(self): connection_policy = documents.ConnectionPolicy() # making timeout 0 ms to make sure it will throw - connection_policy.RequestTimeout = 0 + connection_policy.RequestTimeout = 0.000000000001 connection_policy.ConnectionRetryConfiguration = Retry( total=3, read=3, @@ -1993,33 +2016,95 @@ def test_client_request_timeout_when_connection_retry_configuration_specified(se backoff_factor=0.3, status_forcelist=(500, 502, 504) ) - with 
self.assertRaises(Exception): + with self.assertRaises(AzureError): # client does a getDatabaseAccount on initialization, which will time out cosmos_client.CosmosClient(CRUDTests.host, CRUDTests.masterKey, "Session", connection_policy=connection_policy) def test_client_connection_retry_configuration(self): - total_time_for_two_retries = self.initialize_client_with_connection_retry_config(2) - total_time_for_three_retries = self.initialize_client_with_connection_retry_config(3) + total_time_for_two_retries = self.initialize_client_with_connection_urllib_retry_config(2) + total_time_for_three_retries = self.initialize_client_with_connection_urllib_retry_config(3) self.assertGreater(total_time_for_three_retries, total_time_for_two_retries) - def initialize_client_with_connection_retry_config(self, retries): - from azure.core.exceptions import ServiceRequestError - connection_policy = documents.ConnectionPolicy() - connection_policy.ConnectionRetryConfiguration = Retry( - total=retries, - read=retries, - connect=retries, - backoff_factor=0.3, - status_forcelist=(500, 502, 504) - ) + total_time_for_two_retries = self.initialize_client_with_connection_core_retry_config(2) + total_time_for_three_retries = self.initialize_client_with_connection_core_retry_config(3) + self.assertGreater(total_time_for_three_retries, total_time_for_two_retries) + + def initialize_client_with_connection_urllib_retry_config(self, retries): + retry_policy = Retry( + total=retries, + read=retries, + connect=retries, + backoff_factor=0.3, + status_forcelist=(500, 502, 504) + ) start_time = time.time() try: - cosmos_client.CosmosClient("https://localhost:9999", CRUDTests.masterKey, "Session", connection_policy=connection_policy) + cosmos_client.CosmosClient( + "https://localhost:9999", + CRUDTests.masterKey, + "Session", + connection_retry_policy=retry_policy) self.fail() - except ServiceRequestError as e: + except AzureError as e: end_time = time.time() return end_time - start_time + def 
initialize_client_with_connection_core_retry_config(self, retries): + start_time = time.time() + try: + cosmos_client.CosmosClient( + "https://localhost:9999", + CRUDTests.masterKey, + "Session", + retry_total=retries, + retry_read=retries, + retry_connect=retries, + retry_status=retries) + self.fail() + except AzureError as e: + end_time = time.time() + return end_time - start_time + + def test_absolute_client_timeout(self): + with self.assertRaises(errors.CosmosClientTimeoutError): + cosmos_client.CosmosClient( + "https://localhost:9999", + CRUDTests.masterKey, + "Session", + retry_total=3, + timeout=1) + + error_response = ServiceResponseError("Read timeout") + timeout_transport = TimeoutTransport(error_response) + client = cosmos_client.CosmosClient( + self.host, self.masterKey, "Session", transport=timeout_transport, passthrough=True) + + with self.assertRaises(errors.CosmosClientTimeoutError): + client.create_database_if_not_exists("test", timeout=2) + + status_response = 500 # Users connection level retry + timeout_transport = TimeoutTransport(status_response) + client = cosmos_client.CosmosClient( + self.host, self.masterKey, "Session", transport=timeout_transport, passthrough=True) + with self.assertRaises(errors.CosmosClientTimeoutError): + client.create_database("test", timeout=2) + + databases = client.list_databases(timeout=2) + with self.assertRaises(errors.CosmosClientTimeoutError): + list(databases) + + status_response = 429 # Uses Cosmos custom retry + timeout_transport = TimeoutTransport(status_response) + client = cosmos_client.CosmosClient( + self.host, self.masterKey, "Session", transport=timeout_transport, passthrough=True) + with self.assertRaises(errors.CosmosClientTimeoutError): + client.create_database_if_not_exists("test", timeout=2) + + databases = client.list_databases(timeout=2) + with self.assertRaises(errors.CosmosClientTimeoutError): + list(databases) + + def test_query_iterable_functionality(self): def __create_resources(client): 
"""Creates resources for this test. From dc9ca57880dc795899be36a7abe875bb78d31659 Mon Sep 17 00:00:00 2001 From: annatisch Date: Fri, 4 Oct 2019 15:25:04 -0700 Subject: [PATCH 21/29] [Cosmos] Docs updates (#7626) * Updated sample refs * Added release notes * Remove old rst files * Fixed kwarg formatting --- doc/sphinx/ref/azure.cosmos.base.rst | 7 -- .../ref/azure.cosmos.consistent_hash_ring.rst | 7 -- doc/sphinx/ref/azure.cosmos.constants.rst | 7 -- .../azure.cosmos.cosmos_client_connection.rst | 7 -- .../ref/azure.cosmos.default_retry_policy.rst | 7 -- ...cosmos.endpoint_discovery_retry_policy.rst | 7 -- ...e.cosmos.execution_context.aggregators.rst | 7 -- ...ecution_context.base_execution_context.rst | 7 -- ...os.execution_context.document_producer.rst | 7 -- ...s.execution_context.endpoint_component.rst | 7 -- ...execution_context.execution_dispatcher.rst | 7 -- ...ion_context.multi_execution_aggregator.rst | 7 -- ...execution_context.query_execution_info.rst | 7 -- .../ref/azure.cosmos.execution_context.rst | 23 ----- .../azure.cosmos.global_endpoint_manager.rst | 7 -- .../azure.cosmos.hash_partition_resolver.rst | 7 -- .../ref/azure.cosmos.location_cache.rst | 7 -- doc/sphinx/ref/azure.cosmos.murmur_hash.rst | 7 -- doc/sphinx/ref/azure.cosmos.partition.rst | 7 -- .../ref/azure.cosmos.query_iterable.rst | 7 -- doc/sphinx/ref/azure.cosmos.range.rst | 7 -- .../azure.cosmos.range_partition_resolver.rst | 7 -- .../ref/azure.cosmos.request_object.rst | 7 -- ....cosmos.resource_throttle_retry_policy.rst | 7 -- doc/sphinx/ref/azure.cosmos.retry_options.rst | 7 -- doc/sphinx/ref/azure.cosmos.retry_utility.rst | 7 -- ....cosmos.routing.collection_routing_map.rst | 7 -- ...re.cosmos.routing.routing_map_provider.rst | 7 -- .../azure.cosmos.routing.routing_range.rst | 7 -- doc/sphinx/ref/azure.cosmos.routing.rst | 19 ---- doc/sphinx/ref/azure.cosmos.rst | 32 ------- .../ref/azure.cosmos.runtime_constants.rst | 7 -- doc/sphinx/ref/azure.cosmos.session.rst | 7 -- 
.../ref/azure.cosmos.session_retry_policy.rst | 7 -- .../ref/azure.cosmos.synchronized_request.rst | 7 -- doc/sphinx/ref/azure.cosmos.utils.rst | 7 -- .../ref/azure.cosmos.vector_session_token.rst | 7 -- sdk/cosmos/azure-cosmos/HISTORY.md | 15 +++ .../azure-cosmos/azure/cosmos/container.py | 50 +++++----- .../azure/cosmos/cosmos_client.py | 59 ++++++++---- .../azure-cosmos/azure/cosmos/database.py | 93 ++++++++++--------- 41 files changed, 131 insertions(+), 398 deletions(-) delete mode 100644 doc/sphinx/ref/azure.cosmos.base.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.consistent_hash_ring.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.constants.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.cosmos_client_connection.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.default_retry_policy.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.endpoint_discovery_retry_policy.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.execution_context.aggregators.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.execution_context.base_execution_context.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.execution_context.document_producer.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.execution_context.endpoint_component.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.execution_context.execution_dispatcher.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.execution_context.multi_execution_aggregator.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.execution_context.query_execution_info.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.execution_context.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.global_endpoint_manager.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.hash_partition_resolver.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.location_cache.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.murmur_hash.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.partition.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.query_iterable.rst 
delete mode 100644 doc/sphinx/ref/azure.cosmos.range.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.range_partition_resolver.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.request_object.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.resource_throttle_retry_policy.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.retry_options.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.retry_utility.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.routing.collection_routing_map.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.routing.routing_map_provider.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.routing.routing_range.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.routing.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.runtime_constants.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.session.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.session_retry_policy.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.synchronized_request.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.utils.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.vector_session_token.rst diff --git a/doc/sphinx/ref/azure.cosmos.base.rst b/doc/sphinx/ref/azure.cosmos.base.rst deleted file mode 100644 index c6e3e7ac956d..000000000000 --- a/doc/sphinx/ref/azure.cosmos.base.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.base module -======================== - -.. automodule:: azure.cosmos.base - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.consistent_hash_ring.rst b/doc/sphinx/ref/azure.cosmos.consistent_hash_ring.rst deleted file mode 100644 index e4e19decf6e7..000000000000 --- a/doc/sphinx/ref/azure.cosmos.consistent_hash_ring.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.consistent\_hash\_ring module -========================================== - -.. 
automodule:: azure.cosmos.consistent_hash_ring - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.constants.rst b/doc/sphinx/ref/azure.cosmos.constants.rst deleted file mode 100644 index c929451faff4..000000000000 --- a/doc/sphinx/ref/azure.cosmos.constants.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.constants module -============================= - -.. automodule:: azure.cosmos.constants - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.cosmos_client_connection.rst b/doc/sphinx/ref/azure.cosmos.cosmos_client_connection.rst deleted file mode 100644 index c93e1acb1514..000000000000 --- a/doc/sphinx/ref/azure.cosmos.cosmos_client_connection.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.cosmos\_client\_connection module -============================================== - -.. automodule:: azure.cosmos.cosmos_client_connection - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.default_retry_policy.rst b/doc/sphinx/ref/azure.cosmos.default_retry_policy.rst deleted file mode 100644 index ba6e2b9e272e..000000000000 --- a/doc/sphinx/ref/azure.cosmos.default_retry_policy.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.default\_retry\_policy module -========================================== - -.. automodule:: azure.cosmos.default_retry_policy - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.endpoint_discovery_retry_policy.rst b/doc/sphinx/ref/azure.cosmos.endpoint_discovery_retry_policy.rst deleted file mode 100644 index 1d8317d5901b..000000000000 --- a/doc/sphinx/ref/azure.cosmos.endpoint_discovery_retry_policy.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.endpoint\_discovery\_retry\_policy module -====================================================== - -.. 
automodule:: azure.cosmos.endpoint_discovery_retry_policy - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.execution_context.aggregators.rst b/doc/sphinx/ref/azure.cosmos.execution_context.aggregators.rst deleted file mode 100644 index 038b0f2fe6b6..000000000000 --- a/doc/sphinx/ref/azure.cosmos.execution_context.aggregators.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.execution\_context.aggregators module -================================================== - -.. automodule:: azure.cosmos.execution_context.aggregators - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.execution_context.base_execution_context.rst b/doc/sphinx/ref/azure.cosmos.execution_context.base_execution_context.rst deleted file mode 100644 index 2b1b41fa50d3..000000000000 --- a/doc/sphinx/ref/azure.cosmos.execution_context.base_execution_context.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.execution\_context.base\_execution\_context module -=============================================================== - -.. automodule:: azure.cosmos.execution_context.base_execution_context - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.execution_context.document_producer.rst b/doc/sphinx/ref/azure.cosmos.execution_context.document_producer.rst deleted file mode 100644 index 0cbf680d934a..000000000000 --- a/doc/sphinx/ref/azure.cosmos.execution_context.document_producer.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.execution\_context.document\_producer module -========================================================= - -.. 
automodule:: azure.cosmos.execution_context.document_producer - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.execution_context.endpoint_component.rst b/doc/sphinx/ref/azure.cosmos.execution_context.endpoint_component.rst deleted file mode 100644 index 669d9f0dd0cf..000000000000 --- a/doc/sphinx/ref/azure.cosmos.execution_context.endpoint_component.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.execution\_context.endpoint\_component module -========================================================== - -.. automodule:: azure.cosmos.execution_context.endpoint_component - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.execution_context.execution_dispatcher.rst b/doc/sphinx/ref/azure.cosmos.execution_context.execution_dispatcher.rst deleted file mode 100644 index e98b8b414a00..000000000000 --- a/doc/sphinx/ref/azure.cosmos.execution_context.execution_dispatcher.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.execution\_context.execution\_dispatcher module -============================================================ - -.. automodule:: azure.cosmos.execution_context.execution_dispatcher - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.execution_context.multi_execution_aggregator.rst b/doc/sphinx/ref/azure.cosmos.execution_context.multi_execution_aggregator.rst deleted file mode 100644 index 94c4002103fe..000000000000 --- a/doc/sphinx/ref/azure.cosmos.execution_context.multi_execution_aggregator.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.execution\_context.multi\_execution\_aggregator module -=================================================================== - -.. 
automodule:: azure.cosmos.execution_context.multi_execution_aggregator - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.execution_context.query_execution_info.rst b/doc/sphinx/ref/azure.cosmos.execution_context.query_execution_info.rst deleted file mode 100644 index c80fc761f649..000000000000 --- a/doc/sphinx/ref/azure.cosmos.execution_context.query_execution_info.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.execution\_context.query\_execution\_info module -============================================================= - -.. automodule:: azure.cosmos.execution_context.query_execution_info - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.execution_context.rst b/doc/sphinx/ref/azure.cosmos.execution_context.rst deleted file mode 100644 index c294de2d2e2d..000000000000 --- a/doc/sphinx/ref/azure.cosmos.execution_context.rst +++ /dev/null @@ -1,23 +0,0 @@ -azure.cosmos.execution\_context package -======================================= - -Submodules ----------- - -.. toctree:: - - azure.cosmos.execution_context.aggregators - azure.cosmos.execution_context.base_execution_context - azure.cosmos.execution_context.document_producer - azure.cosmos.execution_context.endpoint_component - azure.cosmos.execution_context.execution_dispatcher - azure.cosmos.execution_context.multi_execution_aggregator - azure.cosmos.execution_context.query_execution_info - -Module contents ---------------- - -.. automodule:: azure.cosmos.execution_context - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.global_endpoint_manager.rst b/doc/sphinx/ref/azure.cosmos.global_endpoint_manager.rst deleted file mode 100644 index 8c6555c51860..000000000000 --- a/doc/sphinx/ref/azure.cosmos.global_endpoint_manager.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.global\_endpoint\_manager module -============================================= - -.. 
automodule:: azure.cosmos.global_endpoint_manager - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.hash_partition_resolver.rst b/doc/sphinx/ref/azure.cosmos.hash_partition_resolver.rst deleted file mode 100644 index 8186b372a0b5..000000000000 --- a/doc/sphinx/ref/azure.cosmos.hash_partition_resolver.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.hash\_partition\_resolver module -============================================= - -.. automodule:: azure.cosmos.hash_partition_resolver - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.location_cache.rst b/doc/sphinx/ref/azure.cosmos.location_cache.rst deleted file mode 100644 index 493f97ded96e..000000000000 --- a/doc/sphinx/ref/azure.cosmos.location_cache.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.location\_cache module -=================================== - -.. automodule:: azure.cosmos.location_cache - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.murmur_hash.rst b/doc/sphinx/ref/azure.cosmos.murmur_hash.rst deleted file mode 100644 index b707b587cdd5..000000000000 --- a/doc/sphinx/ref/azure.cosmos.murmur_hash.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.murmur\_hash module -================================ - -.. automodule:: azure.cosmos.murmur_hash - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.partition.rst b/doc/sphinx/ref/azure.cosmos.partition.rst deleted file mode 100644 index 24dc52be7284..000000000000 --- a/doc/sphinx/ref/azure.cosmos.partition.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.partition module -============================= - -.. 
automodule:: azure.cosmos.partition - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.query_iterable.rst b/doc/sphinx/ref/azure.cosmos.query_iterable.rst deleted file mode 100644 index 201f7eb62de8..000000000000 --- a/doc/sphinx/ref/azure.cosmos.query_iterable.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.query\_iterable module -=================================== - -.. automodule:: azure.cosmos.query_iterable - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.range.rst b/doc/sphinx/ref/azure.cosmos.range.rst deleted file mode 100644 index 1d405b489229..000000000000 --- a/doc/sphinx/ref/azure.cosmos.range.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.range module -========================= - -.. automodule:: azure.cosmos.range - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.range_partition_resolver.rst b/doc/sphinx/ref/azure.cosmos.range_partition_resolver.rst deleted file mode 100644 index 98072b272d88..000000000000 --- a/doc/sphinx/ref/azure.cosmos.range_partition_resolver.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.range\_partition\_resolver module -============================================== - -.. automodule:: azure.cosmos.range_partition_resolver - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.request_object.rst b/doc/sphinx/ref/azure.cosmos.request_object.rst deleted file mode 100644 index 6148122d9d39..000000000000 --- a/doc/sphinx/ref/azure.cosmos.request_object.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.request\_object module -=================================== - -.. 
automodule:: azure.cosmos.request_object - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.resource_throttle_retry_policy.rst b/doc/sphinx/ref/azure.cosmos.resource_throttle_retry_policy.rst deleted file mode 100644 index e0d232ebb7d8..000000000000 --- a/doc/sphinx/ref/azure.cosmos.resource_throttle_retry_policy.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.resource\_throttle\_retry\_policy module -===================================================== - -.. automodule:: azure.cosmos.resource_throttle_retry_policy - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.retry_options.rst b/doc/sphinx/ref/azure.cosmos.retry_options.rst deleted file mode 100644 index 16b91d2d4ab2..000000000000 --- a/doc/sphinx/ref/azure.cosmos.retry_options.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.retry\_options module -================================== - -.. automodule:: azure.cosmos.retry_options - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.retry_utility.rst b/doc/sphinx/ref/azure.cosmos.retry_utility.rst deleted file mode 100644 index 202552095a77..000000000000 --- a/doc/sphinx/ref/azure.cosmos.retry_utility.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.retry\_utility module -================================== - -.. automodule:: azure.cosmos.retry_utility - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.routing.collection_routing_map.rst b/doc/sphinx/ref/azure.cosmos.routing.collection_routing_map.rst deleted file mode 100644 index ae20b7ca66df..000000000000 --- a/doc/sphinx/ref/azure.cosmos.routing.collection_routing_map.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.routing.collection\_routing\_map module -==================================================== - -.. 
automodule:: azure.cosmos.routing.collection_routing_map - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.routing.routing_map_provider.rst b/doc/sphinx/ref/azure.cosmos.routing.routing_map_provider.rst deleted file mode 100644 index 6e5f248b7ebd..000000000000 --- a/doc/sphinx/ref/azure.cosmos.routing.routing_map_provider.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.routing.routing\_map\_provider module -================================================== - -.. automodule:: azure.cosmos.routing.routing_map_provider - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.routing.routing_range.rst b/doc/sphinx/ref/azure.cosmos.routing.routing_range.rst deleted file mode 100644 index 21eb737efa58..000000000000 --- a/doc/sphinx/ref/azure.cosmos.routing.routing_range.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.routing.routing\_range module -========================================== - -.. automodule:: azure.cosmos.routing.routing_range - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.routing.rst b/doc/sphinx/ref/azure.cosmos.routing.rst deleted file mode 100644 index 510474cdcc07..000000000000 --- a/doc/sphinx/ref/azure.cosmos.routing.rst +++ /dev/null @@ -1,19 +0,0 @@ -azure.cosmos.routing package -============================ - -Submodules ----------- - -.. toctree:: - - azure.cosmos.routing.collection_routing_map - azure.cosmos.routing.routing_map_provider - azure.cosmos.routing.routing_range - -Module contents ---------------- - -.. automodule:: azure.cosmos.routing - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.rst b/doc/sphinx/ref/azure.cosmos.rst index c6f7eec47cd0..223c5d09b63e 100644 --- a/doc/sphinx/ref/azure.cosmos.rst +++ b/doc/sphinx/ref/azure.cosmos.rst @@ -1,56 +1,24 @@ azure.cosmos package ==================== -Subpackages ------------ - -.. 
toctree:: - - azure.cosmos.execution_context - azure.cosmos.routing - Submodules ---------- .. toctree:: azure.cosmos.auth - azure.cosmos.base - azure.cosmos.consistent_hash_ring - azure.cosmos.constants azure.cosmos.container azure.cosmos.cosmos_client - azure.cosmos.cosmos_client_connection azure.cosmos.database - azure.cosmos.default_retry_policy azure.cosmos.diagnostics azure.cosmos.documents - azure.cosmos.endpoint_discovery_retry_policy azure.cosmos.errors - azure.cosmos.global_endpoint_manager - azure.cosmos.hash_partition_resolver azure.cosmos.http_constants - azure.cosmos.location_cache - azure.cosmos.murmur_hash azure.cosmos.offer - azure.cosmos.partition azure.cosmos.partition_key azure.cosmos.permission - azure.cosmos.query_iterable - azure.cosmos.range - azure.cosmos.range_partition_resolver - azure.cosmos.request_object - azure.cosmos.resource_throttle_retry_policy - azure.cosmos.retry_options - azure.cosmos.retry_utility - azure.cosmos.runtime_constants azure.cosmos.scripts - azure.cosmos.session - azure.cosmos.session_retry_policy - azure.cosmos.synchronized_request azure.cosmos.user - azure.cosmos.utils - azure.cosmos.vector_session_token Module contents --------------- diff --git a/doc/sphinx/ref/azure.cosmos.runtime_constants.rst b/doc/sphinx/ref/azure.cosmos.runtime_constants.rst deleted file mode 100644 index 9d162692291e..000000000000 --- a/doc/sphinx/ref/azure.cosmos.runtime_constants.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.runtime\_constants module -====================================== - -.. automodule:: azure.cosmos.runtime_constants - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.session.rst b/doc/sphinx/ref/azure.cosmos.session.rst deleted file mode 100644 index 7d1e4cb8d31c..000000000000 --- a/doc/sphinx/ref/azure.cosmos.session.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.session module -=========================== - -.. 
automodule:: azure.cosmos.session - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.session_retry_policy.rst b/doc/sphinx/ref/azure.cosmos.session_retry_policy.rst deleted file mode 100644 index 976d829c39a7..000000000000 --- a/doc/sphinx/ref/azure.cosmos.session_retry_policy.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.session\_retry\_policy module -========================================== - -.. automodule:: azure.cosmos.session_retry_policy - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.synchronized_request.rst b/doc/sphinx/ref/azure.cosmos.synchronized_request.rst deleted file mode 100644 index 3c871df5b2d1..000000000000 --- a/doc/sphinx/ref/azure.cosmos.synchronized_request.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.synchronized\_request module -========================================= - -.. automodule:: azure.cosmos.synchronized_request - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.utils.rst b/doc/sphinx/ref/azure.cosmos.utils.rst deleted file mode 100644 index 9fd2bbc2c9e9..000000000000 --- a/doc/sphinx/ref/azure.cosmos.utils.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.utils module -========================= - -.. automodule:: azure.cosmos.utils - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.vector_session_token.rst b/doc/sphinx/ref/azure.cosmos.vector_session_token.rst deleted file mode 100644 index 57e7959f250f..000000000000 --- a/doc/sphinx/ref/azure.cosmos.vector_session_token.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.vector\_session\_token module -========================================== - -.. 
automodule:: azure.cosmos.vector_session_token - :members: - :undoc-members: - :show-inheritance: diff --git a/sdk/cosmos/azure-cosmos/HISTORY.md b/sdk/cosmos/azure-cosmos/HISTORY.md index 7d0212698a42..46198db224a1 100644 --- a/sdk/cosmos/azure-cosmos/HISTORY.md +++ b/sdk/cosmos/azure-cosmos/HISTORY.md @@ -1,5 +1,20 @@ # Change Log azure-cosmos +## Version 4.0.0b4: + +- Added support for a `timeout` keyword argument to all operations to specify an absolute timeout in seconds + within which the operation must be completed. If the timeout value is exceeded, a `azure.cosmos.errors.CosmosClientTimeoutError` will be raised. +- Added a new `ConnectionRetryPolicy` to manage retry behaviour during HTTP connection errors. +- Added new constructor and per-operation configuration keyword arguments: + -`retry_total` - Maximum retry attempts. + -`retry_backoff_max` - Maximum retry wait time in seconds. + -`retry_fixed_interval` - Fixed retry interval in milliseconds. + -`retry_read` - Maximum number of socket read retry attempts. + -`retry_connect` - Maximum number of connection error retry attempts. + -`retry_status` - Maximum number of retry attempts on error status codes. + -`retry_on_status_codes` - A list of specific status codes to retry on. + -`retry_backoff_factor` - Factor to calculate wait time between retry attempts. + ## Version 4.0.0b3: - Added `create_database_if_not_exists()` and `create_container_if_not_exists` functionalities to CosmosClient and Database respectively. diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py index f2418eb15d76..5e03290cfde3 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py @@ -176,14 +176,15 @@ def read_item( :raise `CosmosHttpResponseError`: If the given item couldn't be retrieved. :rtype: dict[str, Any] - .. 
literalinclude:: ../../samples/examples.py - :start-after: [START update_item] - :end-before: [END update_item] - :language: python - :dedent: 0 - :caption: Get an item from the database and update one of its properties: - :name: update_item - + .. admonition:: Example: + + .. literalinclude:: ../../samples/examples.py + :start-after: [START update_item] + :end-before: [END update_item] + :language: python + :dedent: 0 + :caption: Get an item from the database and update one of its properties: + :name: update_item """ doc_link = self._get_document_link(item) request_options = build_options(kwargs) @@ -320,22 +321,23 @@ def query_items( :returns: An Iterable of items (dicts). :rtype: Iterable[dict[str, Any]] - .. literalinclude:: ../../samples/examples.py - :start-after: [START query_items] - :end-before: [END query_items] - :language: python - :dedent: 0 - :caption: Get all products that have not been discontinued: - :name: query_items - - .. literalinclude:: ../../samples/examples.py - :start-after: [START query_items_param] - :end-before: [END query_items_param] - :language: python - :dedent: 0 - :caption: Parameterized query to get all products that have been discontinued: - :name: query_items_param - + .. admonition:: Example: + + .. literalinclude:: ../../samples/examples.py + :start-after: [START query_items] + :end-before: [END query_items] + :language: python + :dedent: 0 + :caption: Get all products that have not been discontinued: + :name: query_items + + .. 
literalinclude:: ../../samples/examples.py + :start-after: [START query_items_param] + :end-before: [END query_items_param] + :language: python + :dedent: 0 + :caption: Parameterized query to get all products that have been discontinued: + :name: query_items_param """ feed_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py index 28536884dd4b..82ff11459b3c 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py @@ -134,38 +134,60 @@ class CosmosClient(object): **Keyword arguments:** + *timeout* - An absolute timeout in seconds, for the combined HTTP request and response processing. + *request_timeout* - The HTTP request timeout in seconds. + *media_request_timeout* - The media request timeout in seconds. + *connection_mode* - The connection mode for the client - currently only supports 'Gateway'. + *media_read_mode* - The mode for use with downloading attachment content - default value is `Buffered`. - *proxy_config* - Instance of ~azure.cosmos.documents.ProxyConfiguration - *ssl_config* - Instance of ~azure.cosmos.documents.SSLConfiguration + + *proxy_config* - Instance of ~azure.cosmos.documents.ProxyConfiguration. + + *ssl_config* - Instance of ~azure.cosmos.documents.SSLConfiguration. + *connection_verify* - Whether to verify the connection, default value is True. + *connection_cert* - An alternative certificate to verify the connection. + *retry_total* - Maximum retry attempts. + *retry_backoff_max* - Maximum retry wait time in seconds. + *retry_fixed_interval* - Fixed retry interval in milliseconds. + *retry_read* - Maximum number of socket read retry attempts. + *retry_connect* - Maximum number of connection error retry attempts. + *retry_status* - Maximum number of retry attempts on error status codes. 
+ *retry_on_status_codes* - A list of specific status codes to retry on. + *retry_backoff_factor* - Factor to calculate wait time between retry attempts. + *enable_endpoint_discovery* - Enable endpoint discovery for geo-replicated database accounts. Default is True. + *preferred_locations* - The preferred locations for geo-replicated database accounts. When `enable_endpoint_discovery` is true and `preferred_locations` is non-empty, the client will use this list to evaluate the final location, taking into consideration the order specified in `preferred_locations` list. The locations in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US', 'Central India' and so on. + *connection_policy* - An instance of ~azure.cosmos.documents.ConnectionPolicy - .. literalinclude:: ../../samples/examples.py - :start-after: [START create_client] - :end-before: [END create_client] - :language: python - :dedent: 0 - :caption: Create a new instance of the Cosmos DB client: - :name: create_client + .. admonition:: Example: + + .. literalinclude:: ../../samples/examples.py + :start-after: [START create_client] + :end-before: [END create_client] + :language: python + :dedent: 0 + :caption: Create a new instance of the Cosmos DB client: + :name: create_client """ def __init__(self, url, credential, consistency_level="Session", **kwargs): @@ -243,14 +265,15 @@ def create_database( # pylint: disable=redefined-builtin :rtype: ~azure.cosmos.database.DatabaseProxy :raises `CosmosResourceExistsError`: If database with the given ID already exists. - .. literalinclude:: ../../samples/examples.py - :start-after: [START create_database] - :end-before: [END create_database] - :language: python - :dedent: 0 - :caption: Create a database in the Cosmos DB account: - :name: create_database + .. admonition:: Example: + .. 
literalinclude:: ../../samples/examples.py + :start-after: [START create_database] + :end-before: [END create_database] + :language: python + :dedent: 0 + :caption: Create a database in the Cosmos DB account: + :name: create_database """ request_options = build_options(kwargs) @@ -341,9 +364,9 @@ def list_databases( :param int max_item_count: Max number of items to be returned in the enumeration operation. :param str session_token: Token for use with Session consistency. - :param dict[str, str] initial_headers: Initial headers to be sent as part of the request. + :param dict(str, str) initial_headers: Initial headers to be sent as part of the request. :param bool populate_query_metrics: Enable returning query metrics in response headers. - :param dict[str, str] feed_options: Dictionary of additional properties to be used for the request. + :param dict(str, str) feed_options: Dictionary of additional properties to be used for the request. :param Callable response_hook: a callable invoked with the response metadata :returns: An Iterable of database properties (dicts). :rtype: Iterable[dict[str, str]] diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/database.py b/sdk/cosmos/azure-cosmos/azure/cosmos/database.py index 17331a7b28c7..c7c70945579b 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/database.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/database.py @@ -177,22 +177,23 @@ def create_container( :raise CosmosHttpResponseError: The container creation failed. :rtype: ~azure.cosmos.container.ContainerProxy - .. literalinclude:: ../../samples/examples.py - :start-after: [START create_container] - :end-before: [END create_container] - :language: python - :dedent: 0 - :caption: Create a container with default settings: - :name: create_container - - .. 
literalinclude:: ../../samples/examples.py - :start-after: [START create_container_with_settings] - :end-before: [END create_container_with_settings] - :language: python - :dedent: 0 - :caption: Create a container with specific settings; in this case, a custom partition key: - :name: create_container_with_settings - + .. admonition:: Example: + + .. literalinclude:: ../../samples/examples.py + :start-after: [START create_container] + :end-before: [END create_container] + :language: python + :dedent: 0 + :caption: Create a container with default settings: + :name: create_container + + .. literalinclude:: ../../samples/examples.py + :start-after: [START create_container_with_settings] + :end-before: [END create_container_with_settings] + :language: python + :dedent: 0 + :caption: Create a container with specific settings; in this case, a custom partition key: + :name: create_container_with_settings """ definition = dict(id=id) # type: Dict[str, Any] if partition_key: @@ -322,14 +323,15 @@ def get_container_client(self, container): or a dict representing the properties of the container to be retrieved. :rtype: ~azure.cosmos.container.ContainerProxy - .. literalinclude:: ../../samples/examples.py - :start-after: [START get_container] - :end-before: [END get_container] - :language: python - :dedent: 0 - :caption: Get an existing container, handling a failure if encountered: - :name: get_container + .. admonition:: Example: + .. literalinclude:: ../../samples/examples.py + :start-after: [START get_container] + :end-before: [END get_container] + :language: python + :dedent: 0 + :caption: Get an existing container, handling a failure if encountered: + :name: get_container """ if isinstance(container, ContainerProxy): id_value = container.id @@ -355,14 +357,15 @@ def list_containers(self, max_item_count=None, populate_query_metrics=None, **kw :returns: An Iterable of container properties (dicts). :rtype: Iterable[dict[str, Any]] - .. 
literalinclude:: ../../samples/examples.py - :start-after: [START list_containers] - :end-before: [END list_containers] - :language: python - :dedent: 0 - :caption: List all containers in the database: - :name: list_containers + .. admonition:: Example: + .. literalinclude:: ../../samples/examples.py + :start-after: [START list_containers] + :end-before: [END list_containers] + :language: python + :dedent: 0 + :caption: List all containers in the database: + :name: list_containers """ feed_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) @@ -453,14 +456,15 @@ def replace_container( :returns: A `ContainerProxy` instance representing the container after replace completed. :rtype: ~azure.cosmos.container.ContainerProxy - .. literalinclude:: ../../samples/examples.py - :start-after: [START reset_container_properties] - :end-before: [END reset_container_properties] - :language: python - :dedent: 0 - :caption: Reset the TTL property on a container, and display the updated properties: - :name: reset_container_properties + .. admonition:: Example: + .. literalinclude:: ../../samples/examples.py + :start-after: [START reset_container_properties] + :end-before: [END reset_container_properties] + :language: python + :dedent: 0 + :caption: Reset the TTL property on a container, and display the updated properties: + :name: reset_container_properties """ request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) @@ -580,14 +584,15 @@ def create_user(self, body, **kwargs): :raise `CosmosHttpResponseError`: If the given user couldn't be created. :rtype: ~azure.cosmos.user.UserProxy - .. literalinclude:: ../../samples/examples.py - :start-after: [START create_user] - :end-before: [END create_user] - :language: python - :dedent: 0 - :caption: Create a database user: - :name: create_user + .. admonition:: Example: + .. 
literalinclude:: ../../samples/examples.py + :start-after: [START create_user] + :end-before: [END create_user] + :language: python + :dedent: 0 + :caption: Create a database user: + :name: create_user """ request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) From ad54c4ffecec838cfc0b303773d56e69801ca7ca Mon Sep 17 00:00:00 2001 From: Daniel Jurek Date: Mon, 7 Oct 2019 11:52:13 -0700 Subject: [PATCH 22/29] add sdk tools repo (#7656) --- sdk/cosmos/tests.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/sdk/cosmos/tests.yml b/sdk/cosmos/tests.yml index 0278eee4e8dc..ac722f02a3bc 100644 --- a/sdk/cosmos/tests.yml +++ b/sdk/cosmos/tests.yml @@ -1,5 +1,12 @@ trigger: none +resources: + repositories: + - repository: azure-sdk-tools + type: github + name: Azure/azure-sdk-tools + endpoint: azure + jobs: - template: ../../eng/pipelines/templates/jobs/archetype-sdk-tests.yml parameters: From 81d96ce247196ad66388af887cbd21aaa87c56c3 Mon Sep 17 00:00:00 2001 From: annatisch Date: Mon, 7 Oct 2019 20:37:19 -0700 Subject: [PATCH 23/29] [Cosmos] More docs cleanup (#7661) * Removed old docs * Bumped core version * Removed submodule docs * Fixed imports * Pylint fix * More docs updates * Fixed docstring types * Docstring formatting * Updated snippet references * Fixed exception docs * More exception docstrings * Review feedback --- doc/sphinx/ref/azure.cosmos.auth.rst | 7 - doc/sphinx/ref/azure.cosmos.container.rst | 7 - doc/sphinx/ref/azure.cosmos.cosmos_client.rst | 7 - doc/sphinx/ref/azure.cosmos.database.rst | 7 - doc/sphinx/ref/azure.cosmos.documents.rst | 7 - doc/sphinx/ref/azure.cosmos.offer.rst | 7 - doc/sphinx/ref/azure.cosmos.partition_key.rst | 7 - doc/sphinx/ref/azure.cosmos.permission.rst | 7 - doc/sphinx/ref/azure.cosmos.rst | 10 - doc/sphinx/ref/azure.cosmos.scripts.rst | 7 - doc/sphinx/ref/azure.cosmos.user.rst | 7 - sdk/cosmos/azure-cosmos/MANIFEST.in | 5 +- sdk/cosmos/azure-cosmos/README.md | 16 +- 
.../azure-cosmos/azure/cosmos/__init__.py | 4 + .../azure/cosmos/_cosmos_client_connection.py | 2 +- .../azure/cosmos/_retry_utility.py | 6 +- .../azure-cosmos/azure/cosmos/container.py | 39 +-- .../azure/cosmos/cosmos_client.py | 74 +++--- .../azure-cosmos/azure/cosmos/database.py | 65 ++--- .../azure-cosmos/azure/cosmos/scripts.py | 36 +-- sdk/cosmos/azure-cosmos/azure/cosmos/user.py | 15 +- sdk/cosmos/azure-cosmos/doc/Makefile | 177 ------------- sdk/cosmos/azure-cosmos/doc/__init__.py | 0 sdk/cosmos/azure-cosmos/doc/conf.py | 227 ---------------- sdk/cosmos/azure-cosmos/doc/index.rst | 78 ------ sdk/cosmos/azure-cosmos/doc/make.bat | 242 ------------------ sdk/cosmos/azure-cosmos/requirements.txt | 2 +- sdk/cosmos/azure-cosmos/setup.py | 3 +- shared_requirements.txt | 1 - 29 files changed, 142 insertions(+), 930 deletions(-) delete mode 100644 doc/sphinx/ref/azure.cosmos.auth.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.container.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.cosmos_client.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.database.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.documents.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.offer.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.partition_key.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.permission.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.scripts.rst delete mode 100644 doc/sphinx/ref/azure.cosmos.user.rst delete mode 100644 sdk/cosmos/azure-cosmos/doc/Makefile delete mode 100644 sdk/cosmos/azure-cosmos/doc/__init__.py delete mode 100644 sdk/cosmos/azure-cosmos/doc/conf.py delete mode 100644 sdk/cosmos/azure-cosmos/doc/index.rst delete mode 100644 sdk/cosmos/azure-cosmos/doc/make.bat diff --git a/doc/sphinx/ref/azure.cosmos.auth.rst b/doc/sphinx/ref/azure.cosmos.auth.rst deleted file mode 100644 index 5b51703e86b3..000000000000 --- a/doc/sphinx/ref/azure.cosmos.auth.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.auth module -======================== - 
-.. automodule:: azure.cosmos.auth - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.container.rst b/doc/sphinx/ref/azure.cosmos.container.rst deleted file mode 100644 index 4bdd4a3f664d..000000000000 --- a/doc/sphinx/ref/azure.cosmos.container.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.container module -============================= - -.. automodule:: azure.cosmos.container - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.cosmos_client.rst b/doc/sphinx/ref/azure.cosmos.cosmos_client.rst deleted file mode 100644 index 6e3d7e333c4c..000000000000 --- a/doc/sphinx/ref/azure.cosmos.cosmos_client.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.cosmos\_client module -================================== - -.. automodule:: azure.cosmos.cosmos_client - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.database.rst b/doc/sphinx/ref/azure.cosmos.database.rst deleted file mode 100644 index f76ecaa24b69..000000000000 --- a/doc/sphinx/ref/azure.cosmos.database.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.database module -============================ - -.. automodule:: azure.cosmos.database - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.documents.rst b/doc/sphinx/ref/azure.cosmos.documents.rst deleted file mode 100644 index 0f0258fe9784..000000000000 --- a/doc/sphinx/ref/azure.cosmos.documents.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.documents module -============================= - -.. automodule:: azure.cosmos.documents - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.offer.rst b/doc/sphinx/ref/azure.cosmos.offer.rst deleted file mode 100644 index 4cd12bfda7a1..000000000000 --- a/doc/sphinx/ref/azure.cosmos.offer.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.offer module -========================= - -.. 
automodule:: azure.cosmos.offer - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.partition_key.rst b/doc/sphinx/ref/azure.cosmos.partition_key.rst deleted file mode 100644 index 4ba3b919078a..000000000000 --- a/doc/sphinx/ref/azure.cosmos.partition_key.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.partition\_key module -================================== - -.. automodule:: azure.cosmos.partition_key - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.permission.rst b/doc/sphinx/ref/azure.cosmos.permission.rst deleted file mode 100644 index e3d4b968a4f1..000000000000 --- a/doc/sphinx/ref/azure.cosmos.permission.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.permission module -============================== - -.. automodule:: azure.cosmos.permission - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.rst b/doc/sphinx/ref/azure.cosmos.rst index 223c5d09b63e..1a00e80192dc 100644 --- a/doc/sphinx/ref/azure.cosmos.rst +++ b/doc/sphinx/ref/azure.cosmos.rst @@ -6,19 +6,9 @@ Submodules .. toctree:: - azure.cosmos.auth - azure.cosmos.container - azure.cosmos.cosmos_client - azure.cosmos.database azure.cosmos.diagnostics - azure.cosmos.documents azure.cosmos.errors azure.cosmos.http_constants - azure.cosmos.offer - azure.cosmos.partition_key - azure.cosmos.permission - azure.cosmos.scripts - azure.cosmos.user Module contents --------------- diff --git a/doc/sphinx/ref/azure.cosmos.scripts.rst b/doc/sphinx/ref/azure.cosmos.scripts.rst deleted file mode 100644 index 7b2b9bdd80cb..000000000000 --- a/doc/sphinx/ref/azure.cosmos.scripts.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.scripts module -=========================== - -.. 
automodule:: azure.cosmos.scripts - :members: - :undoc-members: - :show-inheritance: diff --git a/doc/sphinx/ref/azure.cosmos.user.rst b/doc/sphinx/ref/azure.cosmos.user.rst deleted file mode 100644 index 1d4b665562fc..000000000000 --- a/doc/sphinx/ref/azure.cosmos.user.rst +++ /dev/null @@ -1,7 +0,0 @@ -azure.cosmos.user module -======================== - -.. automodule:: azure.cosmos.user - :members: - :undoc-members: - :show-inheritance: diff --git a/sdk/cosmos/azure-cosmos/MANIFEST.in b/sdk/cosmos/azure-cosmos/MANIFEST.in index d9252e22d644..e17b88ae2710 100644 --- a/sdk/cosmos/azure-cosmos/MANIFEST.in +++ b/sdk/cosmos/azure-cosmos/MANIFEST.in @@ -2,8 +2,5 @@ include README.md include HISTORY.md include LICENSE.txt include azure/__init__.py -recursive-include doc *.bat -recursive-include doc *.py -recursive-include doc *.rst -recursive-include doc Makefile +include samples/examples.py recursive-include test *.py \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/README.md b/sdk/cosmos/azure-cosmos/README.md index 271f0a378191..bfb8b304d3e4 100644 --- a/sdk/cosmos/azure-cosmos/README.md +++ b/sdk/cosmos/azure-cosmos/README.md @@ -63,7 +63,7 @@ export ACCOUNT_KEY=$(az cosmosdb list-keys --resource-group $RES_GROUP --name $A Once you've populated the `ACCOUNT_URI` and `ACCOUNT_KEY` environment variables, you can create the [CosmosClient][ref_cosmosclient]. 
```Python -from azure.cosmos import CosmosClient, Container, Database, PartitionKey, errors +from azure.cosmos import CosmosClient, PartitionKey, errors import os url = os.environ['ACCOUNT_URI'] @@ -271,14 +271,14 @@ For more extensive documentation on the Cosmos DB service, see the [Azure Cosmos [cosmos_sql_queries]: https://docs.microsoft.com/azure/cosmos-db/how-to-sql-query [cosmos_ttl]: https://docs.microsoft.com/azure/cosmos-db/time-to-live [python]: https://www.python.org/downloads/ -[ref_container_delete_item]: https://azure.github.io/azure-sdk-for-python/ref/azure.cosmos.container.html#azure.cosmos.container.ContainerProxy.delete_item -[ref_container_query_items]: https://azure.github.io/azure-sdk-for-python/ref/azure.cosmos.container.html#azure.cosmos.container.ContainerProxy.query_items -[ref_container_upsert_item]: https://azure.github.io/azure-sdk-for-python/ref/azure.cosmos.container.html#azure.cosmos.container.ContainerProxy.upsert_item -[ref_container]: https://azure.github.io/azure-sdk-for-python/ref/azure.cosmos.container.html +[ref_container_delete_item]: https://azure.github.io/azure-sdk-for-python/ref/azure.cosmos.html#azure.cosmos.ContainerProxy.delete_item +[ref_container_query_items]: https://azure.github.io/azure-sdk-for-python/ref/azure.cosmos.html#azure.cosmos.ContainerProxy.query_items +[ref_container_upsert_item]: https://azure.github.io/azure-sdk-for-python/ref/azure.cosmos.html#azure.cosmos.ContainerProxy.upsert_item +[ref_container]: https://azure.github.io/azure-sdk-for-python/ref/azure.cosmos.html#azure.cosmos.ContainerProxy [ref_cosmos_sdk]: https://azure.github.io/azure-sdk-for-python/ref/azure.cosmos.html -[ref_cosmosclient_create_database]: https://azure.github.io/azure-sdk-for-python/ref/azure.cosmos.cosmos_client.html#azure.cosmos.cosmos_client.CosmosClient.create_database -[ref_cosmosclient]: https://azure.github.io/azure-sdk-for-python/ref/azure.cosmos.cosmos_client.html -[ref_database]: 
https://azure.github.io/azure-sdk-for-python/ref/azure.cosmos.database.html +[ref_cosmosclient_create_database]: https://azure.github.io/azure-sdk-for-python/ref/azure.cosmos.html#azure.cosmos.CosmosClient.create_database +[ref_cosmosclient]: https://azure.github.io/azure-sdk-for-python/ref/azure.cosmos.html#azure.cosmos.CosmosClient +[ref_database]: https://azure.github.io/azure-sdk-for-python/ref/azure.cosmos.html#azure.cosmos.DatabaseProxy [ref_httpfailure]: https://azure.github.io/azure-sdk-for-python/ref/azure.cosmos.errors.html#azure.cosmos.errors.CosmosHttpResponseError [sample_database_mgmt]: https://github.com/Azure/azure-sdk-for-python/tree/master/sdk/cosmos/azure-cosmos/samples/DatabaseManagement [sample_document_mgmt]: https://github.com/Azure/azure-sdk-for-python/tree/master/sdk/cosmos/azure-cosmos/samples/DocumentManagement diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/__init__.py b/sdk/cosmos/azure-cosmos/azure/cosmos/__init__.py index 91182d089e7b..1f73d170e3c4 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/__init__.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/__init__.py @@ -25,6 +25,7 @@ from .database import DatabaseProxy from .user import UserProxy from .scripts import ScriptsProxy +from .offer import Offer from .documents import ( ConsistencyLevel, DataType, @@ -35,6 +36,7 @@ SSLConfiguration, TriggerOperation, TriggerType, + DatabaseAccount, ) from .partition_key import PartitionKey from .permission import Permission @@ -48,6 +50,8 @@ "Permission", "ScriptsProxy", "UserProxy", + "Offer", + "DatabaseAccount", "ConsistencyLevel", "DataType", "IndexKind", diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py index de0eeb4ed394..52e3bd3cf5ff 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py @@ -36,8 +36,8 @@ UserAgentPolicy, 
NetworkTraceLoggingPolicy, CustomHookPolicy, + DistributedTracingPolicy, ProxyPolicy) -from azure.core.pipeline.policies.distributed_tracing import DistributedTracingPolicy # type: ignore from . import _base as base from . import documents diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_retry_utility.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_retry_utility.py index 20362d09aa32..ef67a7b69fbd 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_retry_utility.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_retry_utility.py @@ -160,9 +160,9 @@ def send(self, request): :type request: ~azure.core.pipeline.PipelineRequest :return: Returns the PipelineResponse or raises error if maximum retries exceeded. :rtype: ~azure.core.pipeline.PipelineResponse - :raises: ~azure.core.exceptions.AzureError if maximum retries exceeded. - :raises: ~azure.cosmos.CosmosClientTimeoutError if specified timeout exceeded. - :raises: ~azure.core.exceptions.ClientAuthenticationError if authentication + :raises ~azure.core.exceptions.AzureError: Maximum retries exceeded. + :raises ~azure.cosmos.errors.CosmosClientTimeoutError: Specified timeout exceeded. + :raises ~azure.core.exceptions.ClientAuthenticationError: Authentication failed. """ absolute_timeout = request.context.options.pop('timeout', None) per_request_timeout = request.context.options.pop('connection_timeout', 0) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py index 5e03290cfde3..73441d19f5ab 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py @@ -127,8 +127,8 @@ def read( :param populate_quota_info: Enable returning collection storage quota information in response headers. :param request_options: Dictionary of additional properties to be used for the request. 
:param response_hook: a callable invoked with the response metadata - :raise `CosmosHttpResponseError`: Raised if the container couldn't be retrieved. This includes - if the container does not exist. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: Raised if the container couldn't be retrieved. + This includes if the container does not exist. :returns: Dict representing the retrieved container. :rtype: dict[str, Any] """ @@ -173,12 +173,12 @@ def read_item( :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: Dict representing the item to be retrieved. - :raise `CosmosHttpResponseError`: If the given item couldn't be retrieved. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: The given item couldn't be retrieved. :rtype: dict[str, Any] .. admonition:: Example: - .. literalinclude:: ../../samples/examples.py + .. literalinclude:: ../samples/examples.py :start-after: [START update_item] :end-before: [END update_item] :language: python @@ -323,7 +323,7 @@ def query_items( .. admonition:: Example: - .. literalinclude:: ../../samples/examples.py + .. literalinclude:: ../samples/examples.py :start-after: [START query_items] :end-before: [END query_items] :language: python @@ -331,7 +331,7 @@ def query_items( :caption: Get all products that have not been discontinued: :name: query_items - .. literalinclude:: ../../samples/examples.py + .. literalinclude:: ../samples/examples.py :start-after: [START query_items_param] :end-before: [END query_items_param] :language: python @@ -392,7 +392,8 @@ def replace_item( :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A dict representing the item after replace went through. - :raise `CosmosHttpResponseError`: If the replace failed or the item with given id does not exist. 
+ :raises ~azure.cosmos.errors.CosmosHttpResponseError: The replace failed or the item with + given id does not exist. :rtype: dict[str, Any] """ item_link = self._get_document_link(item) @@ -437,7 +438,7 @@ def upsert_item( :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A dict representing the upserted item. - :raise `CosmosHttpResponseError`: If the given item could not be upserted. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: The given item could not be upserted. :rtype: dict[str, Any] """ request_options = build_options(kwargs) @@ -482,7 +483,7 @@ def create_item( :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A dict representing the new item. - :raises `CosmosHttpResponseError`: If item with the given ID already exists. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: Item with the given ID already exists. :rtype: dict[str, Any] """ request_options = build_options(kwargs) @@ -529,8 +530,8 @@ def delete_item( :param post_trigger_include: trigger id to be used as post operation trigger. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :raises `CosmosHttpResponseError`: The item wasn't deleted successfully. If the item does not - exist in the container, a `404` error is returned. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: The item wasn't deleted successfully. + :raises ~azure.cosmos.errors.CosmosResourceNotFoundError: The item does not exist in the container. :rtype: None """ request_options = build_options(kwargs) @@ -557,8 +558,9 @@ def read_offer(self, **kwargs): :param response_hook: a callable invoked with the response metadata :returns: Offer for the container. 
- :raise CosmosHttpResponseError: If no offer exists for the container or if the offer could not be retrieved. - :rtype: ~azure.cosmos.offer.Offer + :raises ~azure.cosmos.errors.CosmosHttpResponseError: No offer exists for the container or + the offer could not be retrieved. + :rtype: ~azure.cosmos.Offer """ response_hook = kwargs.pop('response_hook', None) properties = self._get_properties() @@ -587,8 +589,9 @@ def replace_throughput(self, throughput, **kwargs): :param throughput: The throughput to be set (an integer). :param response_hook: a callable invoked with the response metadata :returns: Offer for the container, updated with new throughput. - :raise CosmosHttpResponseError: If no offer exists for the container or if the offer could not be updated. - :rtype: ~azure.cosmos.offer.Offer + :raises ~azure.cosmos.errors.CosmosHttpResponseError: No offer exists for the container + or the offer could not be updated. + :rtype: ~azure.cosmos.Offer """ response_hook = kwargs.pop('response_hook', None) properties = self._get_properties() @@ -691,7 +694,7 @@ def get_conflict(self, conflict, partition_key, **kwargs): :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A dict representing the retrieved conflict. - :raise `CosmosHttpResponseError`: If the given conflict couldn't be retrieved. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: The given conflict couldn't be retrieved. :rtype: dict[str, Any] """ request_options = build_options(kwargs) @@ -716,8 +719,8 @@ def delete_conflict(self, conflict, partition_key, **kwargs): :param partition_key: Partition key for the conflict to delete. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :raises `CosmosHttpResponseError`: The conflict wasn't deleted successfully. 
If the conflict - does not exist in the container, a `404` error is returned. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: The conflict wasn't deleted successfully. + :raises ~azure.cosmos.errors.CosmosResourceNotFoundError: The conflict does not exist in the container. :rtype: None """ request_options = build_options(kwargs) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py index 82ff11459b3c..c7065f8f8e5b 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py @@ -128,7 +128,7 @@ class CosmosClient(object): :param str url: The URL of the Cosmos DB account. :param credential: Can be the account key, or a dictionary of resource tokens. - :type credential: str or dict(str, str) + :type credential: str or dict[str, str] :param str consistency_level: Consistency level to use for the session. The default value is "Session". @@ -144,9 +144,9 @@ class CosmosClient(object): *media_read_mode* - The mode for use with downloading attachment content - default value is `Buffered`. - *proxy_config* - Instance of ~azure.cosmos.documents.ProxyConfiguration. + *proxy_config* - Instance of `azure.cosmos.ProxyConfiguration`. - *ssl_config* - Instance of ~azure.cosmos.documents.SSLConfiguration. + *ssl_config* - Instance of `azure.cosmos.SSLConfiguration`. *connection_verify* - Whether to verify the connection, default value is True. @@ -171,17 +171,12 @@ class CosmosClient(object): *enable_endpoint_discovery* - Enable endpoint discovery for geo-replicated database accounts. Default is True. *preferred_locations* - The preferred locations for geo-replicated database accounts. - When `enable_endpoint_discovery` is true and `preferred_locations` is non-empty, - the client will use this list to evaluate the final location, taking into consideration - the order specified in `preferred_locations` list. 
The locations in this list are specified - as the names of the azure Cosmos locations like, 'West US', 'East US', 'Central India' - and so on. - *connection_policy* - An instance of ~azure.cosmos.documents.ConnectionPolicy + *connection_policy* - An instance of `azure.cosmos.documents.ConnectionPolicy` .. admonition:: Example: - .. literalinclude:: ../../samples/examples.py + .. literalinclude:: ../samples/examples.py :start-after: [START create_client] :end-before: [END create_client] :language: python @@ -255,19 +250,22 @@ def create_database( # pylint: disable=redefined-builtin :param id: ID (name) of the database to create. :param str session_token: Token for use with Session consistency. - :param dict(str, str) initial_headers: Initial headers to be sent as part of the request. - :param dict(str, str) access_condition: Conditions Associated with the request. + :param initial_headers: Initial headers to be sent as part of the request. + :type initial_headers: dict[str, str] + :param access_condition: Conditions Associated with the request. + :type access_condition: dict[str, str] :param bool populate_query_metrics: Enable returning query metrics in response headers. :param int offer_throughput: The provisioned throughput for this offer. - :param dict(str, Any) request_options: Dictionary of additional properties to be used for the request. + :param request_options: Dictionary of additional properties to be used for the request. + :type request_options: dict[str, Any] :param Callable response_hook: a callable invoked with the response metadata :returns: A DatabaseProxy instance representing the new database. - :rtype: ~azure.cosmos.database.DatabaseProxy - :raises `CosmosResourceExistsError`: If database with the given ID already exists. + :rtype: ~azure.cosmos.DatabaseProxy + :raises ~azure.cosmos.errors.CosmosResourceExistsError: Database with the given ID already exists. .. admonition:: Example: - .. literalinclude:: ../../samples/examples.py + .. 
literalinclude:: ../samples/examples.py :start-after: [START create_database] :end-before: [END create_database] :language: python @@ -306,15 +304,18 @@ def create_database_if_not_exists( # pylint: disable=redefined-builtin :param id: ID (name) of the database to read or create. :param str session_token: Token for use with Session consistency. - :param dict(str, str) initial_headers: Initial headers to be sent as part of the request. - :param dict(str, str) access_condition: Conditions Associated with the request. + :param initial_headers: Initial headers to be sent as part of the request. + :type initial_headers: dict[str, str] + :param access_condition: Conditions Associated with the request. + :type access_condition: dict[str, str] :param bool populate_query_metrics: Enable returning query metrics in response headers. :param int offer_throughput: The provisioned throughput for this offer. - :param dict(str, Any) request_options: Dictionary of additional properties to be used for the request. + :param request_options: Dictionary of additional properties to be used for the request. + :type request_options: dict[str, Any] :param Callable response_hook: a callable invoked with the response metadata :returns: A DatabaseProxy instance representing the database. - :rtype: ~azure.cosmos.database.DatabaseProxy - :raise CosmosHttpResponseError: The database read or creation failed. + :rtype: ~azure.cosmos.DatabaseProxy + :raises ~azure.cosmos.errors.CosmosHttpResponseError: The database read or creation failed. """ try: database_proxy = self.get_database_client(id) @@ -338,9 +339,9 @@ def get_database_client(self, database): :param database: The ID (name), dict representing the properties or `DatabaseProxy` instance of the database to read. - :type database: str or dict(str, str) or ~azure.cosmos.database.DatabaseProxy + :type database: str or dict(str, str) or ~azure.cosmos.DatabaseProxy :returns: A `DatabaseProxy` instance representing the retrieved database. 
- :rtype: ~azure.cosmos.database.DatabaseProxy + :rtype: ~azure.cosmos.DatabaseProxy """ if isinstance(database, DatabaseProxy): id_value = database.id @@ -364,9 +365,11 @@ def list_databases( :param int max_item_count: Max number of items to be returned in the enumeration operation. :param str session_token: Token for use with Session consistency. - :param dict(str, str) initial_headers: Initial headers to be sent as part of the request. + :param initial_headers: Initial headers to be sent as part of the request. + :type initial_headers: dict[str, str] :param bool populate_query_metrics: Enable returning query metrics in response headers. - :param dict(str, str) feed_options: Dictionary of additional properties to be used for the request. + :param feed_options: Dictionary of additional properties to be used for the request. + :type feed_options: dict[str, str] :param Callable response_hook: a callable invoked with the response metadata :returns: An Iterable of database properties (dicts). :rtype: Iterable[dict[str, str]] @@ -403,9 +406,11 @@ def query_databases( served as indexing was opted out on the requested paths. :param int max_item_count: Max number of items to be returned in the enumeration operation. :param str session_token: Token for use with Session consistency. - :param dict[str, str] initial_headers: Initial headers to be sent as part of the request. + :param initial_headers: Initial headers to be sent as part of the request. + :type initial_headers: dict[str, str] :param bool populate_query_metrics: Enable returning query metrics in response headers. - :param dict[str, Any] feed_options: Dictionary of additional properties to be used for the request. + :param feed_options: Dictionary of additional properties to be used for the request. + :type feed_options: dict[str, Any] :param Callable response_hook: a callable invoked with the response metadata :returns: An Iterable of database properties (dicts). 
:rtype: Iterable[dict[str, str]] @@ -446,14 +451,17 @@ def delete_database( :param database: The ID (name), dict representing the properties or :class:`DatabaseProxy` instance of the database to delete. - :type database: str or dict(str, str) or ~azure.cosmos.database.DatabaseProxy + :type database: str or dict(str, str) or ~azure.cosmos.DatabaseProxy :param str session_token: Token for use with Session consistency. - :param dict[str, str] initial_headers: Initial headers to be sent as part of the request. - :param dict[str, str] access_condition: Conditions Associated with the request. + :param initial_headers: Initial headers to be sent as part of the request. + :type initial_headers: dict[str, str] + :param access_condition: Conditions Associated with the request. + :type access_condition: dict[str, str] :param bool populate_query_metrics: Enable returning query metrics in response headers. - :param dict[str, str] request_options: Dictionary of additional properties to be used for the request. + :param request_options: Dictionary of additional properties to be used for the request. + :type request_options: dict[str, Any] :param Callable response_hook: a callable invoked with the response metadata - :raise CosmosHttpResponseError: If the database couldn't be deleted. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: If the database couldn't be deleted. :rtype: None """ request_options = build_options(kwargs) @@ -474,7 +482,7 @@ def get_database_account(self, **kwargs): :param Callable response_hook: a callable invoked with the response metadata :returns: A `DatabaseAccount` instance representing the Cosmos DB Database Account. 
- :rtype: ~azure.cosmos.documents.DatabaseAccount + :rtype: ~azure.cosmos.DatabaseAccount """ response_hook = kwargs.pop('response_hook', None) result = self.client_connection.GetDatabaseAccount(**kwargs) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/database.py b/sdk/cosmos/azure-cosmos/azure/cosmos/database.py index c7c70945579b..cc5067d003db 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/database.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/database.py @@ -121,7 +121,7 @@ def read(self, populate_query_metrics=None, **kwargs): :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :rtype: Dict[Str, Any] - :raise `CosmosHttpResponseError`: If the given database couldn't be retrieved. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: If the given database couldn't be retrieved. """ # TODO this helper function should be extracted from CosmosClient from .cosmos_client import CosmosClient @@ -174,12 +174,12 @@ def create_container( :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A `ContainerProxy` instance representing the new container. - :raise CosmosHttpResponseError: The container creation failed. - :rtype: ~azure.cosmos.container.ContainerProxy + :raises ~azure.cosmos.errors.CosmosHttpResponseError: The container creation failed. + :rtype: ~azure.cosmos.ContainerProxy .. admonition:: Example: - .. literalinclude:: ../../samples/examples.py + .. literalinclude:: ../samples/examples.py :start-after: [START create_container] :end-before: [END create_container] :language: python @@ -187,7 +187,7 @@ def create_container( :caption: Create a container with default settings: :name: create_container - .. literalinclude:: ../../samples/examples.py + .. 
literalinclude:: ../samples/examples.py :start-after: [START create_container_with_settings] :end-before: [END create_container_with_settings] :language: python @@ -258,8 +258,8 @@ def create_container_if_not_exists( :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A `ContainerProxy` instance representing the container. - :raise CosmosHttpResponseError: The container read or creation failed. - :rtype: ~azure.cosmos.container.ContainerProxy + :raises ~azure.cosmos.errors.CosmosHttpResponseError: The container read or creation failed. + :rtype: ~azure.cosmos.ContainerProxy """ try: @@ -301,7 +301,7 @@ def delete_container( :param populate_query_metrics: Enable returning query metrics in response headers. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :raise CosmosHttpResponseError: If the container couldn't be deleted. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: If the container couldn't be deleted. :rtype: None """ request_options = build_options(kwargs) @@ -321,11 +321,11 @@ def get_container_client(self, container): :param container: The ID (name) of the container, a :class:`ContainerProxy` instance, or a dict representing the properties of the container to be retrieved. - :rtype: ~azure.cosmos.container.ContainerProxy + :rtype: ~azure.cosmos.ContainerProxy .. admonition:: Example: - .. literalinclude:: ../../samples/examples.py + .. literalinclude:: ../samples/examples.py :start-after: [START get_container] :end-before: [END get_container] :language: python @@ -359,7 +359,7 @@ def list_containers(self, max_item_count=None, populate_query_metrics=None, **kw .. admonition:: Example: - .. literalinclude:: ../../samples/examples.py + .. 
literalinclude:: ../samples/examples.py :start-after: [START list_containers] :end-before: [END list_containers] :language: python @@ -451,14 +451,14 @@ def replace_container( :param populate_query_metrics: Enable returning query metrics in response headers. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :raise `CosmosHttpResponseError`: Raised if the container couldn't be replaced. This includes - if the container with given id does not exist. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: Raised if the container couldn't be replaced. + This includes if the container with given id does not exist. :returns: A `ContainerProxy` instance representing the container after replace completed. - :rtype: ~azure.cosmos.container.ContainerProxy + :rtype: ~azure.cosmos.ContainerProxy .. admonition:: Example: - .. literalinclude:: ../../samples/examples.py + .. literalinclude:: ../samples/examples.py :start-after: [START reset_container_properties] :end-before: [END reset_container_properties] :language: python @@ -557,8 +557,8 @@ def get_user_client(self, user): :param user: The ID (name), dict representing the properties or :class:`UserProxy` instance of the user to be retrieved. :returns: A `UserProxy` instance representing the retrieved user. - :raise `CosmosHttpResponseError`: If the given user couldn't be retrieved. - :rtype: ~azure.cosmos.user.UserProxy + :raises ~azure.cosmos.errors.CosmosHttpResponseError: If the given user couldn't be retrieved. + :rtype: ~azure.cosmos.UserProxy """ if isinstance(user, UserProxy): id_value = user.id @@ -577,16 +577,16 @@ def create_user(self, body, **kwargs): To update or replace an existing user, use the :func:`ContainerProxy.upsert_user` method. :param body: A dict-like object with an `id` key and value representing the user to be created. 
- The user ID must be unique within the database, and consist of no more than 255 characters. + The user ID must be unique within the database, and consist of no more than 255 characters. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A `UserProxy` instance representing the new user. - :raise `CosmosHttpResponseError`: If the given user couldn't be created. - :rtype: ~azure.cosmos.user.UserProxy + :raises ~azure.cosmos.errors.CosmosHttpResponseError: If the given user couldn't be created. + :rtype: ~azure.cosmos.UserProxy .. admonition:: Example: - .. literalinclude:: ../../samples/examples.py + .. literalinclude:: ../samples/examples.py :start-after: [START create_user] :end-before: [END create_user] :language: python @@ -618,8 +618,8 @@ def upsert_user(self, body, **kwargs): :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A `UserProxy` instance representing the upserted user. - :raise `CosmosHttpResponseError`: If the given user could not be upserted. - :rtype: ~azure.cosmos.user.UserProxy + :raises ~azure.cosmos.errors.CosmosHttpResponseError: If the given user could not be upserted. + :rtype: ~azure.cosmos.UserProxy """ request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) @@ -652,8 +652,9 @@ def replace_user( :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A `UserProxy` instance representing the user after replace went through. - :raise `CosmosHttpResponseError`: If the replace failed or the user with given id does not exist. 
- :rtype: ~azure.cosmos.user.UserProxy + :raises ~azure.cosmos.errors.CosmosHttpResponseError: If the replace failed or the user with given + id does not exist. + :rtype: ~azure.cosmos.UserProxy """ request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) @@ -682,8 +683,8 @@ def delete_user(self, user, **kwargs): instance of the user to be deleted. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :raises `CosmosHttpResponseError`: The user wasn't deleted successfully. If the user does not - exist in the container, a `404` error is returned. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: The user wasn't deleted successfully. + :raises ~azure.cosmos.errors.CosmosResourceNotFoundError: The user does not exist in the container. :rtype: None """ request_options = build_options(kwargs) @@ -703,8 +704,9 @@ def read_offer(self, **kwargs): :param response_hook: a callable invoked with the response metadata :returns: Offer for the database. - :raise CosmosHttpResponseError: If no offer exists for the database or if the offer could not be retrieved. - :rtype: ~azure.cosmos.offer.Offer + :raises ~azure.cosmos.errors.CosmosHttpResponseError: If no offer exists for the database or if the + offer could not be retrieved. + :rtype: ~azure.cosmos.Offer """ response_hook = kwargs.pop('response_hook', None) properties = self._get_properties() @@ -733,8 +735,9 @@ def replace_throughput(self, throughput, **kwargs): :param throughput: The throughput to be set (an integer). :param response_hook: a callable invoked with the response metadata :returns: Offer for the database, updated with new throughput. - :raise CosmosHttpResponseError: If no offer exists for the database or if the offer could not be updated. 
- :rtype: ~azure.cosmos.offer.Offer + :raises ~azure.cosmos.errors.CosmosHttpResponseError: If no offer exists for the database or if the + offer could not be updated. + :rtype: ~azure.cosmos.Offer """ response_hook = kwargs.pop('response_hook', None) properties = self._get_properties() diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/scripts.py b/sdk/cosmos/azure-cosmos/azure/cosmos/scripts.py index d7208f918c4c..8cb9e95d3b82 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/scripts.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/scripts.py @@ -107,7 +107,7 @@ def get_stored_procedure(self, sproc, **kwargs): :param sproc: The ID (name) or dict representing stored procedure to retrieve. :param request_options: Dictionary of additional properties to be used for the request. :returns: A dict representing the retrieved stored procedure. - :raise `CosmosHttpResponseError`: If the given stored procedure couldn't be retrieved. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: If the given stored procedure couldn't be retrieved. :rtype: dict[str, Any] """ request_options = build_options(kwargs) @@ -125,7 +125,7 @@ def create_stored_procedure(self, body, **kwargs): :param body: A dict-like object representing the sproc to create. :param request_options: Dictionary of additional properties to be used for the request. :returns: A dict representing the new stored procedure. - :raise `CosmosHttpResponseError`: If the given stored procedure couldn't be created. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: If the given stored procedure couldn't be created. :rtype: dict[str, Any] """ request_options = build_options(kwargs) @@ -143,7 +143,8 @@ def replace_stored_procedure(self, sproc, body, **kwargs): :param body: A dict-like object representing the sproc to replace. :param request_options: Dictionary of additional properties to be used for the request. :returns: A dict representing the stored procedure after replace went through. 
- :raise `CosmosHttpResponseError`: If the replace failed or the stored procedure with given id does not exist. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: If the replace failed or the stored + procedure with given id does not exist. :rtype: dict[str, Any] """ request_options = build_options(kwargs) @@ -162,8 +163,8 @@ def delete_stored_procedure(self, sproc, **kwargs): :param sproc: The ID (name) or dict representing stored procedure to be deleted. :param request_options: Dictionary of additional properties to be used for the request. - :raises `CosmosHttpResponseError`: The sproc wasn't deleted successfully. If the sproc does not - exist in the container, a `404` error is returned. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: The sproc wasn't deleted successfully. + :raises ~azure.cosmos.errors.CosmosResourceNotFoundError: The sproc does not exist in the container. :rtype: None """ request_options = build_options(kwargs) @@ -190,8 +191,8 @@ def execute_stored_procedure( :param partition_key: Specifies the partition key to indicate which partition the sproc should execute on. :param request_options: Dictionary of additional properties to be used for the request. :returns: Result of the executed stored procedure for the given parameters. - :raise `CosmosHttpResponseError`: If the stored procedure execution failed or if the stored procedure with - given id does not exists in the container. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: If the stored procedure execution failed + or if the stored procedure with given id does not exist in the container. + :rtype: dict[str, Any] """ @@ -261,7 +262,7 @@ def get_trigger(self, trigger, **kwargs): :param trigger: The ID (name) or dict representing trigger to retrieve. :param request_options: Dictionary of additional properties to be used for the request. :returns: A dict representing the retrieved trigger. - :raise `CosmosHttpResponseError`: If the given trigger couldn't be retrieved.
+ :raises ~azure.cosmos.errors.CosmosHttpResponseError: If the given trigger couldn't be retrieved. :rtype: dict[str, Any] """ request_options = build_options(kwargs) @@ -279,7 +280,7 @@ def create_trigger(self, body, **kwargs): :param body: A dict-like object representing the trigger to create. :param request_options: Dictionary of additional properties to be used for the request. :returns: A dict representing the new trigger. - :raise `CosmosHttpResponseError`: If the given trigger couldn't be created. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: If the given trigger couldn't be created. :rtype: dict[str, Any] """ request_options = build_options(kwargs) @@ -297,7 +298,8 @@ def replace_trigger(self, trigger, body, **kwargs): :param body: A dict-like object representing the trigger to replace. :param request_options: Dictionary of additional properties to be used for the request. :returns: A dict representing the trigger after replace went through. - :raise `CosmosHttpResponseError`: If the replace failed or the trigger with given id does not exist. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: If the replace failed or the trigger with given + id does not exist. :rtype: dict[str, Any] """ request_options = build_options(kwargs) @@ -316,8 +318,8 @@ def delete_trigger(self, trigger, **kwargs): :param trigger: The ID (name) or dict representing trigger to be deleted. :param request_options: Dictionary of additional properties to be used for the request. - :raises `CosmosHttpResponseError`: The trigger wasn't deleted successfully. If the trigger does not - exist in the container, a `404` error is returned. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: The trigger wasn't deleted successfully. + :raises ~azure.cosmos.errors.CosmosResourceNotFoundError: The trigger does not exist in the container. 
:rtype: None """ request_options = build_options(kwargs) @@ -375,7 +377,7 @@ def get_user_defined_function(self, udf, **kwargs): :param udf: The ID (name) or dict representing udf to retrieve. :param request_options: Dictionary of additional properties to be used for the request. :returns: A dict representing the retrieved user defined function. - :raise `CosmosHttpResponseError`: If the given user defined function couldn't be retrieved. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: If the given user defined function couldn't be retrieved. :rtype: Iterable[dict[str, Any]] """ request_options = build_options(kwargs) @@ -393,7 +395,7 @@ def create_user_defined_function(self, body, **kwargs): :param body: A dict-like object representing the udf to create. :param request_options: Dictionary of additional properties to be used for the request. :returns: A dict representing the new user defined function. - :raise `CosmosHttpResponseError`: If the given user defined function couldn't be created. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: If the given user defined function couldn't be created. :rtype: dict[str, Any] """ request_options = build_options(kwargs) @@ -411,7 +413,7 @@ def replace_user_defined_function(self, udf, body, **kwargs): :param body: A dict-like object representing the udf to replace. :param request_options: Dictionary of additional properties to be used for the request. :returns: A dict representing the user defined function after replace went through. - :raise `CosmosHttpResponseError`: If the replace failed or the user defined function with + :raises ~azure.cosmos.errors.CosmosHttpResponseError: If the replace failed or the user defined function with given id does not exist. :rtype: dict[str, Any] """ @@ -431,8 +433,8 @@ def delete_user_defined_function(self, udf, **kwargs): :param udf: The ID (name) or dict representing udf to be deleted. :param request_options: Dictionary of additional properties to be used for the request. 
- :raises `CosmosHttpResponseError`: The udf wasn't deleted successfully. If the udf does not - exist in the container, a `404` error is returned. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: The udf wasn't deleted successfully. + :raises ~azure.cosmos.errors.CosmosResourceNotFoundError: The UDF does not exist in the container. :rtype: None """ request_options = build_options(kwargs) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/user.py b/sdk/cosmos/azure-cosmos/azure/cosmos/user.py index 9328548206b8..9352d6755582 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/user.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/user.py @@ -72,7 +72,7 @@ def read(self, **kwargs): :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A :class:`UserProxy` instance representing the retrieved user. - :raise `CosmosHttpResponseError`: If the given user couldn't be retrieved. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: If the given user couldn't be retrieved. :rtype: dict[str, Any] """ request_options = build_options(kwargs) @@ -157,7 +157,7 @@ def get_permission(self, permission, **kwargs): :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A dict representing the retrieved permission. - :raise `CosmosHttpResponseError`: If the given permission couldn't be retrieved. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: If the given permission couldn't be retrieved. :rtype: dict[str, Any] """ request_options = build_options(kwargs) @@ -189,7 +189,7 @@ def create_permission(self, body, **kwargs): :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A dict representing the new permission. 
- :raise `CosmosHttpResponseError`: If the given permission couldn't be created. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: If the given permission couldn't be created. :rtype: dict[str, Any] """ request_options = build_options(kwargs) @@ -221,7 +221,7 @@ def upsert_permission(self, body, **kwargs): :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A dict representing the upserted permission. - :raise `CosmosHttpResponseError`: If the given permission could not be upserted. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: If the given permission could not be upserted. :rtype: dict[str, Any] """ request_options = build_options(kwargs) @@ -254,7 +254,8 @@ def replace_permission(self, permission, body, **kwargs): :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A dict representing the permission after replace went through. - :raise `CosmosHttpResponseError`: If the replace failed or the permission with given id does not exist. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: If the replace failed or the permission + with given id does not exist. :rtype: dict[str, Any] """ request_options = build_options(kwargs) @@ -285,8 +286,8 @@ def delete_permission(self, permission, **kwargs): instance of the permission to be replaced. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :raises `CosmosHttpResponseError`: The permission wasn't deleted successfully. If the permission does - not exist for the user, a `404` error is returned. + :raises ~azure.cosmos.errors.CosmosHttpResponseError: The permission wasn't deleted successfully. 
+ :raises ~azure.cosmos.errors.CosmosResourceNotFoundError: The permission does not exist for the user. :rtype: None """ request_options = build_options(kwargs) diff --git a/sdk/cosmos/azure-cosmos/doc/Makefile b/sdk/cosmos/azure-cosmos/doc/Makefile deleted file mode 100644 index f76148eb7e27..000000000000 --- a/sdk/cosmos/azure-cosmos/doc/Makefile +++ /dev/null @@ -1,177 +0,0 @@ -# Makefile for Sphinx documentation -# - -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -PAPER = -BUILDDIR = _build - -# User-friendly check for sphinx-build -ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) -$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) -endif - -# Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . -# the i18n builder cannot share the environment and doctrees with the others -I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
- -.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext - -help: - @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " texinfo to make Texinfo files" - @echo " info to make Texinfo files and run them through makeinfo" - @echo " gettext to make PO message catalogs" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " xml to make Docutils-native XML files" - @echo " pseudoxml to make pseudoxml-XML files for display purposes" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - -clean: - rm -rf $(BUILDDIR)/* - -html: - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." - -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 
- -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." - -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." - -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." - -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/azure-cosmos.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/azure-cosmos.qhc" - -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." - @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/azure-cosmos" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/azure-cosmos" - @echo "# devhelp" - -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." - -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -latexpdfja: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through platex and dvipdfmx..." 
- $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." - -texinfo: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo - @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." - @echo "Run \`make' in that directory to run these through makeinfo" \ - "(use \`make info' here to do that automatically)." - -info: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo "Running Texinfo files through makeinfo..." - make -C $(BUILDDIR)/texinfo info - @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." - -gettext: - $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale - @echo - @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." - -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." - -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." - -xml: - $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml - @echo - @echo "Build finished. The XML files are in $(BUILDDIR)/xml." - -pseudoxml: - $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml - @echo - @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
diff --git a/sdk/cosmos/azure-cosmos/doc/__init__.py b/sdk/cosmos/azure-cosmos/doc/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/sdk/cosmos/azure-cosmos/doc/conf.py b/sdk/cosmos/azure-cosmos/doc/conf.py deleted file mode 100644 index 09fbff29c5b9..000000000000 --- a/sdk/cosmos/azure-cosmos/doc/conf.py +++ /dev/null @@ -1,227 +0,0 @@ -# -*- coding: utf-8 -*- -# -# azure-cosmos documentation build configuration file, created by -# sphinx-quickstart on Fri Jun 27 15:42:45 2014. -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath('..')) - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.autosummary', 'sphinx.ext.doctest', - 'sphinx.ext.viewcode'] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -#source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. 
-project = u'azure-cosmos' -copyright = u'2017, Microsoft' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = '4.0.0b1' -# The full version, including alpha/beta/rc tags. -release = '4.0.0b1' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -#language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ['_build'] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -#show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False - -# -- Options for extensions ---------------------------------------------------- -autoclass_content = 'both' - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. 
-html_theme = 'default' -html_theme_options = {'collapsiblesidebar': True} - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -#html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -#html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -# html_static_path = ['_static'] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -#html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. 
-#html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Output file base name for HTML help builder. -htmlhelp_basename = 'azure-cosmosdoc' - - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ('index', 'azure-cosmos.tex', u'azure-cosmos Documentation', - u'Microsoft', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. -#latex_show_pagerefs = False - -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Documents to append as an appendix to all manuals. 
-#latex_appendices = [] - -# If false, no module index is generated. -#latex_domain_indices = True - - diff --git a/sdk/cosmos/azure-cosmos/doc/index.rst b/sdk/cosmos/azure-cosmos/doc/index.rst deleted file mode 100644 index b3c201a3d59c..000000000000 --- a/sdk/cosmos/azure-cosmos/doc/index.rst +++ /dev/null @@ -1,78 +0,0 @@ -.. azure-cosmos documentation master file, created by - sphinx-quickstart on Fri Jun 27 15:42:45 2014. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -Azure Cosmos Python SDK -======================================== - -System Requirements: --------------------- - - The supported Python versions are 2.7, 3.3, 3.4 and 3.5. To download Python, please visit https://www.python.org/download/releases. - - - Python Tools for Visual Studio is required when using Microsoft Visual - Studio to develop Python applications. To download Python Tools for Visual Studio, please visit http://microsoft.github.io/PTVS. - - -Installation: -------------- - - Method 1: - - 1. Download the Azure Cosmos Python SDK source from - https://github.com/Azure/azure-cosmos-python which is needed to manage the Azure Cosmos database service. - - 2. Execute the following setup script in bash shell: - - .. code-block:: bash - - $ python setup.py install - - Method 2: - - 1. Install the Azure Cosmos Python SDK using pip. - For more information on pip, please visit https://pypi.python.org/pypi/pip - - 2. Execute the following in bash shell: - - .. code-block:: bash - - $ pip install azure-cosmos - -To run tests: -------------- - - .. code-block:: bash - - $ python -m unittest discover -s .\test -p "*.py" - - If you use Microsoft Visual Studio, open the project file python.pyproj, - and run all the tests in Test Explorer. - - -To generate documentations: ---------------------------- - - Install Sphinx: http://sphinx-doc.org/install.html - - .. 
code-block:: bash - - $ cd doc - $ sphinx-apidoc -f -e -o api ..\azure - $ make.bat html - - -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` - -.. toctree:: - :hidden: - - api/azure - api/modules \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/doc/make.bat b/sdk/cosmos/azure-cosmos/doc/make.bat deleted file mode 100644 index 2b4d4c3a9092..000000000000 --- a/sdk/cosmos/azure-cosmos/doc/make.bat +++ /dev/null @@ -1,242 +0,0 @@ -@ECHO OFF - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set BUILDDIR=_build -set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . -set I18NSPHINXOPTS=%SPHINXOPTS% . -if NOT "%PAPER%" == "" ( - set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% - set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% -) - -if "%1" == "" goto help - -if "%1" == "help" ( - :help - echo.Please use `make ^` where ^ is one of - echo. html to make standalone HTML files - echo. dirhtml to make HTML files named index.html in directories - echo. singlehtml to make a single large HTML file - echo. pickle to make pickle files - echo. json to make JSON files - echo. htmlhelp to make HTML files and a HTML help project - echo. qthelp to make HTML files and a qthelp project - echo. devhelp to make HTML files and a Devhelp project - echo. epub to make an epub - echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter - echo. text to make text files - echo. man to make manual pages - echo. texinfo to make Texinfo files - echo. gettext to make PO message catalogs - echo. changes to make an overview over all changed/added/deprecated items - echo. xml to make Docutils-native XML files - echo. pseudoxml to make pseudoxml-XML files for display purposes - echo. linkcheck to check all external links for integrity - echo. 
doctest to run all doctests embedded in the documentation if enabled - goto end -) - -if "%1" == "clean" ( - for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i - del /q /s %BUILDDIR%\* - goto end -) - - -%SPHINXBUILD% 2> nul -if errorlevel 9009 ( - echo. - echo.The 'sphinx-build' command was not found. Make sure you have Sphinx - echo.installed, then set the SPHINXBUILD environment variable to point - echo.to the full path of the 'sphinx-build' executable. Alternatively you - echo.may add the Sphinx directory to PATH. - echo. - echo.If you don't have Sphinx installed, grab it from - echo.http://sphinx-doc.org/ - exit /b 1 -) - -if "%1" == "html" ( - %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/html. - goto end -) - -if "%1" == "dirhtml" ( - %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. - goto end -) - -if "%1" == "singlehtml" ( - %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. - goto end -) - -if "%1" == "pickle" ( - %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the pickle files. - goto end -) - -if "%1" == "json" ( - %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the JSON files. - goto end -) - -if "%1" == "htmlhelp" ( - %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can run HTML Help Workshop with the ^ -.hhp project file in %BUILDDIR%/htmlhelp. - goto end -) - -if "%1" == "qthelp" ( - %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp - if errorlevel 1 exit /b 1 - echo. 
- echo.Build finished; now you can run "qcollectiongenerator" with the ^ -.qhcp project file in %BUILDDIR%/qthelp, like this: - echo.^> qcollectiongenerator %BUILDDIR%\qthelp\azure-cosmos.qhcp - echo.To view the help file: - echo.^> assistant -collectionFile %BUILDDIR%\qthelp\azure-cosmos.ghc - goto end -) - -if "%1" == "devhelp" ( - %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. - goto end -) - -if "%1" == "epub" ( - %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The epub file is in %BUILDDIR%/epub. - goto end -) - -if "%1" == "latex" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "latexpdf" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - cd %BUILDDIR%/latex - make all-pdf - cd %BUILDDIR%/.. - echo. - echo.Build finished; the PDF files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "latexpdfja" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - cd %BUILDDIR%/latex - make all-pdf-ja - cd %BUILDDIR%/.. - echo. - echo.Build finished; the PDF files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "text" ( - %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The text files are in %BUILDDIR%/text. - goto end -) - -if "%1" == "man" ( - %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The manual pages are in %BUILDDIR%/man. - goto end -) - -if "%1" == "texinfo" ( - %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 
- goto end -) - -if "%1" == "gettext" ( - %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The message catalogs are in %BUILDDIR%/locale. - goto end -) - -if "%1" == "changes" ( - %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes - if errorlevel 1 exit /b 1 - echo. - echo.The overview file is in %BUILDDIR%/changes. - goto end -) - -if "%1" == "linkcheck" ( - %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck - if errorlevel 1 exit /b 1 - echo. - echo.Link check complete; look for any errors in the above output ^ -or in %BUILDDIR%/linkcheck/output.txt. - goto end -) - -if "%1" == "doctest" ( - %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest - if errorlevel 1 exit /b 1 - echo. - echo.Testing of doctests in the sources finished, look at the ^ -results in %BUILDDIR%/doctest/output.txt. - goto end -) - -if "%1" == "xml" ( - %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The XML files are in %BUILDDIR%/xml. - goto end -) - -if "%1" == "pseudoxml" ( - %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 
- goto end -) - -:end diff --git a/sdk/cosmos/azure-cosmos/requirements.txt b/sdk/cosmos/azure-cosmos/requirements.txt index 5d92f68f2287..1d2d3a185c90 100644 --- a/sdk/cosmos/azure-cosmos/requirements.txt +++ b/sdk/cosmos/azure-cosmos/requirements.txt @@ -1,2 +1,2 @@ -azure-core<2.0.0,>=1.0.0b3 +azure-core<2.0.0,>=1.0.0b4 six>=1.6 \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/setup.py b/sdk/cosmos/azure-cosmos/setup.py index fbf49b5e4527..7972b7525768 100644 --- a/sdk/cosmos/azure-cosmos/setup.py +++ b/sdk/cosmos/azure-cosmos/setup.py @@ -63,14 +63,13 @@ "samples.Shared", "samples.Shared.config", "test", - "doc", # Exclude packages that will be covered by PEP420 or nspkg "azure", ] ), install_requires=[ 'six >=1.6', - 'azure-core<2.0.0,>=1.0.0b3' + 'azure-core<2.0.0,>=1.0.0b4' ], extras_require={ ":python_version<'3.4'": ['enum34>=1.0.4'], diff --git a/shared_requirements.txt b/shared_requirements.txt index 9e64ea4a1f70..48e6b432b094 100644 --- a/shared_requirements.txt +++ b/shared_requirements.txt @@ -111,7 +111,6 @@ opencensus-ext-azure>=0.3.1 #override azure-keyvault-certificates azure-core<2.0.0,>=1.0.0b2 #override azure-keyvault-keys azure-core<2.0.0,>=1.0.0b2 #override azure-keyvault-secrets azure-core<2.0.0,>=1.0.0b2 -#override azure-cosmos azure-core<2.0.0,>=1.0.0b3 #override azure-eventhub-checkpointstoreblob-aio azure-storage-blob<=12.0.0b4,>=12.0.0b2 #override azure-eventhub-checkpointstoreblob-aio aiohttp<4.0,>=3.0 #override azure-eventhub uamqp<2.0,>=1.2.3 From dcd9276bc60e07fb45e9959a1448b235a9e82cc2 Mon Sep 17 00:00:00 2001 From: Srinath Narayanan Date: Tue, 8 Oct 2019 19:11:21 -0700 Subject: [PATCH 24/29] dummy commit --- sdk/cosmos/azure-cosmos/test/crud_tests.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/cosmos/azure-cosmos/test/crud_tests.py b/sdk/cosmos/azure-cosmos/test/crud_tests.py index 9071f96071b3..6d9ec3ab6f66 100644 --- a/sdk/cosmos/azure-cosmos/test/crud_tests.py +++ 
b/sdk/cosmos/azure-cosmos/test/crud_tests.py @@ -1958,7 +1958,7 @@ def _test_create_indexing_policy_with_composite_and_spatial_indexes(self, is_nam read_indexing_policy = created_container['indexingPolicy'] self.assertListEqual(indexing_policy['spatialIndexes'], read_indexing_policy['spatialIndexes']) self.assertListEqual(indexing_policy['compositeIndexes'], read_indexing_policy['compositeIndexes']) - self.client.DeleteContainer(created_container['_self']) + self.client.DeleteContainer(created_container['_self']) def _check_default_indexing_policy_paths(self, indexing_policy): def __get_first(array): From 12474a10e00f43cacf20bcc9f9db32eafa94acfc Mon Sep 17 00:00:00 2001 From: Srinath Narayanan Date: Tue, 8 Oct 2019 19:11:42 -0700 Subject: [PATCH 25/29] reverted dummy commit --- sdk/cosmos/azure-cosmos/test/crud_tests.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/cosmos/azure-cosmos/test/crud_tests.py b/sdk/cosmos/azure-cosmos/test/crud_tests.py index 6d9ec3ab6f66..9071f96071b3 100644 --- a/sdk/cosmos/azure-cosmos/test/crud_tests.py +++ b/sdk/cosmos/azure-cosmos/test/crud_tests.py @@ -1958,7 +1958,7 @@ def _test_create_indexing_policy_with_composite_and_spatial_indexes(self, is_nam read_indexing_policy = created_container['indexingPolicy'] self.assertListEqual(indexing_policy['spatialIndexes'], read_indexing_policy['spatialIndexes']) self.assertListEqual(indexing_policy['compositeIndexes'], read_indexing_policy['compositeIndexes']) - self.client.DeleteContainer(created_container['_self']) + self.client.DeleteContainer(created_container['_self']) def _check_default_indexing_policy_paths(self, indexing_policy): def __get_first(array): From 02da658f23404c35e784d067c8f8ef5c90d696a1 Mon Sep 17 00:00:00 2001 From: Srinath Narayanan Date: Thu, 10 Oct 2019 11:09:30 -0700 Subject: [PATCH 26/29] fixed failing test fixed lint errors --- .../azure/cosmos/_cosmos_client_connection.py | 3 ++- .../_execution_context/base_execution_context.py | 1 - 
.../cosmos/_execution_context/endpoint_component.py | 2 +- .../_execution_context/execution_dispatcher.py | 12 ++++++++---- sdk/cosmos/azure-cosmos/test/query_tests.py | 5 +++-- sdk/cosmos/azure-cosmos/test/sample.py | 5 ----- 6 files changed, 14 insertions(+), 14 deletions(-) delete mode 100644 sdk/cosmos/azure-cosmos/test/sample.py diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py index 4b19ad010aa4..6d0b3c8ef9eb 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py @@ -2668,7 +2668,8 @@ def __GetBodiesFromQueryResult(result): # Copy to make sure that default_headers won't be changed. if query is None: # Query operations will use ReadEndpoint even though it uses GET(for feed requests) - request_params = _request_object.RequestObject(typ, documents._OperationType.QueryPlan if is_query_plan else documents._OperationType.ReadFeed) + request_params = _request_object.RequestObject(typ, + documents._OperationType.QueryPlan if is_query_plan else documents._OperationType.ReadFeed) headers = base.GetHeaders(self, initial_headers, "get", path, id_, typ, options, partition_key_range_id) result, self.last_response_headers = self.__Get(path, request_params, headers, **kwargs) if response_hook: diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/base_execution_context.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/base_execution_context.py index b407f3cd03c4..ee4981d06bd8 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/base_execution_context.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/base_execution_context.py @@ -25,7 +25,6 @@ from collections import deque from .. import _retry_utility from .. import http_constants -from .. 
import _base # pylint: disable=protected-access diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py index 7dd089617cbd..254c4264224d 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py @@ -108,7 +108,7 @@ def __init__(self, execution_context): def make_hash(self, value): if isinstance(value, (set, tuple, list)): return tuple([self.make_hash(v) for v in value]) - elif not isinstance(value, dict): + if not isinstance(value, dict): if isinstance(value, numbers.Number): return float(value) return value diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py index 798f7241db86..49a5c14befb5 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py @@ -80,7 +80,8 @@ def next(self): except CosmosHttpResponseError as e: if _is_partitioned_execution_info(e): query_to_use = self._query if self._query is not None else "Select * from root r" - query_execution_info = _PartitionedQueryExecutionInfo(self._client._GetQueryPlanThroughGateway(query_to_use, self._resource_link)) + query_execution_info = _PartitionedQueryExecutionInfo(self._client._GetQueryPlanThroughGateway + (query_to_use, self._resource_link)) self._execution_context = self._create_pipelined_execution_context(query_execution_info) else: raise e @@ -102,7 +103,8 @@ def fetch_next_block(self): except CosmosHttpResponseError as e: if _is_partitioned_execution_info(e): query_to_use = self._query if self._query is not None else "Select * from root r" - query_execution_info = 
_PartitionedQueryExecutionInfo(self._client._GetQueryPlanThroughGateway(query_to_use, self._resource_link)) + query_execution_info = _PartitionedQueryExecutionInfo(self._client._GetQueryPlanThroughGateway + (query_to_use, self._resource_link)) self._execution_context = self._create_pipelined_execution_context(query_execution_info) else: raise e @@ -113,8 +115,10 @@ def _create_pipelined_execution_context(self, query_execution_info): assert self._resource_link, "code bug, resource_link is required." if query_execution_info.has_aggregates() and not query_execution_info.has_select_value(): - if self._options and ("enableCrossPartitionQuery" in self._options and self._options["enableCrossPartitionQuery"]): - raise CosmosHttpResponseError(StatusCodes.BAD_REQUEST, "Cross partition query only supports 'VALUE ' for aggregates") + if self._options and ("enableCrossPartitionQuery" in self._options + and self._options["enableCrossPartitionQuery"]): + raise CosmosHttpResponseError(StatusCodes.BAD_REQUEST, + "Cross partition query only supports 'VALUE ' for aggregates") execution_context_aggregator = multi_execution_aggregator._MultiExecutionContextAggregator(self._client, self._resource_link, diff --git a/sdk/cosmos/azure-cosmos/test/query_tests.py b/sdk/cosmos/azure-cosmos/test/query_tests.py index 98962b45ea35..5f19ffd38f07 100644 --- a/sdk/cosmos/azure-cosmos/test/query_tests.py +++ b/sdk/cosmos/azure-cosmos/test/query_tests.py @@ -191,10 +191,11 @@ def test_max_item_count_honored_in_order_by_query(self): enable_cross_partition_query=True ) # 1 call to get query plans, 1 call to get pkr, 10 calls to one partion with the documents, 1 call each to other 4 partitions + # 1 extra call to get query plan via gateway if 'localhost' in self.host or '127.0.0.1' in self.host: # TODO: Differing result between live and emulator - self.validate_query_requests_count(query_iterable, 16 * 2) + self.validate_query_requests_count(query_iterable, 16 * 2 + 1) else: - 
self.validate_query_requests_count(query_iterable, 17 * 2) + self.validate_query_requests_count(query_iterable, 17 * 2 + 1) query_iterable = created_collection.query_items( query=query, diff --git a/sdk/cosmos/azure-cosmos/test/sample.py b/sdk/cosmos/azure-cosmos/test/sample.py deleted file mode 100644 index bf964dfb06e2..000000000000 --- a/sdk/cosmos/azure-cosmos/test/sample.py +++ /dev/null @@ -1,5 +0,0 @@ -from azure.cosmos.cosmos_client import CosmosClient -c = CosmosClient("https://java-async-gated.documents-staging.windows-ppe.net:443/", {'masterKey': "LyiYjQopDScUDPLeN6Myn4umLwFoJCttLpwpf9OoIvsyroPazV83EEwb9k7N8ANqORA4QF60mtjwwwgqfm9yVg=="}) -cc = c.get_database_client("mydb").get_container_client("mycoll") -doc = cc.upsert_item(body={'id': '1', 'pk': '1', 'i': 4}) -print(doc) \ No newline at end of file From 59fa49b417f65ba2a840ede46bd01972823e1947 Mon Sep 17 00:00:00 2001 From: Srinath Narayanan Date: Thu, 10 Oct 2019 11:58:02 -0700 Subject: [PATCH 27/29] fixed failing tests --- sdk/cosmos/azure-cosmos/test/aggregate_tests.py | 4 ++-- sdk/cosmos/azure-cosmos/test/query_tests.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/test/aggregate_tests.py b/sdk/cosmos/azure-cosmos/test/aggregate_tests.py index ec18e4d7071d..15e7f687d765 100644 --- a/sdk/cosmos/azure-cosmos/test/aggregate_tests.py +++ b/sdk/cosmos/azure-cosmos/test/aggregate_tests.py @@ -65,8 +65,8 @@ def _setup(cls): "'masterKey' and 'host' at the top of this class to run the " "tests.") - cls.client = cosmos_client.CosmosClient(_config.host, - {'masterKey': _config.master_key}, "Session", _config.connection_policy) + cls.client = cosmos_client.CosmosClient( + _config.host, {'masterKey': _config.master_key}, "Session", connection_policy=_config.connection_policy) created_db = test_config._test_config.create_database_if_not_exist(cls.client) cls.created_collection = cls._create_collection(created_db) diff --git 
a/sdk/cosmos/azure-cosmos/test/query_tests.py b/sdk/cosmos/azure-cosmos/test/query_tests.py index 5f19ffd38f07..2c92f389a097 100644 --- a/sdk/cosmos/azure-cosmos/test/query_tests.py +++ b/sdk/cosmos/azure-cosmos/test/query_tests.py @@ -193,7 +193,7 @@ def test_max_item_count_honored_in_order_by_query(self): # 1 call to get query plans, 1 call to get pkr, 10 calls to one partion with the documents, 1 call each to other 4 partitions # 1 extra call to get query plan via gateway if 'localhost' in self.host or '127.0.0.1' in self.host: # TODO: Differing result between live and emulator - self.validate_query_requests_count(query_iterable, 16 * 2 + 1) + self.validate_query_requests_count(query_iterable, 15 * 2 + 1) else: self.validate_query_requests_count(query_iterable, 17 * 2 + 1) @@ -277,7 +277,7 @@ def test_unsupported_queries(self): try: list(query_iterable) self.fail() - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: self.assertEqual(e.status_code, 400) def test_query_with_non_overlapping_pk_ranges(self): From dcc1b5ca679fb0c2f0cf2a3ac7dbf70a53604bae Mon Sep 17 00:00:00 2001 From: Srinath Narayanan Date: Thu, 10 Oct 2019 13:16:07 -0700 Subject: [PATCH 28/29] updated comment --- sdk/cosmos/azure-cosmos/test/query_tests.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/test/query_tests.py b/sdk/cosmos/azure-cosmos/test/query_tests.py index 2c92f389a097..8dbe6a01e815 100644 --- a/sdk/cosmos/azure-cosmos/test/query_tests.py +++ b/sdk/cosmos/azure-cosmos/test/query_tests.py @@ -190,8 +190,6 @@ def test_max_item_count_honored_in_order_by_query(self): max_item_count=1, enable_cross_partition_query=True ) - # 1 call to get query plans, 1 call to get pkr, 10 calls to one partion with the documents, 1 call each to other 4 partitions - # 1 extra call to get query plan via gateway if 'localhost' in self.host or '127.0.0.1' in self.host: # TODO: Differing result between live and emulator 
self.validate_query_requests_count(query_iterable, 15 * 2 + 1) else: From 83664b167a1a0090eefc676ebf6145c429fadac6 Mon Sep 17 00:00:00 2001 From: Srinath Narayanan Date: Fri, 11 Oct 2019 10:36:06 -0700 Subject: [PATCH 29/29] added **kwargs to _GetQueryPlanThroughGateway --- .../azure-cosmos/azure/cosmos/_cosmos_client_connection.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py index 6d0b3c8ef9eb..a1360e24aa54 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py @@ -2702,7 +2702,7 @@ def __GetBodiesFromQueryResult(result): return __GetBodiesFromQueryResult(result) - def _GetQueryPlanThroughGateway(self, query, resource_link): + def _GetQueryPlanThroughGateway(self, query, resource_link, **kwargs): supported_query_features = (documents._QueryFeature.Aggregate + "," + documents._QueryFeature.CompositeAggregate + "," + documents._QueryFeature.Distinct + "," + @@ -2729,7 +2729,8 @@ def _GetQueryPlanThroughGateway(self, query, resource_link): None, query, options, - is_query_plan=True) + is_query_plan=True, + **kwargs) def __CheckAndUnifyQueryFormat(self, query_body): """Checks and unifies the format of the query body.