Skip to content

Commit

Permalink
Update handling error in query (#20765)
Browse files Browse the repository at this point in the history
* samples

* hide row

* Handle errors

* lint

* lint

* extract inner message

* lint

* lint

* lint

* api view fixes

* more changes

* Update sdk/monitor/azure-monitor-query/CHANGELOG.md

* Apply suggestions from code review

Co-authored-by: Krista Pratico <[email protected]>

* changes

Co-authored-by: Krista Pratico <[email protected]>
Rakshith Bhyravabhotla and kristapratico authored Sep 24, 2021

Verified

This commit was created on GitHub.com and signed with GitHub’s verified signature. The key has expired.
1 parent 1a9b633 commit 0475f51
Showing 17 changed files with 459 additions and 789 deletions.
8 changes: 5 additions & 3 deletions sdk/monitor/azure-monitor-query/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -4,14 +4,16 @@

### Features Added

- Added `QueryPartialErrorException` and `LogsQueryError` to handle errors.
- Added `partial_error` and `is_error` attributes to `LogsQueryResult`.
- Added an option `allow_partial_errors` that defaults to False, which can be set to not throw if there are any partial errors.
- Added `LogsQueryPartialResult` and `LogsQueryError` to handle errors.
- Added `status` attribute to `LogsQueryResult`.
- Added `LogsQueryStatus` Enum to describe the status of a result.
- Added a new `LogsTableRow` type that represents a single row in a table.

### Breaking Changes

- `LogsQueryResult` now iterates over the tables directly as a convenience.
- `query` API now returns a union of `LogsQueryPartialResult` and `LogsQueryResult`.
- `query_batch` API now returns a union of `LogsQueryPartialResult`, `LogsQueryError` and `LogsQueryResult`.
- `metric_namespace` is renamed to `namespace` and is a keyword-only argument in `list_metric_definitions` API.

### Bugs Fixed
14 changes: 7 additions & 7 deletions sdk/monitor/azure-monitor-query/azure/monitor/query/__init__.py
Original file line number Diff line number Diff line change
@@ -7,15 +7,14 @@
from ._logs_query_client import LogsQueryClient
from ._metrics_query_client import MetricsQueryClient

from ._exceptions import (
LogsQueryError,
QueryPartialErrorException
)
from ._exceptions import LogsQueryError

from ._models import (
MetricAggregationType,
LogsQueryResult,
LogsTable,
LogsQueryPartialResult,
LogsQueryStatus,
LogsTableRow,
MetricsResult,
LogsBatchQuery,
@@ -27,7 +26,7 @@
Metric,
MetricValue,
MetricClass,
MetricAvailability
MetricAvailability,
)

from ._version import VERSION
@@ -36,8 +35,9 @@
"MetricAggregationType",
"LogsQueryClient",
"LogsQueryResult",
"LogsQueryPartialResult",
"LogsQueryStatus",
"LogsQueryError",
"QueryPartialErrorException",
"LogsTable",
"LogsTableRow",
"LogsBatchQuery",
@@ -51,7 +51,7 @@
"Metric",
"MetricValue",
"MetricClass",
"MetricAvailability"
"MetricAvailability",
]

__version__ = VERSION
70 changes: 16 additions & 54 deletions sdk/monitor/azure-monitor-query/azure/monitor/query/_exceptions.py
Original file line number Diff line number Diff line change
@@ -4,7 +4,8 @@
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from azure.core.exceptions import HttpResponseError
from ._models import LogsQueryStatus


class LogsQueryError(object):
"""The code and message for an error.
@@ -15,65 +16,26 @@ class LogsQueryError(object):
:vartype code: str
:ivar message: A human readable error message.
:vartype message: str
:ivar details: error details.
:vartype details: list[~monitor_query_client.models.ErrorDetail]
:ivar innererror: Inner error details if they exist.
:vartype innererror: ~azure.monitor.query.LogsQueryError
:ivar additional_properties: Additional properties that can be provided on the error info
object.
:vartype additional_properties: object
:ivar bool is_error: Boolean check for error item when iterating over list of
results. Always True for an instance of a LogsQueryError.
:ivar status: status for error item when iterating over list of
results. Always "Failure" for an instance of a LogsQueryError.
:vartype status: ~azure.monitor.query.LogsQueryStatus
"""
def __init__(
self,
**kwargs
):
self.code = kwargs.get('code', None)
self.message = kwargs.get('message', None)
self.details = kwargs.get('details', None)
self.innererror = kwargs.get('innererror', None)
self.additional_properties = kwargs.get('additional_properties', None)
self.is_error = True

def __init__(self, **kwargs):
self.code = kwargs.get("code", None)
self.message = kwargs.get("message", None)
self.status = LogsQueryStatus.FAILURE

@classmethod
def _from_generated(cls, generated):
if not generated:
return None
details = None
if generated.details is not None:
details = [d.serialize() for d in generated.details]

innererror = generated
while innererror.innererror is not None:
innererror = innererror.innererror
message = innererror.message
return cls(
code=generated.code,
message=generated.message,
innererror=cls._from_generated(generated.innererror) if generated.innererror else None,
additional_properties=generated.additional_properties,
details=details,
message=message,
)

class QueryPartialErrorException(HttpResponseError):
"""There is a partial failure in query operation. This is thrown for a single query operation
when allow_partial_errors is set to False.
:ivar code: A machine readable error code.
:vartype code: str
:ivar message: A human readable error message.
:vartype message: str
:ivar details: error details.
:vartype details: list[~monitor_query_client.models.ErrorDetail]
:ivar innererror: Inner error details if they exist.
:vartype innererror: ~azure.monitor.query.LogsQueryError
:ivar additional_properties: Additional properties that can be provided on the error info
object.
:vartype additional_properties: object
"""

def __init__(self, **kwargs):
error = kwargs.pop('error', None)
if error:
self.code = error.code
self.message = error.message
self.details = [d.serialize() for d in error.details] if error.details else None
self.innererror = LogsQueryError._from_generated(error.innererror) if error.innererror else None
self.additional_properties = error.additional_properties
super(QueryPartialErrorException, self).__init__(message=self.message)
84 changes: 52 additions & 32 deletions sdk/monitor/azure-monitor-query/azure/monitor/query/_helpers.py
Original file line number Diff line number Diff line change
@@ -13,101 +13,121 @@
if TYPE_CHECKING:
from azure.core.credentials import TokenCredential


def get_authentication_policy(
credential, # type: TokenCredential
credential, # type: TokenCredential
):
# type: (...) -> BearerTokenCredentialPolicy
"""Returns the correct authentication policy
"""
"""Returns the correct authentication policy"""

if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if hasattr(credential, "get_token"):
return BearerTokenCredentialPolicy(credential, "https://api.loganalytics.io/.default")
return BearerTokenCredentialPolicy(
credential, "https://api.loganalytics.io/.default"
)

raise TypeError("Unsupported credential")


def get_metrics_authentication_policy(
credential, # type: TokenCredential
credential, # type: TokenCredential
):
# type: (...) -> BearerTokenCredentialPolicy
"""Returns the correct authentication policy
"""
"""Returns the correct authentication policy"""

if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if hasattr(credential, "get_token"):
return BearerTokenCredentialPolicy(credential, "https://management.azure.com/.default")
return BearerTokenCredentialPolicy(
credential, "https://management.azure.com/.default"
)

raise TypeError("Unsupported credential")

def order_results(request_order, mapping, obj, err, allow_partial_errors=False):

def order_results(request_order, mapping, **kwargs):
ordered = [mapping[id] for id in request_order]
results = []
for item in ordered:
if not item.body.error:
results.append(obj._from_generated(item.body)) # pylint: disable=protected-access
results.append(
kwargs.get("obj")._from_generated(item.body) # pylint: disable=protected-access
)
else:
error = item.body.error
if allow_partial_errors and error.code == 'PartialError':
res = obj._from_generated(item.body) # pylint: disable=protected-access
res.partial_error = err._from_generated(error) # pylint: disable=protected-access
if error.code == "PartialError":
res = kwargs.get("partial_err")._from_generated( # pylint: disable=protected-access
item.body, kwargs.get("raise_with")
)
results.append(res)
else:
results.append(err._from_generated(error)) # pylint: disable=protected-access
results.append(
kwargs.get("err")._from_generated(error) # pylint: disable=protected-access
)
return results


def construct_iso8601(timespan=None):
if not timespan:
return None
try:
start, end, duration = None, None, None
if isinstance(timespan[1], datetime): # we treat this as start_time, end_time
if isinstance(timespan[1], datetime):  # we treat this as start_time, end_time
start, end = timespan[0], timespan[1]
elif isinstance(timespan[1], timedelta): # we treat this as start_time, duration
elif isinstance(
timespan[1], timedelta
): # we treat this as start_time, duration
start, duration = timespan[0], timespan[1]
else:
raise ValueError('Tuple must be a start datetime with a timedelta or an end datetime.')
raise ValueError(
"Tuple must be a start datetime with a timedelta or an end datetime."
)
except TypeError:
duration = timespan # it means only duration (timedelta) is provided
duration = timespan  # it means only duration (timedelta) is provided
if duration:
try:
duration = 'PT{}S'.format(duration.total_seconds())
duration = "PT{}S".format(duration.total_seconds())
except AttributeError:
raise ValueError('timespan must be a timedelta or a tuple.')
raise ValueError("timespan must be a timedelta or a tuple.")
iso_str = None
if start is not None:
start = Serializer.serialize_iso(start)
if end is not None:
end = Serializer.serialize_iso(end)
iso_str = start + '/' + end
iso_str = start + "/" + end
elif duration is not None:
iso_str = start + '/' + duration
else: # means that an invalid value None that is provided with start_time
raise ValueError("Duration or end_time cannot be None when provided with start_time.")
iso_str = start + "/" + duration
else: # means that an invalid value None that is provided with start_time
raise ValueError(
"Duration or end_time cannot be None when provided with start_time."
)
else:
iso_str = duration
return iso_str


def native_col_type(col_type, value):
if col_type == 'datetime':
if col_type == "datetime":
value = Deserializer.deserialize_iso(value)
elif col_type in ('timespan', 'guid'):
elif col_type in ("timespan", "guid"):
value = str(value)
return value


def process_row(col_types, row):
return [native_col_type(col_types[ind], val) for ind, val in enumerate(row)]


def process_error(error, model):
try:
model = model._from_generated(error.model.error) # pylint: disable=protected-access
except AttributeError: # model can be none
model = model._from_generated( # pylint: disable=protected-access
error.model.error
)
except AttributeError: # model can be none
pass
raise HttpResponseError(
message=error.message,
response=error.response,
model=model)
raise HttpResponseError(message=error.message, response=error.response, model=model)


def process_prefer(server_timeout, include_statistics, include_visualization):
prefer = ""
Original file line number Diff line number Diff line change
@@ -5,16 +5,22 @@
# license information.
# --------------------------------------------------------------------------

from typing import TYPE_CHECKING, Any, Union, Sequence, Dict, List
from typing import TYPE_CHECKING, Any, Union, Sequence, Dict, List, cast
from azure.core.exceptions import HttpResponseError
from azure.core.tracing.decorator import distributed_trace

from ._generated._monitor_query_client import MonitorQueryClient

from ._generated.models import BatchRequest, QueryBody as LogsQueryBody
from ._helpers import get_authentication_policy, construct_iso8601, order_results, process_error, process_prefer
from ._models import LogsBatchQuery, LogsQueryResult
from ._exceptions import LogsQueryError, QueryPartialErrorException
from ._helpers import (
get_authentication_policy,
construct_iso8601,
order_results,
process_error,
process_prefer,
)
from ._models import LogsBatchQuery, LogsQueryResult, LogsQueryPartialResult
from ._exceptions import LogsQueryError

if TYPE_CHECKING:
from azure.core.credentials import TokenCredential
@@ -42,7 +48,7 @@ class LogsQueryClient(object):
def __init__(self, credential, **kwargs):
# type: (TokenCredential, Any) -> None

self._endpoint = kwargs.pop('endpoint', 'https://api.loganalytics.io/v1')
self._endpoint = kwargs.pop("endpoint", "https://api.loganalytics.io/v1")
self._client = MonitorQueryClient(
credential=credential,
authentication_policy=get_authentication_policy(credential),
@@ -53,7 +59,7 @@ def __init__(self, credential, **kwargs):

@distributed_trace
def query(self, workspace_id, query, **kwargs):
# type: (str, str, Any) -> LogsQueryResult
# type: (str, str, Any) -> Union[LogsQueryResult, LogsQueryPartialResult]
"""Execute an Analytics query.
Executes an Analytics query for data.
@@ -64,7 +70,7 @@ def query(self, workspace_id, query, **kwargs):
:param query: The Kusto query. Learn more about the `Kusto query syntax
<https://docs.microsoft.com/azure/data-explorer/kusto/query/>`_.
:type query: str
:keyword timespan: The timespan for which to query the data. This can be a timedelta,
:keyword timespan: Required. The timespan for which to query the data. This can be a timedelta,
a timedelta and a start datetime, or a start datetime/end datetime.
:paramtype timespan: ~datetime.timedelta or tuple[~datetime.datetime, ~datetime.timedelta]
or tuple[~datetime.datetime, ~datetime.datetime]
@@ -77,10 +83,8 @@ def query(self, workspace_id, query, **kwargs):
:keyword additional_workspaces: A list of workspaces that are included in the query.
These can be qualified workspace names, workspace Ids, or Azure resource Ids.
:paramtype additional_workspaces: list[str]
:keyword allow_partial_errors: Defaults to False. If set to true, partial errors are not thrown.
:paramtype allow_partial_errors: bool
:return: LogsQueryResult, or the result of cls(response)
:rtype: ~azure.monitor.query.LogsQueryResult
:rtype: Union[~azure.monitor.query.LogsQueryResult, ~azure.monitor.query.LogsQueryPartialResult]
:raises: ~azure.core.exceptions.HttpResponseError
.. admonition:: Example:
@@ -92,58 +96,60 @@ def query(self, workspace_id, query, **kwargs):
:dedent: 0
:caption: Get a response for a single Log Query
"""
allow_partial_errors = kwargs.pop('allow_partial_errors', False)
if 'timespan' not in kwargs:
raise TypeError("query() missing 1 required keyword-only argument: 'timespan'")
timespan = construct_iso8601(kwargs.pop('timespan'))
if "timespan" not in kwargs:
raise TypeError(
"query() missing 1 required keyword-only argument: 'timespan'"
)
timespan = construct_iso8601(kwargs.pop("timespan"))
include_statistics = kwargs.pop("include_statistics", False)
include_visualization = kwargs.pop("include_visualization", False)
server_timeout = kwargs.pop("server_timeout", None)
workspaces = kwargs.pop("additional_workspaces", None)

prefer = process_prefer(server_timeout, include_statistics, include_visualization)
prefer = process_prefer(
server_timeout, include_statistics, include_visualization
)

body = LogsQueryBody(
query=query,
timespan=timespan,
workspaces=workspaces,
**kwargs
query=query, timespan=timespan, workspaces=workspaces, **kwargs
)

try:
generated_response = self._query_op.execute( # pylint: disable=protected-access
workspace_id=workspace_id,
body=body,
prefer=prefer,
**kwargs
generated_response = (
self._query_op.execute( # pylint: disable=protected-access
workspace_id=workspace_id, body=body, prefer=prefer, **kwargs
)
)
except HttpResponseError as err:
process_error(err, LogsQueryError)
response = LogsQueryResult._from_generated(generated_response) # pylint: disable=protected-access
response = None
if not generated_response.error:
return response
if not allow_partial_errors:
raise QueryPartialErrorException(error=generated_response.error)
response.partial_error = LogsQueryError._from_generated( # pylint: disable=protected-access
generated_response.error
response = LogsQueryResult._from_generated( # pylint: disable=protected-access
generated_response
)
else:
response = LogsQueryPartialResult._from_generated( # pylint: disable=protected-access
generated_response, LogsQueryError
)
return response

@distributed_trace
def query_batch(self, queries, **kwargs):
# type: (Union[Sequence[Dict], Sequence[LogsBatchQuery]], Any) -> List[LogsQueryResult]
def query_batch(
self,
queries, # type: Union[Sequence[Dict], Sequence[LogsBatchQuery]]
**kwargs # type: Any
):
# type: (...) -> List[Union[LogsQueryResult, LogsQueryPartialResult, LogsQueryError]]
"""Execute a list of analytics queries. Each request can be either a LogQueryRequest
object or an equivalent serialized model.
The response is returned in the same order as that of the requests sent.
:param queries: The list of Kusto queries to execute.
:type queries: list[dict] or list[~azure.monitor.query.LogsBatchQuery]
:keyword bool allow_partial_errors: If set to True, a `LogsQueryResult` object is returned
when a partial error occurs. The error can be accessed using the `partial_error`
attribute in the object.
:return: List of LogsQueryResult, or the result of cls(response)
:rtype: list[~azure.monitor.query.LogsQueryResult]
:rtype: list[Union[~azure.monitor.query.LogsQueryResult, ~azure.monitor.query.LogsQueryPartialResult,
~azure.monitor.query.LogsQueryError]]
:raises: ~azure.core.exceptions.HttpResponseError
.. admonition:: Example:
@@ -155,25 +161,28 @@ def query_batch(self, queries, **kwargs):
:dedent: 0
:caption: Get a response for multiple Log Queries.
"""
allow_partial_errors = kwargs.pop('allow_partial_errors', False)
try:
queries = [LogsBatchQuery(**q) for q in queries]
except (KeyError, TypeError):
pass
queries = [q._to_generated() for q in queries] # pylint: disable=protected-access
queries = [
cast(LogsBatchQuery, q)._to_generated() for q in queries # pylint: disable=protected-access
]
try:
request_order = [req.id for req in queries]
except AttributeError:
request_order = [req['id'] for req in queries]
request_order = [req["id"] for req in queries]
batch = BatchRequest(requests=queries)
generated = self._query_op.batch(batch, **kwargs)
mapping = {item.id: item for item in generated.responses}
return order_results(
request_order,
mapping,
LogsQueryResult,
LogsQueryError,
allow_partial_errors)
obj=LogsQueryResult,
err=LogsQueryError,
partial_err=LogsQueryPartialResult,
raise_with=LogsQueryError,
)

def close(self):
# type: () -> None
Original file line number Diff line number Diff line change
@@ -44,7 +44,7 @@ class MetricsQueryClient(object):

def __init__(self, credential, **kwargs):
# type: (TokenCredential, Any) -> None
endpoint = kwargs.pop('endpoint', 'https://management.azure.com')
endpoint = kwargs.pop("endpoint", "https://management.azure.com")
self._client = MonitorQueryClient(
credential=credential,
base_url=endpoint,
@@ -116,8 +116,12 @@ def query(self, resource_uri, metric_names, **kwargs):
kwargs.setdefault("top", kwargs.pop("max_results", None))
kwargs.setdefault("interval", kwargs.pop("granularity", None))
kwargs.setdefault("orderby", kwargs.pop("order_by", None))
generated = self._metrics_op.list(resource_uri, connection_verify=False, **kwargs)
return MetricsResult._from_generated(generated) # pylint: disable=protected-access
generated = self._metrics_op.list(
resource_uri, connection_verify=False, **kwargs
)
return MetricsResult._from_generated( # pylint: disable=protected-access
generated
)

@distributed_trace
def list_metric_namespaces(self, resource_uri, **kwargs):
@@ -133,7 +137,7 @@ def list_metric_namespaces(self, resource_uri, **kwargs):
:rtype: ~azure.core.paging.ItemPaged[~azure.monitor.query.MetricNamespace]
:raises: ~azure.core.exceptions.HttpResponseError
"""
start_time = kwargs.pop('start_time', None)
start_time = kwargs.pop("start_time", None)
if start_time:
start_time = Serializer.serialize_iso(start_time)
return self._namespace_op.list(
@@ -142,10 +146,12 @@ def list_metric_namespaces(self, resource_uri, **kwargs):
cls=kwargs.pop(
"cls",
lambda objs: [
MetricNamespace._from_generated(x) for x in objs # pylint: disable=protected-access
]
MetricNamespace._from_generated(x) # pylint: disable=protected-access
for x in objs
],
),
**kwargs)
**kwargs
)

@distributed_trace
def list_metric_definitions(self, resource_uri, **kwargs):
@@ -160,17 +166,19 @@ def list_metric_definitions(self, resource_uri, **kwargs):
:rtype: ~azure.core.paging.ItemPaged[~azure.monitor.query.MetricDefinition]
:raises: ~azure.core.exceptions.HttpResponseError
"""
metric_namespace = kwargs.pop('namespace', None)
metric_namespace = kwargs.pop("namespace", None)
return self._definitions_op.list(
resource_uri,
metric_namespace,
cls=kwargs.pop(
"cls",
lambda objs: [
MetricDefinition._from_generated(x) for x in objs # pylint: disable=protected-access
]
MetricDefinition._from_generated(x) # pylint: disable=protected-access
for x in objs
],
),
**kwargs)
**kwargs
)

def close(self):
# type: () -> None
323 changes: 187 additions & 136 deletions sdk/monitor/azure-monitor-query/azure/monitor/query/_models.py

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
@@ -7,7 +7,4 @@
from ._logs_query_client_async import LogsQueryClient
from ._metrics_query_client_async import MetricsQueryClient

__all__ = [
"LogsQueryClient",
"MetricsQueryClient"
]
__all__ = ["LogsQueryClient", "MetricsQueryClient"]
Original file line number Diff line number Diff line change
@@ -8,30 +8,34 @@
from azure.core.pipeline.policies import AsyncBearerTokenCredentialPolicy

if TYPE_CHECKING:
from azure.core.credentials import TokenCredential
from azure.core.credentials_async import AsyncTokenCredential


def get_authentication_policy(
credential: 'TokenCredential'
credential: "AsyncTokenCredential",
) -> AsyncBearerTokenCredentialPolicy:
"""Returns the correct authentication policy
"""
"""Returns the correct authentication policy"""

if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if hasattr(credential, "get_token"):
return AsyncBearerTokenCredentialPolicy(credential, "https://api.loganalytics.io/.default")
return AsyncBearerTokenCredentialPolicy(
credential, "https://api.loganalytics.io/.default"
)

raise TypeError("Unsupported credential")


def get_metrics_authentication_policy(
credential: 'TokenCredential'
credential: "AsyncTokenCredential",
) -> AsyncBearerTokenCredentialPolicy:
"""Returns the correct authentication policy
"""
"""Returns the correct authentication policy"""

if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if hasattr(credential, "get_token"):
return AsyncBearerTokenCredentialPolicy(credential, "https://management.azure.com/.default")
return AsyncBearerTokenCredentialPolicy(
credential, "https://management.azure.com/.default"
)

raise TypeError("Unsupported credential")
Original file line number Diff line number Diff line change
@@ -14,9 +14,9 @@

from .._generated.models import BatchRequest, QueryBody as LogsQueryBody
from .._helpers import construct_iso8601, order_results, process_error, process_prefer
from .._models import LogsQueryResult, LogsBatchQuery
from .._models import LogsQueryResult, LogsBatchQuery, LogsQueryPartialResult
from ._helpers_asyc import get_authentication_policy
from .._exceptions import LogsQueryError, QueryPartialErrorException
from .._exceptions import LogsQueryError

if TYPE_CHECKING:
from azure.core.credentials_async import AsyncTokenCredential
@@ -32,7 +32,7 @@ class LogsQueryClient(object):
"""

def __init__(self, credential: "AsyncTokenCredential", **kwargs: Any) -> None:
self._endpoint = kwargs.pop('endpoint', 'https://api.loganalytics.io/v1')
self._endpoint = kwargs.pop("endpoint", "https://api.loganalytics.io/v1")
self._client = MonitorQueryClient(
credential=credential,
authentication_policy=get_authentication_policy(credential),
@@ -47,8 +47,11 @@ async def query(
workspace_id: str,
query: str,
*,
timespan: Union[timedelta, Tuple[datetime, timedelta], Tuple[datetime, datetime]],
**kwargs: Any) -> LogsQueryResult:
timespan: Union[
timedelta, Tuple[datetime, timedelta], Tuple[datetime, datetime]
],
**kwargs: Any
) -> Union[LogsQueryResult, LogsQueryPartialResult]:
"""Execute an Analytics query.
Executes an Analytics query for data.
@@ -59,7 +62,7 @@ async def query(
:param query: The Kusto query. Learn more about the `Kusto query syntax
<https://docs.microsoft.com/azure/data-explorer/kusto/query/>`_.
:type query: str
:param timespan: The timespan for which to query the data. This can be a timedelta,
:param timespan: Required. The timespan for which to query the data. This can be a timedelta,
a timedelta and a start datetime, or a start datetime/end datetime.
:type timespan: ~datetime.timedelta or tuple[~datetime.datetime, ~datetime.timedelta]
or tuple[~datetime.datetime, ~datetime.datetime]
@@ -73,83 +76,80 @@ async def query(
These can be qualified workspace names, workspace Ids or Azure resource Ids.
:paramtype additional_workspaces: list[str]
:return: QueryResults, or the result of cls(response)
:rtype: ~azure.monitor.query.LogsQueryResult
:rtype: ~azure.monitor.query.LogsQueryResult or ~azure.monitor.query.LogsQueryPartialResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
allow_partial_errors = kwargs.pop('allow_partial_errors', False)
timespan = construct_iso8601(timespan)
include_statistics = kwargs.pop("include_statistics", False)
include_visualization = kwargs.pop("include_visualization", False)
server_timeout = kwargs.pop("server_timeout", None)
additional_workspaces = kwargs.pop("additional_workspaces", None)

prefer = process_prefer(server_timeout, include_statistics, include_visualization)
prefer = process_prefer(
server_timeout, include_statistics, include_visualization
)

body = LogsQueryBody(
query=query,
timespan=timespan,
workspaces=additional_workspaces,
**kwargs
query=query, timespan=timespan, workspaces=additional_workspaces, **kwargs
)

try:
generated_response = await self._query_op.execute( # pylint: disable=protected-access
workspace_id=workspace_id,
body=body,
prefer=prefer,
**kwargs
generated_response = (
await self._query_op.execute( # pylint: disable=protected-access
workspace_id=workspace_id, body=body, prefer=prefer, **kwargs
)
)
except HttpResponseError as err:
process_error(err, LogsQueryError)
response = LogsQueryResult._from_generated(generated_response) # pylint: disable=protected-access
response = None
if not generated_response.error:
return response
if not allow_partial_errors:
raise QueryPartialErrorException(error=generated_response.error)
response.partial_error = LogsQueryError._from_generated( # pylint: disable=protected-access
generated_response.error
response = LogsQueryResult._from_generated( # pylint: disable=protected-access
generated_response
)
else:
response = LogsQueryPartialResult._from_generated( # pylint: disable=protected-access
generated_response, LogsQueryError
)
return response

@distributed_trace_async
async def query_batch(
self,
queries: Union[Sequence[Dict], Sequence[LogsBatchQuery]],
**kwargs: Any
) -> List[LogsQueryResult]:
self, queries: Union[Sequence[Dict], Sequence[LogsBatchQuery]], **kwargs: Any
) -> List[Union[LogsQueryResult, LogsQueryError, LogsQueryPartialResult]]:
"""Execute a list of analytics queries. Each request can be either a LogQueryRequest
object or an equivalent serialized model.
The response is returned in the same order as that of the requests sent.
:param queries: The list of Kusto queries to execute.
:type queries: list[dict] or list[~azure.monitor.query.LogsBatchQuery]
:keyword bool allow_partial_errors: If set to True, a `LogsQueryResult` object is returned
when a partial error occurs. The error can be accessed using the `partial_error`
attribute in the object.
:return: list of LogsQueryResult objects, or the result of cls(response)
:rtype: list[~azure.monitor.query.LogsQueryResult]
:rtype: list[~azure.monitor.query.LogsQueryResult or ~azure.monitor.query.LogsQueryPartialResult
or ~azure.monitor.query.LogsQueryError]
:raises: ~azure.core.exceptions.HttpResponseError
"""
allow_partial_errors = kwargs.pop('allow_partial_errors', False)
try:
queries = [LogsBatchQuery(**q) for q in queries]
queries = [LogsBatchQuery(**q) for q in queries] # type: ignore
except (KeyError, TypeError):
pass
queries = [q._to_generated() for q in queries] # pylint: disable=protected-access
queries = [
q._to_generated() for q in queries # pylint: disable=protected-access
]
try:
request_order = [req.id for req in queries]
except AttributeError:
request_order = [req['id'] for req in queries]
request_order = [req["id"] for req in queries]
batch = BatchRequest(requests=queries)
generated = await self._query_op.batch(batch, **kwargs)
mapping = {item.id: item for item in generated.responses}
return order_results(
request_order,
mapping,
LogsQueryResult,
LogsQueryError,
allow_partial_errors)
obj=LogsQueryResult,
err=LogsQueryError,
partial_err=LogsQueryPartialResult,
raise_with=LogsQueryError,
)

async def __aenter__(self) -> "LogsQueryClient":
await self._client.__aenter__()
Original file line number Diff line number Diff line change
@@ -25,6 +25,7 @@
if TYPE_CHECKING:
from azure.core.credentials_async import AsyncTokenCredential


class MetricsQueryClient(object):
"""MetricsQueryClient
@@ -35,7 +36,7 @@ class MetricsQueryClient(object):
"""

def __init__(self, credential: "AsyncTokenCredential", **kwargs: Any) -> None:
endpoint = kwargs.pop('endpoint', 'https://management.azure.com')
endpoint = kwargs.pop("endpoint", "https://management.azure.com")
self._client = MonitorQueryClient(
credential=credential,
base_url=endpoint,
@@ -48,11 +49,8 @@ def __init__(self, credential: "AsyncTokenCredential", **kwargs: Any) -> None:

@distributed_trace_async
async def query(
self,
resource_uri: str,
metric_names: List,
**kwargs: Any
) -> MetricsResult:
self, resource_uri: str, metric_names: List, **kwargs: Any
) -> MetricsResult:
"""Lists the metric values for a resource.
**Note**: Although the start_time, end_time, duration are optional parameters, it is highly
@@ -95,7 +93,7 @@ async def query(
:rtype: ~azure.monitor.query.MetricsResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
timespan = construct_iso8601(kwargs.pop('timespan', None))
timespan = construct_iso8601(kwargs.pop("timespan", None))
kwargs.setdefault("metricnames", ",".join(metric_names))
kwargs.setdefault("timespan", timespan)
kwargs.setdefault("top", kwargs.pop("max_results", None))
@@ -104,11 +102,17 @@ async def query(
aggregations = kwargs.pop("aggregations", None)
if aggregations:
kwargs.setdefault("aggregation", ",".join(aggregations))
generated = await self._metrics_op.list(resource_uri, connection_verify=False, **kwargs)
return MetricsResult._from_generated(generated) # pylint: disable=protected-access
generated = await self._metrics_op.list(
resource_uri, connection_verify=False, **kwargs
)
return MetricsResult._from_generated( # pylint: disable=protected-access
generated
)

@distributed_trace
def list_metric_namespaces(self, resource_uri: str, **kwargs: Any) -> AsyncItemPaged[MetricNamespace]:
def list_metric_namespaces(
self, resource_uri: str, **kwargs: Any
) -> AsyncItemPaged[MetricNamespace]:
"""Lists the metric namespaces for the resource.
:param resource_uri: The identifier of the resource.
@@ -120,7 +124,7 @@ def list_metric_namespaces(self, resource_uri: str, **kwargs: Any) -> AsyncItemP
:rtype: ~azure.core.paging.AsyncItemPaged[:class: `~azure.monitor.query.MetricNamespace`]
:raises: ~azure.core.exceptions.HttpResponseError
"""
start_time = kwargs.pop('start_time', None)
start_time = kwargs.pop("start_time", None)
if start_time:
start_time = Serializer.serialize_iso(start_time)
return self._namespace_op.list(
@@ -129,17 +133,17 @@ def list_metric_namespaces(self, resource_uri: str, **kwargs: Any) -> AsyncItemP
cls=kwargs.pop(
"cls",
lambda objs: [
MetricNamespace._from_generated(x) for x in objs # pylint: disable=protected-access
]
MetricNamespace._from_generated(x) # pylint: disable=protected-access
for x in objs
],
),
**kwargs)
**kwargs
)

@distributed_trace
def list_metric_definitions(
self,
resource_uri: str,
**kwargs: Any
) -> AsyncItemPaged[MetricDefinition]:
self, resource_uri: str, **kwargs: Any
) -> AsyncItemPaged[MetricDefinition]:
"""Lists the metric definitions for the resource.
:param resource_uri: The identifier of the resource.
@@ -150,17 +154,19 @@ def list_metric_definitions(
:rtype: ~azure.core.paging.AsyncItemPaged[:class: `~azure.monitor.query.MetricDefinition`]
:raises: ~azure.core.exceptions.HttpResponseError
"""
metric_namespace = kwargs.pop('namespace', None)
metric_namespace = kwargs.pop("namespace", None)
return self._definitions_op.list(
resource_uri,
metric_namespace,
cls=kwargs.pop(
"cls",
lambda objs: [
MetricDefinition._from_generated(x) for x in objs # pylint: disable=protected-access
]
MetricDefinition._from_generated(x) # pylint: disable=protected-access
for x in objs
],
),
**kwargs)
**kwargs
)

async def __aenter__(self) -> "MetricsQueryClient":
await self._client.__aenter__()
288 changes: 0 additions & 288 deletions sdk/monitor/azure-monitor-query/samples/champion_scenarios.md

This file was deleted.

26 changes: 16 additions & 10 deletions sdk/monitor/azure-monitor-query/samples/sample_batch_query.py
Original file line number Diff line number Diff line change
@@ -4,7 +4,7 @@
from datetime import datetime, timedelta
import os
import pandas as pd
from azure.monitor.query import LogsQueryClient, LogsBatchQuery
from azure.monitor.query import LogsQueryClient, LogsBatchQuery, LogsQueryStatus, LogsQueryPartialResult
from azure.identity import DefaultAzureCredential


@@ -33,17 +33,23 @@
include_statistics=True
),
]
responses = client.query_batch(requests, allow_partial_errors=False)

for response in responses:
if not response.is_error:
table = response.tables[0]
results = client.query_batch(requests)

for res in results:
if res.status == LogsQueryStatus.FAILURE:
# this will be a LogsQueryError
print(res.message)
elif res.status == LogsQueryStatus.PARTIAL:
## this will be a LogsQueryPartialResult
print(res.partial_error.message)
table = res.tables[0]
df = pd.DataFrame(table.rows, columns=table.columns)
print(df)
elif res.status == LogsQueryStatus.SUCCESS:
## this will be a LogsQueryResult
table = res.tables[0]
df = pd.DataFrame(table.rows, columns=table.columns)
print(df)
print("\n\n-------------------------\n\n")
else:
error = response
print(error.message)


# [END send_query_batch]
Original file line number Diff line number Diff line change
@@ -4,7 +4,7 @@
import os
import pandas as pd
from datetime import timedelta
from azure.monitor.query import LogsQueryClient, QueryPartialErrorException
from azure.monitor.query import LogsQueryClient, LogsQueryStatus
from azure.core.exceptions import HttpResponseError
from azure.identity import DefaultAzureCredential

@@ -17,17 +17,18 @@
# Response time trend
# request duration over the last 12 hours.
# [START send_logs_query]
query = """AppRequests | take 5"""
query = """AppRadfequests | take 5"""

# returns LogsQueryResult
try:
response = client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1))
if response.status == LogsQueryStatus.PARTIAL:
# handle error here
error = response.partial_error
print(error.message)
for table in response:
df = pd.DataFrame(data=table.rows, columns=table.columns)
print(df)
except QueryPartialErrorException as err:
print("this is a partial error")
print(err.details)
except HttpResponseError as err:
print("something fatal happened")
print (err)
Original file line number Diff line number Diff line change
@@ -3,7 +3,7 @@
import os
from azure.identity.aio import ClientSecretCredential
from azure.core.exceptions import HttpResponseError
from azure.monitor.query import LogsBatchQuery, LogsQueryError,LogsQueryResult, QueryPartialErrorException
from azure.monitor.query import LogsBatchQuery, LogsQueryError,LogsQueryResult, LogsQueryPartialResult
from azure.monitor.query.aio import LogsQueryClient

def _credential():
@@ -30,18 +30,8 @@ async def test_logs_single_query_partial_exception_not_allowed():
query = """let Weight = 92233720368547758;
range x from 1 to 3 step 1
| summarize percentilesw(x, Weight * 100, 50)"""
with pytest.raises(QueryPartialErrorException) as err:
await client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1))

@pytest.mark.live_test_only
@pytest.mark.asyncio
async def test_logs_single_query_partial_exception_allowed():
credential = _credential()
client = LogsQueryClient(credential)
query = """let Weight = 92233720368547758;
range x from 1 to 3 step 1
| summarize percentilesw(x, Weight * 100, 50)"""
response = await client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1), allow_partial_errors=True)
response = await client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1))
assert response.__class__ == LogsQueryPartialResult
assert response.partial_error is not None
assert response.partial_error.code == 'PartialError'
assert response.partial_error.__class__ == LogsQueryError
@@ -76,7 +66,7 @@ async def test_logs_batch_query_fatal_exception():
),
]
with pytest.raises(HttpResponseError):
await client.query_batch(requests, allow_partial_errors=True)
await client.query_batch(requests)

@pytest.mark.live_test_only
@pytest.mark.asyncio
@@ -90,38 +80,7 @@ async def test_logs_batch_query_partial_exception_not_allowed():
workspace_id= os.environ['LOG_WORKSPACE_ID']
),
LogsBatchQuery(
query= """AppRequests | take 10""",
timespan=(datetime(2021, 6, 2), timedelta(days=1)),
workspace_id= os.environ['LOG_WORKSPACE_ID']
),
LogsBatchQuery(
query= """let Weight = 92233720368547758;
range x from 1 to 3 step 1
| summarize percentilesw(x, Weight * 100, 50)""",
workspace_id= os.environ['LOG_WORKSPACE_ID'],
timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)),
include_statistics=True
),
]
responses = await client.query_batch(requests)
r1, r2, r3 = responses[0], responses[1], responses[2]
assert r1.__class__ == LogsQueryResult
assert r2.__class__ == LogsQueryResult
assert r3.__class__ == LogsQueryError

@pytest.mark.live_test_only
@pytest.mark.asyncio
async def test_logs_batch_query_partial_exception_allowed():
credential = _credential()
client = LogsQueryClient(credential)
requests = [
LogsBatchQuery(
query="AzureActivity | summarize count()",
timespan=timedelta(hours=1),
workspace_id= os.environ['LOG_WORKSPACE_ID']
),
LogsBatchQuery(
query= """AppRequests | take 10""",
query= """bad query | take 10""",
timespan=(datetime(2021, 6, 2), timedelta(days=1)),
workspace_id= os.environ['LOG_WORKSPACE_ID']
),
@@ -134,38 +93,8 @@ async def test_logs_batch_query_partial_exception_allowed():
include_statistics=True
),
]
responses = await client.query_batch(requests, allow_partial_errors=True)
r1, r2, r3 = responses[0], responses[1], responses[2]
assert r1.__class__ == LogsQueryResult
assert r2.__class__ == LogsQueryResult
assert r3.__class__ == LogsQueryResult
assert r3.partial_error is not None

@pytest.mark.live_test_only
@pytest.mark.asyncio
async def test_logs_batch_query_non_fatal_exception():
credential = _credential()
client = LogsQueryClient(credential)
requests = [
LogsBatchQuery(
query="AzureActivity | summarize count()",
timespan=timedelta(hours=1),
workspace_id= os.environ['LOG_WORKSPACE_ID']
),
LogsBatchQuery(
query= """AppRequests | take 10""",
timespan=(datetime(2021, 6, 2), timedelta(days=1)),
workspace_id= os.environ['LOG_WORKSPACE_ID']
),
LogsBatchQuery(
query= """Bad Query""",
workspace_id= os.environ['LOG_WORKSPACE_ID'],
timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)),
include_statistics=True
),
]
responses = await client.query_batch(requests)
r1, r2, r3 = responses[0], responses[1], responses[2]
assert r1.__class__ == LogsQueryResult
assert r2.__class__ == LogsQueryResult
assert r3.__class__ == LogsQueryError
assert r2.__class__ == LogsQueryError
assert r3.__class__ == LogsQueryPartialResult
55 changes: 8 additions & 47 deletions sdk/monitor/azure-monitor-query/tests/test_exceptions.py
Original file line number Diff line number Diff line change
@@ -3,7 +3,7 @@
import os
from azure.identity import ClientSecretCredential
from azure.core.exceptions import HttpResponseError
from azure.monitor.query import LogsQueryClient, LogsBatchQuery, LogsQueryError,LogsQueryResult, QueryPartialErrorException
from azure.monitor.query import LogsQueryClient, LogsBatchQuery, LogsQueryError, LogsQueryResult, LogsQueryPartialResult

def _credential():
credential = ClientSecretCredential(
@@ -21,24 +21,16 @@ def test_logs_single_query_fatal_exception():
client.query('bad_workspace_id', 'AppRequests', timespan=None)

@pytest.mark.live_test_only
def test_logs_single_query_partial_exception_not_allowed():
def test_logs_single_query_partial_exception():
credential = _credential()
client = LogsQueryClient(credential)
query = """let Weight = 92233720368547758;
range x from 1 to 3 step 1
| summarize percentilesw(x, Weight * 100, 50)"""
with pytest.raises(QueryPartialErrorException) as err:
client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1))

@pytest.mark.live_test_only
def test_logs_single_query_partial_exception_allowed():
credential = _credential()
client = LogsQueryClient(credential)
query = """let Weight = 92233720368547758;
range x from 1 to 3 step 1
| summarize percentilesw(x, Weight * 100, 50)"""
response = client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1), allow_partial_errors=True)
response = client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1))
assert response.__class__ == LogsQueryPartialResult
assert response.partial_error is not None
assert response.partial_data is not None
assert response.partial_error.code == 'PartialError'
assert response.partial_error.__class__ == LogsQueryError

@@ -71,10 +63,10 @@ def test_logs_batch_query_fatal_exception():
),
]
with pytest.raises(HttpResponseError):
responses = client.query_batch(requests, allow_partial_errors=True)
responses = client.query_batch(requests)

@pytest.mark.live_test_only
def test_logs_batch_query_partial_exception_not_allowed():
def test_logs_batch_query_partial_exception():
credential = _credential()
client = LogsQueryClient(credential)
requests = [
@@ -101,38 +93,7 @@ def test_logs_batch_query_partial_exception_not_allowed():
r1, r2, r3 = responses[0], responses[1], responses[2]
assert r1.__class__ == LogsQueryResult
assert r2.__class__ == LogsQueryResult
assert r3.__class__ == LogsQueryError

@pytest.mark.live_test_only
def test_logs_batch_query_partial_exception_allowed():
credential = _credential()
client = LogsQueryClient(credential)
requests = [
LogsBatchQuery(
query="AzureActivity | summarize count()",
timespan=timedelta(hours=1),
workspace_id= os.environ['LOG_WORKSPACE_ID']
),
LogsBatchQuery(
query= """AppRequests | take 10""",
timespan=(datetime(2021, 6, 2), timedelta(days=1)),
workspace_id= os.environ['LOG_WORKSPACE_ID']
),
LogsBatchQuery(
query= """let Weight = 92233720368547758;
range x from 1 to 3 step 1
| summarize percentilesw(x, Weight * 100, 50)""",
workspace_id= os.environ['LOG_WORKSPACE_ID'],
timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)),
include_statistics=True
),
]
responses = client.query_batch(requests, allow_partial_errors=True)
r1, r2, r3 = responses[0], responses[1], responses[2]
assert r1.__class__ == LogsQueryResult
assert r2.__class__ == LogsQueryResult
assert r3.__class__ == LogsQueryResult
assert r3.partial_error is not None
assert r3.__class__ == LogsQueryPartialResult

@pytest.mark.live_test_only
def test_logs_batch_query_non_fatal_exception():
6 changes: 4 additions & 2 deletions sdk/monitor/azure-monitor-query/tests/test_logs_client.py
Original file line number Diff line number Diff line change
@@ -3,7 +3,7 @@
import os
from azure.identity import ClientSecretCredential
from azure.core.exceptions import HttpResponseError
from azure.monitor.query import LogsQueryClient, LogsBatchQuery, LogsQueryError, LogsTable, LogsQueryResult, LogsTableRow
from azure.monitor.query import LogsQueryClient, LogsBatchQuery, LogsQueryError, LogsTable, LogsQueryResult, LogsTableRow, LogsQueryPartialResult

def _credential():
credential = ClientSecretCredential(
@@ -58,9 +58,11 @@ def test_logs_single_query_with_partial_success():
query = """let Weight = 92233720368547758;
range x from 1 to 3 step 1
| summarize percentilesw(x, Weight * 100, 50)"""
response = client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=None, allow_partial_errors=True)
response = client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=None)

assert response.partial_error is not None
assert response.partial_data is not None
assert response.__class__ == LogsQueryPartialResult

@pytest.mark.skip("https://github.com/Azure/azure-sdk-for-python/issues/19917")
@pytest.mark.live_test_only

0 comments on commit 0475f51

Please sign in to comment.