From 83fca5e8f4b4b35126d99fade46efd491f2b928b Mon Sep 17 00:00:00 2001
From: Paul Van Eck
Date: Tue, 10 Jan 2023 19:18:35 -0800
Subject: [PATCH] [Monitor][Query] Improve typing (#28175)

[Monitor][Query] Improve typing

This enables the mypy, pyright, and verifytypes checks in CI, and also adds
some typing improvements in order to pass the checks.

* Use class attribute style typing
* Class ordering in the models file was changed a bit to allow for class
  attribute typing.

Signed-off-by: Paul Van Eck
---
 .../azure-monitor-query/azure/__init__.py | 2 +-
 .../azure/monitor/__init__.py | 2 +-
 .../azure/monitor/query/_exceptions.py | 39 +-
 .../azure/monitor/query/_helpers.py | 49 +-
 .../azure/monitor/query/_logs_query_client.py | 11 +-
 .../monitor/query/_metrics_query_client.py | 8 +-
 .../azure/monitor/query/_models.py | 723 +++++++++---------
 .../azure/monitor/query/aio/_helpers_async.py | 6 +-
 .../query/aio/_logs_query_client_async.py | 14 +-
 .../query/aio/_metrics_query_client_async.py | 8 +-
 .../ci_tools/environment_exclusions.py | 3 -
 11 files changed, 446 insertions(+), 419 deletions(-)

diff --git a/sdk/monitor/azure-monitor-query/azure/__init__.py b/sdk/monitor/azure-monitor-query/azure/__init__.py
index 0c36c2076ba0..69e3be50dac4 100644
--- a/sdk/monitor/azure-monitor-query/azure/__init__.py
+++ b/sdk/monitor/azure-monitor-query/azure/__init__.py
@@ -1 +1 @@
-__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/__init__.py
index 0c36c2076ba0..69e3be50dac4 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/__init__.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/__init__.py
@@ -1 +1 @@
-__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_exceptions.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_exceptions.py
index 7353668bca92..8c819149cc06 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_exceptions.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_exceptions.py
@@ -4,33 +4,40 @@
 # Licensed under the MIT License. See License.txt in the project root for
 # license information.
 # --------------------------------------------------------------------------
-from typing import Any
+import sys
+from typing import Any, List, Optional

 from ._models import LogsQueryStatus

+if sys.version_info >= (3, 9):
+    from collections.abc import MutableMapping
+else:
+    from typing import MutableMapping # pylint: disable=ungrouped-imports

-class LogsQueryError(object):
-    """The code and message for an error.
+JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object

-    :ivar code: A machine readable error code.
-    :vartype code: str
-    :ivar message: A human readable error message.
-    :vartype message: str
-    :ivar details: A list of additional details about the error.
-    :vartype details: list[JSON]
-    :ivar status: status for error item when iterating over list of
-     results. Always "Failure" for an instance of a LogsQueryError.
- :vartype status: ~azure.monitor.query.LogsQueryStatus - """ + +class LogsQueryError: + """The code and message for an error.""" + + code: str + """A machine readable error code.""" + message: str + """A human readable error message.""" + details: Optional[List[JSON]] = None + """A list of additional details about the error.""" + status: LogsQueryStatus + """Status for error item when iterating over list of results. Always "Failure" for an instance of a + LogsQueryError.""" def __init__(self, **kwargs: Any) -> None: - self.code = kwargs.get("code", None) - self.message = kwargs.get("message", None) + self.code = kwargs.get("code", "") + self.message = kwargs.get("message", "") self.details = kwargs.get("details", None) self.status = LogsQueryStatus.FAILURE - def __str__(self): + def __str__(self) -> str: return str(self.__dict__) @classmethod diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_helpers.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_helpers.py index aec8060b72a4..64c2b44e51b8 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_helpers.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_helpers.py @@ -5,7 +5,7 @@ # license information. # -------------------------------------------------------------------------- from datetime import datetime, timedelta -from typing import List, Dict, Any +from typing import List, Dict, Any, Optional from azure.core.credentials import TokenCredential from azure.core.exceptions import HttpResponseError @@ -16,7 +16,7 @@ def get_authentication_policy( credential: TokenCredential, - audience: str = None + audience: Optional[str] = None ) -> BearerTokenCredentialPolicy: """Returns the correct authentication policy""" if not audience: @@ -34,7 +34,7 @@ def get_authentication_policy( def get_metrics_authentication_policy( credential: TokenCredential, - audience: str = None + audience: Optional[str] = None ) -> BearerTokenCredentialPolicy: """Returns the correct authentication policy""" if not audience: @@ -55,28 +55,34 @@ def order_results(request_order: List, mapping: Dict[str, Any], **kwargs: Any) - results = [] for item in ordered: if not item["body"].get("error"): - results.append( - kwargs.get("obj")._from_generated(item["body"]) # pylint: disable=protected-access - ) + result_obj = kwargs.get("obj") + if result_obj: + results.append( + result_obj._from_generated(item["body"]) # pylint: disable=protected-access + ) else: error = item["body"]["error"] if error.get("code") == "PartialError": - res = kwargs.get("partial_err")._from_generated( # pylint: disable=protected-access - item["body"], kwargs.get("raise_with") - ) - results.append(res) + partial_err = kwargs.get("partial_err") + if partial_err: + res = partial_err._from_generated( # pylint: disable=protected-access + item["body"], kwargs.get("raise_with") + ) + results.append(res) else: - results.append( - kwargs.get("err")._from_generated(error) # pylint: disable=protected-access - ) + err = kwargs.get("err") + if err: + results.append( + err._from_generated(error) # pylint: disable=protected-access + ) return results -def construct_iso8601(timespan=None): +def construct_iso8601(timespan=None) -> Optional[str]: if not timespan: return None + start, end, duration = None, None, None try: - start, end, duration = None, None, None if isinstance(timespan[1], datetime): # we treat thi as start_time, end_time start, end = timespan[0], timespan[1] elif isinstance( @@ -89,9 +95,10 @@ def construct_iso8601(timespan=None): ) except TypeError: duration 
= timespan # it means only duration (timedelta) is provideds + duration_str = "" if duration: try: - duration = "PT{}S".format(duration.total_seconds()) + duration_str = "PT{}S".format(duration.total_seconds()) except AttributeError: raise ValueError("timespan must be a timedelta or a tuple.") iso_str = None @@ -99,15 +106,15 @@ def construct_iso8601(timespan=None): start = Serializer.serialize_iso(start) if end is not None: end = Serializer.serialize_iso(end) - iso_str = start + "/" + end - elif duration is not None: - iso_str = start + "/" + duration + iso_str = f"{start}/{end}" + elif duration_str: + iso_str = f"{start}/{duration_str}" else: # means that an invalid value None that is provided with start_time raise ValueError( "Duration or end_time cannot be None when provided with start_time." ) else: - iso_str = duration + iso_str = duration_str return iso_str @@ -124,7 +131,7 @@ def native_col_type(col_type, value): return value -def process_row(col_types, row): +def process_row(col_types, row) -> List[Any]: return [native_col_type(col_types[ind], val) for ind, val in enumerate(row)] diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_logs_query_client.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_logs_query_client.py index 1f8be47361c0..e1f1eb6d0b24 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_logs_query_client.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_logs_query_client.py @@ -107,7 +107,7 @@ def query_workspace( :dedent: 0 :caption: Get a response for a single Log Query """ - timespan = construct_iso8601(timespan) + timespan_iso = construct_iso8601(timespan) include_statistics = kwargs.pop("include_statistics", False) include_visualization = kwargs.pop("include_visualization", False) server_timeout = kwargs.pop("server_timeout", None) @@ -119,7 +119,7 @@ def query_workspace( body = { "query": query, - "timespan": timespan, + "timespan": timespan_iso, "workspaces": additional_workspaces } @@ -131,7 +131,8 @@ def query_workspace( ) except HttpResponseError as err: process_error(err, LogsQueryError) - response = None + + response: Union[LogsQueryResult, LogsQueryPartialResult] if not generated_response.get("error"): response = LogsQueryResult._from_generated( # pylint: disable=protected-access generated_response @@ -140,7 +141,7 @@ def query_workspace( response = LogsQueryPartialResult._from_generated( # pylint: disable=protected-access generated_response, LogsQueryError ) - return cast(Union[LogsQueryResult, LogsQueryPartialResult], response) + return response @distributed_trace def query_batch( @@ -200,5 +201,5 @@ def __enter__(self) -> "LogsQueryClient": self._client.__enter__() # pylint:disable=no-member return self - def __exit__(self, *args) -> None: + def __exit__(self, *args: Any) -> None: self._client.__exit__(*args) # pylint:disable=no-member diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_metrics_query_client.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_metrics_query_client.py index 64ca1fd74dc1..c803f325be50 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_metrics_query_client.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_metrics_query_client.py @@ -5,7 +5,7 @@ # license information. 
# -------------------------------------------------------------------------- # pylint: disable=anomalous-backslash-in-string -from typing import Any, List +from typing import Any, cast, List from azure.core.credentials import TokenCredential from azure.core.paging import ItemPaged @@ -140,7 +140,7 @@ def list_metric_namespaces(self, resource_uri: str, **kwargs: Any) -> ItemPaged[ start_time = kwargs.pop("start_time", None) if start_time: start_time = Serializer.serialize_iso(start_time) - return self._namespace_op.list( + res = self._namespace_op.list( resource_uri, start_time=start_time, cls=kwargs.pop( @@ -152,6 +152,7 @@ def list_metric_namespaces(self, resource_uri: str, **kwargs: Any) -> ItemPaged[ ), **kwargs ) + return cast(ItemPaged[MetricNamespace], res) @distributed_trace def list_metric_definitions(self, resource_uri: str, **kwargs: Any) -> ItemPaged[MetricDefinition]: @@ -166,7 +167,7 @@ def list_metric_definitions(self, resource_uri: str, **kwargs: Any) -> ItemPaged :raises: ~azure.core.exceptions.HttpResponseError """ metric_namespace = kwargs.pop("namespace", None) - return self._definitions_op.list( + res = self._definitions_op.list( resource_uri, metricnamespace=metric_namespace, cls=kwargs.pop( @@ -178,6 +179,7 @@ def list_metric_definitions(self, resource_uri: str, **kwargs: Any) -> ItemPaged ), **kwargs ) + return cast(ItemPaged[MetricDefinition], res) def close(self) -> None: """Close the :class:`~azure.monitor.query.MetricsQueryClient` session.""" diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_models.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_models.py index 544401e42a7b..4c59f849df20 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_models.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_models.py @@ -8,60 +8,32 @@ from enum import Enum import uuid from datetime import datetime, timedelta -from typing import Any, Optional, List, Union, Tuple +import sys +from typing import Any, Optional, List, Union, Tuple, Dict, Iterator + from azure.core import CaseInsensitiveEnumMeta from ._generated._serialization import Deserializer from ._helpers import construct_iso8601, process_row +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # pylint: disable=ungrouped-imports -class LogsTable(object): - """Contains the columns and rows for one table in a query response. - - All required parameters must be populated in order to send to Azure. - - :ivar name: Required. The name of the table. - :vartype name: str - :ivar columns: The labels of columns in this table. - :vartype columns: list[str] - :ivar column_types: The types of columns in this table. - :vartype column_types: list[object] - :ivar rows: Required. The resulting rows from this query. 
- :vartype rows: list[~azure.monitor.query.LogsTableRow] - """ - def __init__(self, **kwargs: Any) -> None: - self.name = kwargs.pop("name", None) # type: str - self.columns = kwargs.pop("columns", None) # type: Optional[str] - self.columns_types = kwargs.pop("column_types", None) # type: Optional[Any] - _rows = kwargs.pop("rows", None) - self.rows = [ - LogsTableRow( - row=row, - row_index=ind, - col_types=self.columns_types, - columns=self.columns, - ) - for ind, row in enumerate(_rows) - ] - - @classmethod - def _from_generated(cls, generated): - return cls( - name=generated.get("name"), - columns=[col["name"] for col in generated.get("columns", [])], - column_types=[col["type"] for col in generated.get("columns", [])], - rows=generated.get("rows"), - ) +JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object -class LogsTableRow(object): +class LogsTableRow: """Represents a single row in logs table. - This type is gettable by both column name and column index. - :ivar int index: The index of the row in the table + This type is gettable by both column name and column index. """ + index: int + """The index of the row in the table""" + def __init__(self, **kwargs: Any) -> None: _col_types = kwargs["col_types"] row = kwargs["row"] @@ -70,17 +42,17 @@ def __init__(self, **kwargs: Any) -> None: _columns = kwargs["columns"] self._row_dict = {_columns[i]: self._row[i] for i in range(len(self._row))} - def __iter__(self): + def __iter__(self) -> Iterator[Any]: """This will iterate over the row directly.""" return iter(self._row) - def __len__(self): + def __len__(self) -> int: return len(self._row) - def __repr__(self): + def __repr__(self) -> str: return repr(self._row) - def __getitem__(self, column): + def __getitem__(self, column: Union[str, int]) -> Any: """This type must be subscriptable directly to row. Must be gettable by both column name and row index @@ -90,40 +62,190 @@ def __getitem__(self, column): try: return self._row_dict[column] except KeyError: - return self._row[column] - - -class MetricsQueryResult(object): - """The response to a metrics query. - - :ivar cost: The integer value representing the cost of the query, for data case. - :vartype cost: int - :ivar timespan: Required. The timespan for which the data was retrieved. Its value consists of - two datetimes concatenated, separated by '/'. This may be adjusted in the future and returned - back from what was originally requested. - :vartype timespan: str - :ivar granularity: The granularity (window size) for which the metric data was returned in. This - may be adjusted in the future and returned back from what was originally requested. This is - not present if a metadata request was made. - :vartype granularity: ~datetime.timedelta - :ivar namespace: The namespace of the metrics that has been queried. - :vartype namespace: str - :ivar resource_region: The region of the resource that has been queried for metrics. - :vartype resource_region: str - :ivar metrics: Required. The value of the collection. - :vartype metrics: list[~azure.monitor.query.Metric] + return self._row[int(column)] + + +class LogsTable: + """Contains the columns and rows for one table in a query response. + + All required parameters must be populated in order to send to Azure. """ - def __init__(self, **kwargs) -> None: - self.cost = kwargs.get("cost", None) + name: str + """Required. The name of the table.""" + rows: List[LogsTableRow] + """Required. 
The resulting rows from this query.""" + columns: Optional[List[str]] = None + """The labels of columns in this table.""" + columns_types: Optional[List[Any]] = None + """The types of columns in this table.""" + + def __init__(self, **kwargs: Any) -> None: + self.name = kwargs.pop("name", "") + self.columns = kwargs.pop("columns", None) + self.columns_types = kwargs.pop("column_types", None) + _rows = kwargs.pop("rows", []) + self.rows: List[LogsTableRow] = [ + LogsTableRow( + row=row, + row_index=ind, + col_types=self.columns_types, + columns=self.columns, + ) + for ind, row in enumerate(_rows) + ] + + @classmethod + def _from_generated(cls, generated) -> "LogsTable": + return cls( + name=generated.get("name"), + columns=[col["name"] for col in generated.get("columns", [])], + column_types=[col["type"] for col in generated.get("columns", [])], + rows=generated.get("rows"), + ) + + +class MetricValue: + """Represents a metric value.""" + + timestamp: datetime + """The timestamp for the metric value.""" + average: Optional[float] = None + """The average value in the time range.""" + minimum: Optional[float] = None + """The least value in the time range.""" + maximum: Optional[float] = None + """The greatest value in the time range.""" + total: Optional[float] = None + """The sum of all of the values in the time range.""" + count: Optional[float] = None + """The number of samples in the time range. Can be used to determine the number of values that + contributed to the average value.""" + + + def __init__(self, **kwargs: Any) -> None: + self.timestamp = kwargs["timestamp"] + self.average = kwargs.get("average", None) + self.minimum = kwargs.get("minimum", None) + self.maximum = kwargs.get("maximum", None) + self.total = kwargs.get("total", None) + self.count = kwargs.get("count", None) + + @classmethod + def _from_generated(cls, generated): + if not generated: + return cls() + return cls( + timestamp=Deserializer.deserialize_iso(generated.get("time_stamp")), + average=generated.get("average"), + minimum=generated.get("minimum"), + maximum=generated.get("maximum"), + total=generated.get("total"), + count=generated.get("count"), + ) + + +class TimeSeriesElement: + """A time series result type. The discriminator value is always TimeSeries in this case.""" + + metadata_values: Dict[str, str] + """The metadata values returned if $filter was specified in the call.""" + data: List[MetricValue] + """An array of data points representing the metric values. This is only returned if a result + type of data is specified.""" + + def __init__(self, **kwargs: Any) -> None: + self.metadata_values = kwargs.get("metadata_values", {}) + self.data = kwargs.get("data", []) + + @classmethod + def _from_generated(cls, generated) -> "TimeSeriesElement": + if not generated: + return cls() + return cls( + metadata_values={ + obj["name"]["value"]: obj.get("value") for obj in generated.get("metadatavalues", []) + }, + data=[ + MetricValue._from_generated(val) for val in generated.get("data", []) # pylint: disable=protected-access + ], + ) + + +class Metric: + """The result data of a single metric name.""" + + id: str + """The metric ID.""" + type: str + """The resource type of the metric resource.""" + name: str + """The name of the metric.""" + unit: str + """The unit of the metric. To access these values, use the MetricUnit enum. 
+ Possible values include "Count", "Bytes", "Seconds", "CountPerSecond", "BytesPerSecond", "Percent", + "MilliSeconds", "ByteSeconds", "Unspecified", "Cores", "MilliCores", "NanoCores", "BitsPerSecond".""" + timeseries: TimeSeriesElement + """The time series returned when a data query is performed.""" + display_description: str + """Detailed description of this metric.""" + + def __init__(self, **kwargs: Any) -> None: + self.id = kwargs["id"] + self.type = kwargs["type"] + self.name = kwargs["name"] + self.unit = kwargs["unit"] + self.timeseries = kwargs["timeseries"] + self.display_description = kwargs["display_description"] + + @classmethod + def _from_generated(cls, generated) -> "Metric": + if not generated: + return cls() + return cls( + id=generated.get("id"), + type=generated.get("type"), + name=generated.get("name", {}).get("value"), + unit=generated.get("unit"), + timeseries=[ + TimeSeriesElement._from_generated(t) # pylint: disable=protected-access + for t in generated.get("timeseries", []) + ], + display_description=generated.get("displayDescription"), + ) + + +class MetricsQueryResult: + """The response to a metrics query.""" + + timespan: str + """Required. The timespan for which the data was retrieved. Its value consists of two datetimes + concatenated, separated by '/'. This may be adjusted in the future and returned back from what + was originally requested.""" + metrics: List[Metric] + """Required. The value of the collection.""" + granularity: Optional[timedelta] = None + """The granularity (window size) for which the metric data was returned in. This may be adjusted + in the future and returned back from what was originally requested. This is not present if a + metadata request was made.""" + namespace: Optional[str] = None + """The namespace of the metrics that has been queried.""" + resource_region: Optional[str] = None + """The region of the resource that has been queried for metrics.""" + cost: Optional[int] = None + """The integer value representing the cost of the query, for data case.""" + + + def __init__(self, **kwargs: Any) -> None: self.timespan = kwargs["timespan"] + self.metrics= kwargs["metrics"] self.granularity = kwargs.get("granularity", None) self.namespace = kwargs.get("namespace", None) self.resource_region = kwargs.get("resource_region", None) - self.metrics = kwargs["metrics"] + self.cost = kwargs.get("cost", None) @classmethod - def _from_generated(cls, generated): + def _from_generated(cls, generated) -> "MetricsQueryResult": if not generated: return cls() granularity = None @@ -140,10 +262,11 @@ def _from_generated(cls, generated): ]), ) + class MetricsList(list): - """Custom list for metrics - """ - def __init__(self, **kwargs): # pylint: disable=super-init-not-called + """Custom list for metrics.""" + + def __init__(self, **kwargs: Any) -> None: # pylint: disable=super-init-not-called self._metrics = kwargs['metrics'] self._metric_names = {val.name: ind for ind, val in enumerate(self._metrics)} @@ -163,7 +286,7 @@ def __getitem__(self, metric): return self._metrics[self._metric_names[metric]] -class LogsBatchQuery(object): +class LogsBatchQuery: """A single request in a batch. The batch query API accepts a list of these objects. :param workspace_id: Workspace Id to be included in the query. @@ -186,6 +309,15 @@ class LogsBatchQuery(object): visualization to show. 
""" + id: str + """The id of the query.""" + body: Dict[str, Any] + """The body of the query.""" + headers: Dict[str, str] + """The headers of the query.""" + workspace: str + """The workspace ID to be included in the query.""" + def __init__( self, workspace_id: str, @@ -212,18 +344,18 @@ def __init__( prefer += "include-render=true" headers = {"Prefer": prefer} - timespan = construct_iso8601(timespan) + timespan_iso = construct_iso8601(timespan) additional_workspaces = kwargs.pop("additional_workspaces", None) self.id = str(uuid.uuid4()) self.body = { "query": query, - "timespan": timespan, + "timespan": timespan_iso, "workspaces": additional_workspaces, } self.headers = headers self.workspace = workspace_id - def _to_generated(self): + def _to_generated(self) -> Dict[str, Any]: return { "id": self.id, "body": self.body, @@ -234,36 +366,42 @@ def _to_generated(self): } -class LogsQueryResult(object): - """The LogsQueryResult type is returned when the response of a query is a success. - - :ivar tables: The list of tables, columns and rows. - :vartype tables: list[~azure.monitor.query.LogsTable] - :ivar statistics: This will include a statistics property in the response that describes various - performance statistics such as query execution time and resource usage. - :vartype statistics: Mapping - :ivar visualization: This will include a visualization property in the response that specifies the type of - visualization selected by the query and any properties for that visualization. - :vartype visualization: Mapping - :ivar status: The status of the result. - Always 'Success' for an instance of a LogsQueryResult. - :vartype status: ~azure.monitor.query.LogsQueryStatus - """ +class LogsQueryStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The status of the result object.""" - def __init__(self, **kwargs): - self.tables = kwargs.get("tables", None) + PARTIAL = "PartialError" + SUCCESS = "Success" + FAILURE = "Failure" + + +class LogsQueryResult: + """The LogsQueryResult type is returned when the response of a query is a success.""" + + tables: List[LogsTable] + """The list of tables, columns and rows.""" + statistics: Optional[JSON] = None + """This will include a statistics property in the response that describes various performance + statistics such as query execution time and resource usage.""" + visualization: Optional[JSON] = None + """This will include a visualization property in the response that specifies the type of visualization selected + by the query and any properties for that visualization.""" + status: LogsQueryStatus + """The status of the result. Always 'Success' for an instance of a LogsQueryResult.""" + + def __init__(self, **kwargs: Any) -> None: + self.tables = kwargs.get("tables", []) self.statistics = kwargs.get("statistics", None) self.visualization = kwargs.get("visualization", None) self.status = LogsQueryStatus.SUCCESS - def __iter__(self): + def __iter__(self) -> Iterator[LogsTable]: return iter(self.tables) @classmethod - def _from_generated(cls, generated): + def _from_generated(cls, generated) -> "LogsQueryResult": if not generated: return cls() - tables = None + tables = [] if "body" in generated: generated = generated["body"] if generated.get("tables"): @@ -286,20 +424,20 @@ class MetricNamespaceClassification(str, Enum, metaclass=CaseInsensitiveEnumMeta QOS = "Qos" -class MetricNamespace(object): - """Metric namespace class specifies the metadata for a metric namespace. 
+class MetricNamespace: + """Metric namespace class specifies the metadata for a metric namespace.""" + + id: Optional[str] = None + """The ID of the metricNamespace.""" + type: Optional[str] = None + """The type of the namespace.""" + name: Optional[str] = None + """The name of the namespace.""" + fully_qualified_namespace: Optional[str] = None + """The fully qualified namespace name.""" + namespace_classification: Optional[Union[str, MetricNamespaceClassification]] = None + """Kind of namespace. Possible values include "Platform", "Custom", "Qos".""" - :ivar id: The ID of the metricNamespace. - :vartype id: str - :ivar type: The type of the namespace. - :vartype type: str - :ivar name: The name of the namespace. - :vartype name: str - :ivar fully_qualified_namespace: The fully qualified namespace name. - :vartype fully_qualified_namespace: str - :ivar namespace_classification: Kind of namespace. Possible values include: "Platform", "Custom", "Qos". - :vartype namespace_classification: str or ~azure.monitor.query.MetricNamespaceClassification - """ def __init__(self, **kwargs: Any) -> None: self.id = kwargs.get("id", None) @@ -309,7 +447,7 @@ def __init__(self, **kwargs: Any) -> None: self.namespace_classification = kwargs.get("namespace_classification", None) @classmethod - def _from_generated(cls, generated): + def _from_generated(cls, generated) -> "MetricNamespace": if not generated: return cls() fully_qualified_namespace = None @@ -334,218 +472,51 @@ class MetricClass(str, Enum, metaclass=CaseInsensitiveEnumMeta): SATURATION = "Saturation" -class MetricDefinition(object): # pylint: disable=too-many-instance-attributes - """Metric definition class specifies the metadata for a metric. - - :ivar dimension_required: Flag to indicate whether the dimension is required. - :vartype dimension_required: bool - :ivar resource_id: the resource identifier of the resource that emitted the metric. - :vartype resource_id: str - :ivar namespace: the namespace the metric belongs to. - :vartype namespace: str - :ivar name: the name and the display name of the metric, i.e. it is a localizable string. - :vartype name: str - :ivar unit: the unit of the metric. Possible values include: "Count", "Bytes", "Seconds", - "CountPerSecond", "BytesPerSecond", "Percent", "MilliSeconds", "ByteSeconds", "Unspecified", - "Cores", "MilliCores", "NanoCores", "BitsPerSecond". - :vartype unit: str or ~azure.monitor.query.MetricUnit - :ivar primary_aggregation_type: the primary aggregation type value defining how to use the - values for display. Possible values include: "None", "Average", "Count", "Minimum", "Maximum", - "Total". - :vartype primary_aggregation_type: str or ~azure.monitor.query.MetricAggregationType - :ivar metric_class: The class of the metric. Possible values include: "Availability", - "Transactions", "Errors", "Latency", "Saturation". - :vartype metric_class: str or ~azure.monitor.query.MetricClass - :ivar supported_aggregation_types: the collection of what aggregation types are supported. - :vartype supported_aggregation_types: list[str or ~azure.monitor.query.MetricAggregationType] - :ivar metric_availabilities: the collection of what aggregation intervals are available to be - queried. - :vartype metric_availabilities: list[~azure.monitor.query.MetricAvailability] - :ivar id: the resource identifier of the metric definition. - :vartype id: str - :ivar dimensions: the name and the display name of the dimension, i.e. it is a localizable - string. 
- :vartype dimensions: list[str] - """ - - def __init__(self, **kwargs: Any) -> None: - self.dimension_required = kwargs.get( - "dimension_required", None - ) # type: Optional[bool] - self.resource_id = kwargs.get("resource_id", None) # type: Optional[str] - self.namespace = kwargs.get("namespace", None) # type: Optional[str] - self.name = kwargs.get("name", None) # type: Optional[str] - self.unit = kwargs.get("unit", None) # type: Optional[str] - self.primary_aggregation_type = kwargs.get( - "primary_aggregation_type", None - ) # type: Optional[str] - self.supported_aggregation_types = kwargs.get( - "supported_aggregation_types", None - ) # type: Optional[str] - self.metric_availabilities = kwargs.get( - "metric_availabilities", None - ) # type: List[MetricAvailability] - self.id = kwargs.get("id", None) # type: Optional[str] - self.dimensions = kwargs.get("dimensions", None) # type: Optional[List[str]] - self.metric_class = kwargs.get("metric_class", None) # type: Optional[str] - - @classmethod - def _from_generated(cls, generated): - if not generated: - return cls() - dimensions, metric_class = None, None - if generated.get("dimensions"): - dimensions = [d["value"] for d in generated["dimensions"]] - if generated.get("metricClass"): - metric_class = MetricClass(generated["metricClass"]) - return cls( - dimension_required=generated.get("isDimensionRequired"), - resource_id=generated.get("resourceId"), - namespace=generated.get("namespace"), - name=generated.get("name", {}).get("value"), - unit=generated.get("unit"), - primary_aggregation_type=generated.get("primaryAggregationType"), - supported_aggregation_types=generated.get("supportedAggregationTypes"), - metric_class=metric_class, - metric_availabilities=[ - MetricAvailability._from_generated( # pylint: disable=protected-access - val - ) - for val in generated.get("metricAvailabilities", []) - ], - id=generated.get("id"), - dimensions=dimensions, - ) - - -class MetricValue(object): - """Represents a metric value. - - :ivar timestamp: The timestamp for the metric value. - :vartype timestamp: ~datetime.datetime - :ivar average: The average value in the time range. - :vartype average: float - :ivar minimum: The least value in the time range. - :vartype minimum: float - :ivar maximum: The greatest value in the time range. - :vartype maximum: float - :ivar total: The sum of all of the values in the time range. - :vartype total: float - :ivar count: The number of samples in the time range. Can be used to determine the number of - values that contributed to the average value. - :vartype count: float - """ - - def __init__(self, **kwargs: Any) -> None: - self.timestamp = kwargs["timestamp"] - self.average = kwargs.get("average", None) - self.minimum = kwargs.get("minimum", None) - self.maximum = kwargs.get("maximum", None) - self.total = kwargs.get("total", None) - self.count = kwargs.get("count", None) - - @classmethod - def _from_generated(cls, generated): - if not generated: - return cls() - return cls( - timestamp=Deserializer.deserialize_iso(generated.get("time_stamp")), - average=generated.get("average"), - minimum=generated.get("minimum"), - maximum=generated.get("maximum"), - total=generated.get("total"), - count=generated.get("count"), - ) - - -class Metric(object): - """The result data of a single metric name. - - :ivar id: The metric Id. - :vartype id: str - :ivar type: The resource type of the metric resource. - :vartype type: str - :ivar name: The name of the metric. - :vartype name: str - :ivar unit: The unit of the metric. 
To access these values, use the MetricUnit enum. - Possible values include: "Count", "Bytes", - "Seconds", "CountPerSecond", "BytesPerSecond", "Percent", "MilliSeconds", "ByteSeconds", - "Unspecified", "Cores", "MilliCores", "NanoCores", "BitsPerSecond". - :vartype unit: str - :ivar timeseries: The time series returned when a data query is performed. - :vartype timeseries: list[~azure.monitor.query.TimeSeriesElement] - :ivar display_description: Detailed description of this metric. - :vartype display_description: str - """ - - def __init__(self, **kwargs: Any) -> None: - self.id = kwargs["id"] - self.type = kwargs["type"] - self.name = kwargs["name"] - self.unit = kwargs["unit"] - self.timeseries = kwargs["timeseries"] - self.display_description = kwargs["display_description"] - - @classmethod - def _from_generated(cls, generated): - if not generated: - return cls() - return cls( - id=generated.get("id"), - type=generated.get("type"), - name=generated.get("name", {}).get("value"), - unit=generated.get("unit"), - timeseries=[ - TimeSeriesElement._from_generated(t) # pylint: disable=protected-access - for t in generated.get("timeseries", []) - ], - display_description=generated.get("displayDescription"), - ) +class MetricUnit(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The unit of the metric.""" + COUNT = "Count" + BYTES = "Bytes" + SECONDS = "Seconds" + COUNT_PER_SECOND = "CountPerSecond" + BYTES_PER_SECOND = "BytesPerSecond" + PERCENT = "Percent" + MILLI_SECONDS = "MilliSeconds" + BYTE_SECONDS = "ByteSeconds" + UNSPECIFIED = "Unspecified" + CORES = "Cores" + MILLI_CORES = "MilliCores" + NANO_CORES = "NanoCores" + BITS_PER_SECOND = "BitsPerSecond" -class TimeSeriesElement(object): - """A time series result type. The discriminator value is always TimeSeries in this case. - :ivar metadata_values: The metadata values returned if $filter was specified in the call. - :vartype metadata_values: dict(str, str) - :ivar data: An array of data points representing the metric values. This is only returned if - a result type of data is specified. - :vartype data: list[~azure.monitor.query.MetricValue] - """ - def __init__(self, **kwargs: Any) -> None: - self.metadata_values = kwargs.get("metadata_values", None) - self.data = kwargs.get("data", None) +class MetricAggregationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The aggregation type of the metric.""" - @classmethod - def _from_generated(cls, generated): - if not generated: - return cls() - return cls( - metadata_values={ - obj["name"]["value"]: obj.get("value") for obj in generated.get("metadatavalues", []) - }, - data=[ - MetricValue._from_generated(val) for val in generated.get("data", []) # pylint: disable=protected-access - ], - ) + NONE = "None" + AVERAGE = "Average" + COUNT = "Count" + MINIMUM = "Minimum" + MAXIMUM = "Maximum" + TOTAL = "Total" -class MetricAvailability(object): +class MetricAvailability: """Metric availability specifies the time grain (aggregation interval or frequency) and the retention period for that time grain. - - :ivar granularity: the time grain specifies the aggregation interval for the metric. - :vartype granularity: ~datetime.timedelta - :ivar retention: the retention period for the metric at the specified timegrain. 
- :vartype retention: ~datetime.timedelta """ + granularity: Optional[timedelta] = None + """The time grain specifies the aggregation interval for the metric.""" + retention: Optional[timedelta] = None + """The retention period for the metric at the specified timegrain.""" + def __init__(self, **kwargs: Any) -> None: self.granularity = kwargs.get("granularity", None) self.retention = kwargs.get("retention", None) @classmethod - def _from_generated(cls, generated): + def _from_generated(cls, generated) -> "MetricAvailability": if not generated: return cls() granularity, retention = None, None @@ -559,65 +530,111 @@ def _from_generated(cls, generated): ) -class MetricAggregationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The aggregation type of the metric.""" - NONE = "None" - AVERAGE = "Average" - COUNT = "Count" - MINIMUM = "Minimum" - MAXIMUM = "Maximum" - TOTAL = "Total" +class MetricDefinition: # pylint: disable=too-many-instance-attributes + """Metric definition class specifies the metadata for a metric.""" + + dimension_required: Optional[bool] = None + """Flag to indicate whether the dimension is required.""" + resource_id: Optional[str] = None + """The resource identifier of the resource that emitted the metric.""" + namespace: Optional[str] = None + """The namespace the metric belongs to.""" + name: Optional[str] = None + """The name and the display name of the metric, i.e. it is a localizable string.""" + unit: Optional[Union[str, MetricUnit]] = None + """The unit of the metric. Possible values include "Count", "Bytes", "Seconds", "CountPerSecond", + "BytesPerSecond", "Percent", "MilliSeconds", "ByteSeconds", "Unspecified", "Cores", "MilliCores", + "NanoCores", "BitsPerSecond".""" + primary_aggregation_type: Optional[Union[str, MetricAggregationType]] = None + """The primary aggregation type value defining how to use the values for display. Possible values + include "None", "Average", "Count", "Minimum", "Maximum", "Total".""" + metric_class: Optional[Union[str, MetricClass]] = None + """The class of the metric. Possible values include "Availability", "Transactions", "Errors", + "Latency", "Saturation".""" + supported_aggregation_types: Optional[List[Union[str, MetricAggregationType]]] = None + """The collection of what aggregation types are supported.""" + metric_availabilities: Optional[List[MetricAvailability]] = None + """The collection of what aggregation intervals are available to be queried.""" + id: Optional[str] = None + """The resource identifier of the metric definition.""" + dimensions: Optional[List[str]] = None + """The name and the display name of the dimension, i.e. 
it is a localizable string.""" -class MetricUnit(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The unit of the metric.""" + def __init__(self, **kwargs: Any) -> None: + self.dimension_required = kwargs.get("dimension_required", None) + self.resource_id = kwargs.get("resource_id", None) + self.namespace = kwargs.get("namespace", None) + self.name = kwargs.get("name", None) + self.unit = kwargs.get("unit", None) + self.primary_aggregation_type = kwargs.get("primary_aggregation_type", None) + self.supported_aggregation_types =kwargs.get("supported_aggregation_types", None) + self.metric_availabilities = kwargs.get("metric_availabilities", None) + self.id = kwargs.get("id", None) + self.dimensions = kwargs.get("dimensions", None) + self.metric_class = kwargs.get("metric_class", None) - COUNT = "Count" - BYTES = "Bytes" - SECONDS = "Seconds" - COUNT_PER_SECOND = "CountPerSecond" - BYTES_PER_SECOND = "BytesPerSecond" - PERCENT = "Percent" - MILLI_SECONDS = "MilliSeconds" - BYTE_SECONDS = "ByteSeconds" - UNSPECIFIED = "Unspecified" - CORES = "Cores" - MILLI_CORES = "MilliCores" - NANO_CORES = "NanoCores" - BITS_PER_SECOND = "BitsPerSecond" + @classmethod + def _from_generated(cls, generated) -> "MetricDefinition": + if not generated: + return cls() + dimensions, metric_class = None, None + if generated.get("dimensions"): + dimensions = [d["value"] for d in generated["dimensions"]] + if generated.get("metricClass"): + metric_class = MetricClass(generated["metricClass"]) + return cls( + dimension_required=generated.get("isDimensionRequired"), + resource_id=generated.get("resourceId"), + namespace=generated.get("namespace"), + name=generated.get("name", {}).get("value"), + unit=generated.get("unit"), + primary_aggregation_type=generated.get("primaryAggregationType"), + supported_aggregation_types=generated.get("supportedAggregationTypes"), + metric_class=metric_class, + metric_availabilities=[ + MetricAvailability._from_generated( # pylint: disable=protected-access + val + ) + for val in generated.get("metricAvailabilities", []) + ], + id=generated.get("id"), + dimensions=dimensions, + ) -class LogsQueryPartialResult(object): +class LogsQueryPartialResult: """The LogsQueryPartialResult type is returned when the response of a query is a partial success (or partial failure). - - :ivar partial_data: The list of tables, columns and rows. - :vartype partial_data: list[~azure.monitor.query.LogsTable] - :ivar statistics: This will include a statistics property in the response that describes various - performance statistics such as query execution time and resource usage. - :vartype statistics: Mapping - :ivar visualization: This will include a visualization property in the response that specifies the type of - visualization selected by the query and any properties for that visualization. - :vartype visualization: Mapping - :ivar partial_error: The partial error info - :vartype partial_error: ~azure.monitor.query.LogsQueryError - :ivar status: The status of the result. Always 'PartialError' for an instance of a LogsQueryPartialResult. 
- :vartype status: ~azure.monitor.query.LogsQueryStatus """ + partial_data: List[LogsTable] + """The list of tables, columns and rows.""" + statistics: Optional[JSON] = None + """This will include a statistics property in the response that describes various performance statistics + such as query execution time and resource usage.""" + visualization: Optional[JSON] = None + """This will include a visualization property in the response that specifies the type of visualization + selected by the query and any properties for that visualization.""" + partial_error: Any + """The partial error info.""" + status: LogsQueryStatus + """The status of the result. Always 'PartialError' for an instance of a LogsQueryPartialResult.""" + + def __init__(self, **kwargs: Any) -> None: - self.partial_data = kwargs.get("partial_data", None) + self.partial_data = kwargs.get("partial_data", []) self.partial_error = kwargs.get("partial_error", None) self.statistics = kwargs.get("statistics", None) self.visualization = kwargs.get("visualization", None) self.status = LogsQueryStatus.PARTIAL - def __iter__(self): + def __iter__(self) -> Iterator[LogsTable]: return iter(self.partial_data) @classmethod - def _from_generated(cls, generated, error): # pylint: disable=arguments-differ + def _from_generated(cls, generated, error) -> "LogsQueryPartialResult": if not generated: return cls() partial_data = None @@ -634,11 +651,3 @@ def _from_generated(cls, generated, error): # pylint: disable=arguments-differ statistics=generated.get("statistics"), visualization=generated.get("render"), ) - - -class LogsQueryStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The status of the result object.""" - - PARTIAL = "PartialError" - SUCCESS = "Success" - FAILURE = "Failure" diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_helpers_async.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_helpers_async.py index 0a168f2a3e60..a3abd8b795e6 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_helpers_async.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_helpers_async.py @@ -4,13 +4,15 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- +from typing import Optional + from azure.core.credentials_async import AsyncTokenCredential from azure.core.pipeline.policies import AsyncBearerTokenCredentialPolicy def get_authentication_policy( credential: AsyncTokenCredential, - audience: str = None + audience: Optional[str] = None ) -> AsyncBearerTokenCredentialPolicy: """Returns the correct authentication policy""" if not audience: @@ -28,7 +30,7 @@ def get_authentication_policy( def get_metrics_authentication_policy( credential: AsyncTokenCredential, - audience: str = None + audience: Optional[str] = None ) -> AsyncBearerTokenCredentialPolicy: """Returns the correct authentication policy""" if not audience: diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_logs_query_client_async.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_logs_query_client_async.py index 71df5e1d1967..6460bfb793f2 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_logs_query_client_async.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_logs_query_client_async.py @@ -5,7 +5,7 @@ # license information. 
# -------------------------------------------------------------------------- from datetime import datetime, timedelta -from typing import Any, Tuple, Union, Sequence, Dict, List +from typing import Any, cast, Tuple, Union, Sequence, Dict, List from azure.core.credentials_async import AsyncTokenCredential from azure.core.exceptions import HttpResponseError @@ -84,7 +84,7 @@ async def query_workspace( :rtype: ~azure.monitor.query.LogsQueryResult or ~azure.monitor.query.LogsQueryPartialResult :raises: ~azure.core.exceptions.HttpResponseError """ - timespan = construct_iso8601(timespan) + timespan_iso = construct_iso8601(timespan) include_statistics = kwargs.pop("include_statistics", False) include_visualization = kwargs.pop("include_visualization", False) server_timeout = kwargs.pop("server_timeout", None) @@ -96,7 +96,7 @@ async def query_workspace( body = { "query": query, - "timespan": timespan, + "timespan": timespan_iso, "workspaces": additional_workspaces } @@ -108,7 +108,7 @@ async def query_workspace( ) except HttpResponseError as err: process_error(err, LogsQueryError) - response = None + response: Union[LogsQueryResult, LogsQueryPartialResult] if not generated_response.get("error"): response = LogsQueryResult._from_generated( # pylint: disable=protected-access generated_response @@ -139,11 +139,11 @@ async def query_batch( :raises: ~azure.core.exceptions.HttpResponseError """ try: - queries = [LogsBatchQuery(**q) for q in queries] + queries = [LogsBatchQuery(**cast(Dict, q)) for q in queries] except (KeyError, TypeError): pass queries = [ - q._to_generated() for q in queries # pylint: disable=protected-access + cast(LogsBatchQuery, q)._to_generated() for q in queries # pylint: disable=protected-access ] request_order = [req["id"] for req in queries] batch = {"requests": queries} @@ -162,7 +162,7 @@ async def __aenter__(self) -> "LogsQueryClient": await self._client.__aenter__() return self - async def __aexit__(self, *args: "Any") -> None: + async def __aexit__(self, *args: Any) -> None: await self._client.__aexit__(*args) async def close(self) -> None: diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_metrics_query_client_async.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_metrics_query_client_async.py index f9ec75551ef5..b070d05753cb 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_metrics_query_client_async.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_metrics_query_client_async.py @@ -5,7 +5,7 @@ # license information. 
# -------------------------------------------------------------------------- # pylint: disable=anomalous-backslash-in-string -from typing import Any, List +from typing import Any, cast, List from azure.core.async_paging import AsyncItemPaged from azure.core.credentials_async import AsyncTokenCredential @@ -127,7 +127,7 @@ def list_metric_namespaces( start_time = kwargs.pop("start_time", None) if start_time: start_time = Serializer.serialize_iso(start_time) - return self._namespace_op.list( + res = self._namespace_op.list( resource_uri, start_time=start_time, cls=kwargs.pop( @@ -139,6 +139,7 @@ def list_metric_namespaces( ), **kwargs ) + return cast(AsyncItemPaged[MetricNamespace], res) @distributed_trace def list_metric_definitions( @@ -155,7 +156,7 @@ def list_metric_definitions( :raises: ~azure.core.exceptions.HttpResponseError """ metric_namespace = kwargs.pop("namespace", None) - return self._definitions_op.list( + res = self._definitions_op.list( resource_uri, metricnamespace=metric_namespace, cls=kwargs.pop( @@ -167,6 +168,7 @@ def list_metric_definitions( ), **kwargs ) + return cast(AsyncItemPaged[MetricDefinition], res) async def __aenter__(self) -> "MetricsQueryClient": await self._client.__aenter__() diff --git a/tools/azure-sdk-tools/ci_tools/environment_exclusions.py b/tools/azure-sdk-tools/ci_tools/environment_exclusions.py index 5d2340f0178a..c0904c66002d 100644 --- a/tools/azure-sdk-tools/ci_tools/environment_exclusions.py +++ b/tools/azure-sdk-tools/ci_tools/environment_exclusions.py @@ -101,7 +101,6 @@ "azure-iot-modelsrepository", "azure-monitor-ingestion", "azure-monitor-opentelemetry-exporter", - "azure-monitor-query", "azure-purview-administration", "azure-purview-catalog", "azure-purview-scanning", @@ -170,7 +169,6 @@ "azure-iot-modelsrepository", "azure-monitor-ingestion", "azure-monitor-opentelemetry-exporter", - "azure-monitor-query", "azure-ai-personalizer", "azure-purview-administration", "azure-purview-catalog", @@ -244,7 +242,6 @@ "azure-iot-modelsrepository", "azure-monitor-ingestion", "azure-monitor-opentelemetry-exporter", - "azure-monitor-query", "azure-ai-personalizer", "azure-purview-administration", "azure-purview-catalog",
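
Reviewer note (illustrative, not part of the patch): the sketch below shows how the class-attribute
typing introduced above reads from the caller's side of azure-monitor-query. The client, model, and
enum names come from the code in this diff; the use of DefaultAzureCredential, the placeholder
workspace ID, and the sample KQL query are assumptions made only for this example.

# Minimal usage sketch, assuming azure-identity is available for authentication.
from datetime import timedelta

from azure.identity import DefaultAzureCredential  # assumed credential choice
from azure.monitor.query import LogsQueryClient, LogsQueryResult

client = LogsQueryClient(DefaultAzureCredential())

response = client.query_workspace(
    workspace_id="<workspace-id>",       # placeholder
    query="AzureActivity | take 5",      # placeholder KQL query
    timespan=timedelta(days=1),
)

# query_workspace is annotated to return Union[LogsQueryResult, LogsQueryPartialResult],
# so narrow the result before touching its data.
if isinstance(response, LogsQueryResult):
    tables = response.tables             # List[LogsTable]
else:
    # LogsQueryPartialResult: report the error, keep whatever data came back.
    print(response.partial_error)        # LogsQueryError with status "Failure"
    tables = response.partial_data

for table in tables:
    print(table.name, table.columns)
    for row in table.rows:               # LogsTableRow: index- or column-name access
        print(list(row))

Narrowing with isinstance keeps the union return type friendly to the mypy/pyright checks that this
patch enables, which is the motivation for the explicit response annotations in _logs_query_client.py
and _logs_query_client_async.py.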