Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[metricsadvisor] mypy #14120

Merged
4 commits merged into from
Sep 29, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion eng/tox/mypy_hard_failure_packages.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,5 +10,6 @@
"azure-eventhub",
"azure-servicebus",
"azure-ai-textanalytics",
"azure-ai-formrecognizer"
"azure-ai-formrecognizer",
"azure-ai-metricsadvisor"
]
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
import datetime
import six
from msrest import Serializer
from azure.core.exceptions import HttpResponseError
from .models import (
DataFeedGranularityType,
DataFeedGranularity,
Expand Down Expand Up @@ -178,11 +179,11 @@ def convert_to_generated_data_feed_type(
def convert_to_sub_feedback(feedback):
    # type: (MetricFeedback) -> Union[AnomalyFeedback, ChangePointFeedback, CommentFeedback, PeriodFeedback]
    """Convert a generated MetricFeedback into the matching public subtype.

    Dispatches on ``feedback.feedback_type`` to the appropriate wrapper's
    ``_from_generated`` constructor. Raises ``HttpResponseError`` when the
    service returns a feedback type this SDK does not recognize.
    """
    converters = {
        "Anomaly": AnomalyFeedback._from_generated,
        "ChangePoint": ChangePointFeedback._from_generated,
        "Comment": CommentFeedback._from_generated,
        "Period": PeriodFeedback._from_generated,
    }
    convert = converters.get(feedback.feedback_type)
    if convert is None:
        raise HttpResponseError("Invalid feedback type returned in the response.")
    return convert(feedback)  # type: ignore
Original file line number Diff line number Diff line change
Expand Up @@ -11,41 +11,46 @@
Any,
List,
Union,
cast,
TYPE_CHECKING
)
import datetime
import six
from azure.core.tracing.decorator import distributed_trace
from ._generated._azure_cognitive_service_metrics_advisor_restapi_open_ap_iv2 \
import AzureCognitiveServiceMetricsAdvisorRESTAPIOpenAPIV2 as _Client
from ._generated.models import AnomalyAlertingConfiguration as _AnomalyAlertingConfiguration
from ._generated.models import AzureApplicationInsightsDataFeed as _AzureApplicationInsightsDataFeed
from ._generated.models import AzureBlobDataFeed as _AzureBlobDataFeed
from ._generated.models import AzureCosmosDBDataFeed as _AzureCosmosDBDataFeed
from ._generated.models import AzureDataExplorerDataFeed as _AzureDataExplorerDataFeed
from ._generated.models import AzureTableDataFeed as _AzureTableDataFeed
from ._generated.models import HttpRequestDataFeed as _HttpRequestDataFeed
from ._generated.models import InfluxDBDataFeed as _InfluxDBDataFeed
from ._generated.models import MySqlDataFeed as _MySqlDataFeed
from ._generated.models import PostgreSqlDataFeed as _PostgreSqlDataFeed
from ._generated.models import MongoDBDataFeed as _MongoDBDataFeed
from ._generated.models import SQLServerDataFeed as _SQLServerDataFeed
from ._generated.models import AzureDataLakeStorageGen2DataFeed as _AzureDataLakeStorageGen2DataFeed
from ._generated.models import AzureDataLakeStorageGen2DataFeedPatch as _AzureDataLakeStorageGen2DataFeedPatch
from ._generated.models import ElasticsearchDataFeed as _ElasticsearchDataFeed
from ._generated.models import ElasticsearchDataFeedPatch as _ElasticsearchDataFeedPatch
from ._generated.models import AzureApplicationInsightsDataFeedPatch as _AzureApplicationInsightsDataFeedPatch
from ._generated.models import AzureBlobDataFeedPatch as _AzureBlobDataFeedPatch
from ._generated.models import AzureCosmosDBDataFeedPatch as _AzureCosmosDBDataFeedPatch
from ._generated.models import AzureDataExplorerDataFeedPatch as _AzureDataExplorerDataFeedPatch
from ._generated.models import AzureTableDataFeedPatch as _AzureTableDataFeedPatch
from ._generated.models import HttpRequestDataFeedPatch as _HttpRequestDataFeedPatch
from ._generated.models import InfluxDBDataFeedPatch as _InfluxDBDataFeedPatch
from ._generated.models import MySqlDataFeedPatch as _MySqlDataFeedPatch
from ._generated.models import PostgreSqlDataFeedPatch as _PostgreSqlDataFeedPatch
from ._generated.models import MongoDBDataFeedPatch as _MongoDBDataFeedPatch
from ._generated.models import SQLServerDataFeedPatch as _SQLServerDataFeedPatch
from ._generated.models import AnomalyDetectionConfiguration as _AnomalyDetectionConfiguration
from ._generated.models import (
AnomalyAlertingConfiguration as _AnomalyAlertingConfiguration,
AzureApplicationInsightsDataFeed as _AzureApplicationInsightsDataFeed,
AzureBlobDataFeed as _AzureBlobDataFeed,
AzureCosmosDBDataFeed as _AzureCosmosDBDataFeed,
AzureDataExplorerDataFeed as _AzureDataExplorerDataFeed,
AzureTableDataFeed as _AzureTableDataFeed,
HttpRequestDataFeed as _HttpRequestDataFeed,
InfluxDBDataFeed as _InfluxDBDataFeed,
MySqlDataFeed as _MySqlDataFeed,
PostgreSqlDataFeed as _PostgreSqlDataFeed,
MongoDBDataFeed as _MongoDBDataFeed,
SQLServerDataFeed as _SQLServerDataFeed,
AzureDataLakeStorageGen2DataFeed as _AzureDataLakeStorageGen2DataFeed,
AzureDataLakeStorageGen2DataFeedPatch as _AzureDataLakeStorageGen2DataFeedPatch,
ElasticsearchDataFeed as _ElasticsearchDataFeed,
ElasticsearchDataFeedPatch as _ElasticsearchDataFeedPatch,
AzureApplicationInsightsDataFeedPatch as _AzureApplicationInsightsDataFeedPatch,
AzureBlobDataFeedPatch as _AzureBlobDataFeedPatch,
AzureCosmosDBDataFeedPatch as _AzureCosmosDBDataFeedPatch,
AzureDataExplorerDataFeedPatch as _AzureDataExplorerDataFeedPatch,
AzureTableDataFeedPatch as _AzureTableDataFeedPatch,
HttpRequestDataFeedPatch as _HttpRequestDataFeedPatch,
InfluxDBDataFeedPatch as _InfluxDBDataFeedPatch,
MySqlDataFeedPatch as _MySqlDataFeedPatch,
PostgreSqlDataFeedPatch as _PostgreSqlDataFeedPatch,
MongoDBDataFeedPatch as _MongoDBDataFeedPatch,
SQLServerDataFeedPatch as _SQLServerDataFeedPatch,
AnomalyDetectionConfiguration as _AnomalyDetectionConfiguration,
IngestionProgressResetOptions as _IngestionProgressResetOptions,
IngestionStatusQueryOptions as _IngestionStatusQueryOptions,
)
from ._version import SDK_MONIKER
from ._metrics_advisor_key_credential_policy import MetricsAdvisorKeyCredentialPolicy
from ._helpers import (
Expand Down Expand Up @@ -218,7 +223,7 @@ def create_anomaly_alert_configuration(
"""

cross_metrics_operator = kwargs.pop("cross_metrics_operator", None)
response_headers = self._client.create_anomaly_alerting_configuration(
response_headers = self._client.create_anomaly_alerting_configuration( # type: ignore
_AnomalyAlertingConfiguration(
name=name,
metric_alerting_configurations=[
Expand Down Expand Up @@ -289,7 +294,7 @@ def create_data_feed(
options=options
)

response_headers = self._client.create_data_feed(
response_headers = self._client.create_data_feed( # type: ignore
data_feed_detail,
cls=lambda pipeline_response, _, response_headers: response_headers,
**kwargs
Expand Down Expand Up @@ -330,8 +335,8 @@ def create_hook(
if hook.hook_type == "Webhook":
hook_request = hook._to_generated(name)

response_headers = self._client.create_hook(
hook_request,
response_headers = self._client.create_hook( # type: ignore
hook_request, # type: ignore
cls=lambda pipeline_response, _, response_headers: response_headers,
**kwargs
)
Expand Down Expand Up @@ -387,7 +392,7 @@ def create_metric_anomaly_detection_configuration(
if series_detection_conditions else None,
)

response_headers = self._client.create_anomaly_detection_configuration(
response_headers = self._client.create_anomaly_detection_configuration( # type: ignore
config,
cls=lambda pipeline_response, _, response_headers: response_headers,
**kwargs
Expand Down Expand Up @@ -561,10 +566,10 @@ def refresh_data_feed_ingestion(
"""
self._client.reset_data_feed_ingestion_status(
data_feed_id,
body={
"start_time": start_time,
"end_time": end_time
},
body=_IngestionProgressResetOptions(
start_time=start_time,
end_time=end_time
),
**kwargs
)

Expand Down Expand Up @@ -963,6 +968,7 @@ def update_hook(
else:
hook_id = hook.id
if hook.hook_type == "Email":
hook = cast(EmailHook, hook)
hook_patch = hook._to_generated_patch(
name=update.pop("hookName", None),
description=update.pop("description", None),
Expand All @@ -971,6 +977,7 @@ def update_hook(
)

elif hook.hook_type == "Webhook":
hook = cast(WebHook, hook)
hook_patch = hook._to_generated_patch(
name=update.pop("hookName", None),
description=update.pop("description", None),
Expand Down Expand Up @@ -1021,7 +1028,7 @@ def _convert_to_hook_type(hook):
return EmailHook._from_generated(hook)
return WebHook._from_generated(hook)

return self._client.list_hooks(
return self._client.list_hooks( # type: ignore
hook_name=hook_name,
skip=skip,
cls=kwargs.pop("cls", lambda hooks: [_convert_to_hook_type(hook) for hook in hooks]),
Expand Down Expand Up @@ -1066,7 +1073,7 @@ def list_data_feeds(
creator = kwargs.pop("creator", None)
skip = kwargs.pop("skip", None)

return self._client.list_data_feeds(
return self._client.list_data_feeds( # type: ignore
data_feed_name=data_feed_name,
data_source_type=data_source_type,
granularity_name=granularity_type,
Expand Down Expand Up @@ -1101,7 +1108,7 @@ def list_anomaly_alert_configurations(
:dedent: 4
:caption: List all anomaly alert configurations for specific anomaly detection configuration
"""
return self._client.get_anomaly_alerting_configurations_by_anomaly_detection_configuration(
return self._client.get_anomaly_alerting_configurations_by_anomaly_detection_configuration( # type: ignore
detection_configuration_id,
cls=kwargs.pop("cls", lambda confs: [
AnomalyAlertConfiguration._from_generated(conf) for conf in confs
Expand Down Expand Up @@ -1133,7 +1140,7 @@ def list_metric_anomaly_detection_configurations(
:dedent: 4
:caption: List all anomaly detection configurations for a specific metric
"""
return self._client.get_anomaly_detection_configurations_by_metric(
return self._client.get_anomaly_detection_configurations_by_metric( # type: ignore
metric_id,
cls=kwargs.pop("cls", lambda confs: [
AnomalyDetectionConfiguration._from_generated(conf) for conf in confs
Expand Down Expand Up @@ -1174,12 +1181,12 @@ def list_data_feed_ingestion_status(

skip = kwargs.pop("skip", None)

return self._client.get_data_feed_ingestion_status(
return self._client.get_data_feed_ingestion_status( # type: ignore
data_feed_id=data_feed_id,
body={
"start_time": start_time,
"end_time": end_time
},
body=_IngestionStatusQueryOptions(
start_time=start_time,
end_time=end_time
),
skip=skip,
**kwargs
)
Loading