diff --git a/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/aio/operations/_azure_cognitive_service_metrics_advisor_restapi_open_ap_iv2_operations.py b/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/aio/operations/_azure_cognitive_service_metrics_advisor_restapi_open_ap_iv2_operations.py index ad90a78f46e3..6827d13469d8 100644 --- a/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/aio/operations/_azure_cognitive_service_metrics_advisor_restapi_open_ap_iv2_operations.py +++ b/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/aio/operations/_azure_cognitive_service_metrics_advisor_restapi_open_ap_iv2_operations.py @@ -302,7 +302,7 @@ def get_alerts_by_anomaly_alerting_configuration( configuration_id: str, body: "_models.AlertingResultQuery", skip: Optional[int] = None, - top: Optional[int] = None, + maxpagesize: Optional[int] = None, **kwargs ) -> AsyncIterable["_models.AlertResultList"]: """Query alerts under anomaly alerting configuration. @@ -315,8 +315,8 @@ def get_alerts_by_anomaly_alerting_configuration( :type body: ~azure.ai.metricsadvisor.models.AlertingResultQuery :param skip: for paging, skipped number. :type skip: int - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. + :type maxpagesize: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either AlertResultList or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.AlertResultList] @@ -348,8 +348,8 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] if skip is not None: query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(body, 'AlertingResultQuery') @@ -401,7 +401,7 @@ def get_anomalies_from_alert_by_anomaly_alerting_configuration( configuration_id: str, alert_id: str, skip: Optional[int] = None, - top: Optional[int] = None, + maxpagesize: Optional[int] = None, **kwargs ) -> AsyncIterable["_models.AnomalyResultList"]: """Query anomalies under a specific alert. @@ -414,8 +414,8 @@ def get_anomalies_from_alert_by_anomaly_alerting_configuration( :type alert_id: str :param skip: for paging, skipped number. :type skip: int - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. 
+ :type maxpagesize: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either AnomalyResultList or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.AnomalyResultList] @@ -446,8 +446,8 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] if skip is not None: query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') request = self._client.get(url, query_parameters, header_parameters) else: @@ -492,7 +492,7 @@ def get_incidents_from_alert_by_anomaly_alerting_configuration( configuration_id: str, alert_id: str, skip: Optional[int] = None, - top: Optional[int] = None, + maxpagesize: Optional[int] = None, **kwargs ) -> AsyncIterable["_models.IncidentResultList"]: """Query incidents under a specific alert. @@ -505,8 +505,8 @@ def get_incidents_from_alert_by_anomaly_alerting_configuration( :type alert_id: str :param skip: for paging, skipped number. :type skip: int - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. + :type maxpagesize: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either IncidentResultList or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.IncidentResultList] @@ -537,8 +537,8 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] if skip is not None: query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') request = self._client.get(url, query_parameters, header_parameters) else: @@ -972,7 +972,7 @@ def get_anomalies_by_anomaly_detection_configuration( configuration_id: str, body: "_models.DetectionAnomalyResultQuery", skip: Optional[int] = None, - top: Optional[int] = None, + maxpagesize: Optional[int] = None, **kwargs ) -> AsyncIterable["_models.AnomalyResultList"]: """Query anomalies under anomaly detection configuration. @@ -985,8 +985,8 @@ def get_anomalies_by_anomaly_detection_configuration( :type body: ~azure.ai.metricsadvisor.models.DetectionAnomalyResultQuery :param skip: for paging, skipped number. :type skip: int - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. 
+ :type maxpagesize: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either AnomalyResultList or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.AnomalyResultList] @@ -1018,8 +1018,8 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] if skip is not None: query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(body, 'DetectionAnomalyResultQuery') @@ -1071,7 +1071,7 @@ def get_dimension_of_anomalies_by_anomaly_detection_configuration( configuration_id: str, body: "_models.AnomalyDimensionQuery", skip: Optional[int] = None, - top: Optional[int] = None, + maxpagesize: Optional[int] = None, **kwargs ) -> AsyncIterable["_models.AnomalyDimensionList"]: """Query dimension values of anomalies. @@ -1084,8 +1084,8 @@ def get_dimension_of_anomalies_by_anomaly_detection_configuration( :type body: ~azure.ai.metricsadvisor.models.AnomalyDimensionQuery :param skip: for paging, skipped number. :type skip: int - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. + :type maxpagesize: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either AnomalyDimensionList or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.AnomalyDimensionList] @@ -1117,8 +1117,8 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] if skip is not None: query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(body, 'AnomalyDimensionQuery') @@ -1169,7 +1169,7 @@ def get_incidents_by_anomaly_detection_configuration( self, configuration_id: str, body: "_models.DetectionIncidentResultQuery", - top: Optional[int] = None, + maxpagesize: Optional[int] = None, **kwargs ) -> AsyncIterable["_models.IncidentResultList"]: """Query incidents under anomaly detection configuration. @@ -1180,8 +1180,8 @@ def get_incidents_by_anomaly_detection_configuration( :type configuration_id: str :param body: query detection incident result request. :type body: ~azure.ai.metricsadvisor.models.DetectionIncidentResultQuery - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. 
+ :type maxpagesize: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either IncidentResultList or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.IncidentResultList] @@ -1211,8 +1211,8 @@ def prepare_request(next_link=None): url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(body, 'DetectionIncidentResultQuery') @@ -1260,7 +1260,7 @@ async def get_next(next_link=None): def get_incidents_by_anomaly_detection_configuration_next_pages( self, configuration_id: str, - top: Optional[int] = None, + maxpagesize: Optional[int] = None, token: Optional[str] = None, **kwargs ) -> AsyncIterable["_models.IncidentResultList"]: @@ -1270,8 +1270,8 @@ def get_incidents_by_anomaly_detection_configuration_next_pages( :param configuration_id: anomaly detection configuration unique id. :type configuration_id: str - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. + :type maxpagesize: int :param token: the token for getting the next page. :type token: str :keyword callable cls: A custom type or function that will be passed the direct response @@ -1301,8 +1301,8 @@ def prepare_request(next_link=None): url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') if token is not None: query_parameters['$token'] = self._serialize.query("token", token, 'str') @@ -1424,6 +1424,313 @@ async def get_next(next_link=None): ) get_root_cause_of_incident_by_anomaly_detection_configuration.metadata = {'url': '/enrichment/anomalyDetection/configurations/{configurationId}/incidents/{incidentId}/rootCause'} # type: ignore + async def create_credential( + self, + body: "_models.DataSourceCredential", + **kwargs + ) -> None: + """Create a new data source credential. + + Create a new data source credential. + + :param body: Create data source credential request. 
+ :type body: ~azure.ai.metricsadvisor.models.DataSourceCredential + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_credential.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(body, 'DataSourceCredential') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + + if cls: + return cls(pipeline_response, None, response_headers) + + create_credential.metadata = {'url': '/credentials'} # type: ignore + + def list_credentials( + self, + skip: Optional[int] = None, + maxpagesize: Optional[int] = None, + **kwargs + ) -> AsyncIterable["_models.DataSourceCredentialList"]: + """List all credentials. + + List all credentials. + + :param skip: for paging, skipped number. + :type skip: int + :param maxpagesize: the maximum number of items in one page. 
+ :type maxpagesize: int + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DataSourceCredentialList or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.DataSourceCredentialList] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.DataSourceCredentialList"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_credentials.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('DataSourceCredentialList', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_credentials.metadata = {'url': '/credentials'} # type: ignore + + async def update_credential( + self, + credential_id: str, + body: "_models.DataSourceCredentialPatch", + **kwargs + ) -> None: + """Update a data source credential. + + Update a data source credential. + + :param credential_id: Data source credential unique ID. + :type credential_id: str + :param body: Update data source credential request. 
+ :type body: ~azure.ai.metricsadvisor.models.DataSourceCredentialPatch + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + content_type = kwargs.pop("content_type", "application/merge-patch+json") + accept = "application/json" + + # Construct URL + url = self.update_credential.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'credentialId': self._serialize.url("credential_id", credential_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(body, 'DataSourceCredentialPatch') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response) + raise HttpResponseError(response=response, model=error) + + if cls: + return cls(pipeline_response, None, {}) + + update_credential.metadata = {'url': '/credentials/{credentialId}'} # type: ignore + + async def delete_credential( + self, + credential_id: str, + **kwargs + ) -> None: + """Delete a data source credential. + + Delete a data source credential. + + :param credential_id: Data source credential unique ID. 
+ :type credential_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.delete_credential.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'credentialId': self._serialize.url("credential_id", credential_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response) + raise HttpResponseError(response=response, model=error) + + if cls: + return cls(pipeline_response, None, {}) + + delete_credential.metadata = {'url': '/credentials/{credentialId}'} # type: ignore + + async def get_credential( + self, + credential_id: str, + **kwargs + ) -> "_models.DataSourceCredential": + """Get a data source credential. + + Get a data source credential. + + :param credential_id: Data source credential unique ID. 
+ :type credential_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataSourceCredential, or the result of cls(response) + :rtype: ~azure.ai.metricsadvisor.models.DataSourceCredential + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.DataSourceCredential"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.get_credential.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'credentialId': self._serialize.url("credential_id", credential_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('DataSourceCredential', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_credential.metadata = {'url': '/credentials/{credentialId}'} # type: ignore + def list_data_feeds( self, data_feed_name: Optional[str] = None, @@ -1432,7 +1739,7 @@ def list_data_feeds( status: Optional[Union[str, "_models.EntityStatus"]] = None, creator: Optional[str] = None, skip: Optional[int] = None, - top: Optional[int] = None, + maxpagesize: Optional[int] = None, **kwargs ) -> AsyncIterable["_models.DataFeedList"]: """List all data feeds. @@ -1451,8 +1758,8 @@ def list_data_feeds( :type creator: str :param skip: for paging, skipped number. :type skip: int - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. 
+ :type maxpagesize: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either DataFeedList or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.DataFeedList] @@ -1491,8 +1798,8 @@ def prepare_request(next_link=None): query_parameters['creator'] = self._serialize.query("creator", creator, 'str') if skip is not None: query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') request = self._client.get(url, query_parameters, header_parameters) else: @@ -1815,7 +2122,7 @@ def list_metric_feedbacks( self, body: "_models.MetricFeedbackFilter", skip: Optional[int] = None, - top: Optional[int] = None, + maxpagesize: Optional[int] = None, **kwargs ) -> AsyncIterable["_models.MetricFeedbackList"]: """List feedback on the given metric. @@ -1826,8 +2133,8 @@ def list_metric_feedbacks( :type body: ~azure.ai.metricsadvisor.models.MetricFeedbackFilter :param skip: for paging, skipped number. :type skip: int - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. + :type maxpagesize: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either MetricFeedbackList or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.MetricFeedbackList] @@ -1858,8 +2165,8 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] if skip is not None: query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(body, 'MetricFeedbackFilter') @@ -1969,7 +2276,7 @@ def list_hooks( self, hook_name: Optional[str] = None, skip: Optional[int] = None, - top: Optional[int] = None, + maxpagesize: Optional[int] = None, **kwargs ) -> AsyncIterable["_models.HookList"]: """List all hooks. @@ -1980,8 +2287,8 @@ def list_hooks( :type hook_name: str :param skip: for paging, skipped number. :type skip: int - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. 
+ :type maxpagesize: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either HookList or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.HookList] @@ -2012,8 +2319,8 @@ def prepare_request(next_link=None): query_parameters['hookName'] = self._serialize.query("hook_name", hook_name, 'str') if skip is not None: query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') request = self._client.get(url, query_parameters, header_parameters) else: @@ -2282,7 +2589,7 @@ def get_data_feed_ingestion_status( data_feed_id: str, body: "_models.IngestionStatusQueryOptions", skip: Optional[int] = None, - top: Optional[int] = None, + maxpagesize: Optional[int] = None, **kwargs ) -> AsyncIterable["_models.IngestionStatusList"]: """Get data ingestion status by data feed. @@ -2295,8 +2602,8 @@ def get_data_feed_ingestion_status( :type body: ~azure.ai.metricsadvisor.models.IngestionStatusQueryOptions :param skip: for paging, skipped number. :type skip: int - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. + :type maxpagesize: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either IngestionStatusList or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.IngestionStatusList] @@ -2328,8 +2635,8 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] if skip is not None: query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(body, 'IngestionStatusQueryOptions') @@ -2583,7 +2890,7 @@ def get_metric_series( metric_id: str, body: "_models.MetricSeriesQueryOptions", skip: Optional[int] = None, - top: Optional[int] = None, + maxpagesize: Optional[int] = None, **kwargs ) -> AsyncIterable["_models.MetricSeriesList"]: """List series (dimension combinations) from metric. @@ -2596,8 +2903,8 @@ def get_metric_series( :type body: ~azure.ai.metricsadvisor.models.MetricSeriesQueryOptions :param skip: for paging, skipped number. :type skip: int - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. 
+ :type maxpagesize: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either MetricSeriesList or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.MetricSeriesList] @@ -2629,8 +2936,8 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] if skip is not None: query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(body, 'MetricSeriesQueryOptions') @@ -2682,7 +2989,7 @@ def get_metric_dimension( metric_id: str, body: "_models.MetricDimensionQueryOptions", skip: Optional[int] = None, - top: Optional[int] = None, + maxpagesize: Optional[int] = None, **kwargs ) -> AsyncIterable["_models.MetricDimensionList"]: """List dimension from certain metric. @@ -2695,8 +3002,8 @@ def get_metric_dimension( :type body: ~azure.ai.metricsadvisor.models.MetricDimensionQueryOptions :param skip: for paging, skipped number. :type skip: int - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. + :type maxpagesize: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either MetricDimensionList or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.MetricDimensionList] @@ -2728,8 +3035,8 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] if skip is not None: query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(body, 'MetricDimensionQueryOptions') @@ -2857,7 +3164,7 @@ def get_enrichment_status_by_metric( metric_id: str, body: "_models.EnrichmentStatusQueryOption", skip: Optional[int] = None, - top: Optional[int] = None, + maxpagesize: Optional[int] = None, **kwargs ) -> AsyncIterable["_models.EnrichmentStatusList"]: """Query anomaly detection status. @@ -2870,8 +3177,8 @@ def get_enrichment_status_by_metric( :type body: ~azure.ai.metricsadvisor.models.EnrichmentStatusQueryOption :param skip: for paging, skipped number. :type skip: int - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. 
+ :type maxpagesize: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either EnrichmentStatusList or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.EnrichmentStatusList] @@ -2903,8 +3210,8 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] if skip is not None: query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(body, 'EnrichmentStatusQueryOption') diff --git a/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/models/__init__.py b/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/models/__init__.py index 9bccaea2d44d..02d77111c5d4 100644 --- a/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/models/__init__.py +++ b/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/models/__init__.py @@ -38,6 +38,12 @@ from ._models_py3 import AzureDataLakeStorageGen2DataFeed from ._models_py3 import AzureDataLakeStorageGen2DataFeedPatch from ._models_py3 import AzureDataLakeStorageGen2Parameter + from ._models_py3 import AzureEventHubsDataFeed + from ._models_py3 import AzureEventHubsDataFeedPatch + from ._models_py3 import AzureEventHubsParameter + from ._models_py3 import AzureSQLConnectionStringCredential + from ._models_py3 import AzureSQLConnectionStringCredentialPatch + from ._models_py3 import AzureSQLConnectionStringParam from ._models_py3 import AzureTableDataFeed from ._models_py3 import AzureTableDataFeedPatch from ._models_py3 import AzureTableParameter @@ -50,6 +56,12 @@ from ._models_py3 import DataFeedDetailPatch from ._models_py3 import DataFeedIngestionProgress from ._models_py3 import DataFeedList + from ._models_py3 import DataLakeGen2SharedKeyCredential + from ._models_py3 import DataLakeGen2SharedKeyCredentialPatch + from ._models_py3 import DataLakeGen2SharedKeyParam + from ._models_py3 import DataSourceCredential + from ._models_py3 import DataSourceCredentialList + from ._models_py3 import DataSourceCredentialPatch from ._models_py3 import DetectionAnomalyFilterCondition from ._models_py3 import DetectionAnomalyResultQuery from ._models_py3 import DetectionIncidentFilterCondition @@ -116,6 +128,12 @@ from ._models_py3 import SeriesIdentity from ._models_py3 import SeriesResult from ._models_py3 import SeriesResultList + from ._models_py3 import ServicePrincipalCredential + from ._models_py3 import ServicePrincipalCredentialPatch + from ._models_py3 import ServicePrincipalInKVCredential + from ._models_py3 import ServicePrincipalInKVCredentialPatch + from ._models_py3 import ServicePrincipalInKVParam + from ._models_py3 import ServicePrincipalParam from ._models_py3 import SeverityCondition from ._models_py3 import SeverityFilterCondition from ._models_py3 import SmartDetectionCondition @@ -160,6 +178,12 @@ from ._models import AzureDataLakeStorageGen2DataFeed # type: ignore from ._models import AzureDataLakeStorageGen2DataFeedPatch # type: ignore from ._models import AzureDataLakeStorageGen2Parameter # type: ignore + from ._models import AzureEventHubsDataFeed # type: ignore + from 
._models import AzureEventHubsDataFeedPatch # type: ignore + from ._models import AzureEventHubsParameter # type: ignore + from ._models import AzureSQLConnectionStringCredential # type: ignore + from ._models import AzureSQLConnectionStringCredentialPatch # type: ignore + from ._models import AzureSQLConnectionStringParam # type: ignore from ._models import AzureTableDataFeed # type: ignore from ._models import AzureTableDataFeedPatch # type: ignore from ._models import AzureTableParameter # type: ignore @@ -172,6 +196,12 @@ from ._models import DataFeedDetailPatch # type: ignore from ._models import DataFeedIngestionProgress # type: ignore from ._models import DataFeedList # type: ignore + from ._models import DataLakeGen2SharedKeyCredential # type: ignore + from ._models import DataLakeGen2SharedKeyCredentialPatch # type: ignore + from ._models import DataLakeGen2SharedKeyParam # type: ignore + from ._models import DataSourceCredential # type: ignore + from ._models import DataSourceCredentialList # type: ignore + from ._models import DataSourceCredentialPatch # type: ignore from ._models import DetectionAnomalyFilterCondition # type: ignore from ._models import DetectionAnomalyResultQuery # type: ignore from ._models import DetectionIncidentFilterCondition # type: ignore @@ -238,6 +268,12 @@ from ._models import SeriesIdentity # type: ignore from ._models import SeriesResult # type: ignore from ._models import SeriesResultList # type: ignore + from ._models import ServicePrincipalCredential # type: ignore + from ._models import ServicePrincipalCredentialPatch # type: ignore + from ._models import ServicePrincipalInKVCredential # type: ignore + from ._models import ServicePrincipalInKVCredentialPatch # type: ignore + from ._models import ServicePrincipalInKVParam # type: ignore + from ._models import ServicePrincipalParam # type: ignore from ._models import SeverityCondition # type: ignore from ._models import SeverityFilterCondition # type: ignore from ._models import SmartDetectionCondition # type: ignore @@ -258,7 +294,9 @@ AnomalyScope, AnomalyStatus, AnomalyValue, + AuthenticationTypeEnum, ChangePointValue, + DataSourceCredentialType, DataSourceType, Direction, EntityStatus, @@ -275,6 +313,7 @@ Severity, SnoozeScope, TimeMode, + ValueType, ViewMode, ) @@ -310,6 +349,12 @@ 'AzureDataLakeStorageGen2DataFeed', 'AzureDataLakeStorageGen2DataFeedPatch', 'AzureDataLakeStorageGen2Parameter', + 'AzureEventHubsDataFeed', + 'AzureEventHubsDataFeedPatch', + 'AzureEventHubsParameter', + 'AzureSQLConnectionStringCredential', + 'AzureSQLConnectionStringCredentialPatch', + 'AzureSQLConnectionStringParam', 'AzureTableDataFeed', 'AzureTableDataFeedPatch', 'AzureTableParameter', @@ -322,6 +367,12 @@ 'DataFeedDetailPatch', 'DataFeedIngestionProgress', 'DataFeedList', + 'DataLakeGen2SharedKeyCredential', + 'DataLakeGen2SharedKeyCredentialPatch', + 'DataLakeGen2SharedKeyParam', + 'DataSourceCredential', + 'DataSourceCredentialList', + 'DataSourceCredentialPatch', 'DetectionAnomalyFilterCondition', 'DetectionAnomalyResultQuery', 'DetectionIncidentFilterCondition', @@ -388,6 +439,12 @@ 'SeriesIdentity', 'SeriesResult', 'SeriesResultList', + 'ServicePrincipalCredential', + 'ServicePrincipalCredentialPatch', + 'ServicePrincipalInKVCredential', + 'ServicePrincipalInKVCredentialPatch', + 'ServicePrincipalInKVParam', + 'ServicePrincipalParam', 'SeverityCondition', 'SeverityFilterCondition', 'SmartDetectionCondition', @@ -406,7 +463,9 @@ 'AnomalyScope', 'AnomalyStatus', 'AnomalyValue', + 
'AuthenticationTypeEnum', 'ChangePointValue', + 'DataSourceCredentialType', 'DataSourceType', 'Direction', 'EntityStatus', @@ -423,5 +482,6 @@ 'Severity', 'SnoozeScope', 'TimeMode', + 'ValueType', 'ViewMode', ] diff --git a/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/models/_azure_cognitive_service_metrics_advisor_restapi_open_ap_iv2_enums.py b/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/models/_azure_cognitive_service_metrics_advisor_restapi_open_ap_iv2_enums.py index 304d7946caa5..6028c708ae20 100644 --- a/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/models/_azure_cognitive_service_metrics_advisor_restapi_open_ap_iv2_enums.py +++ b/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/models/_azure_cognitive_service_metrics_advisor_restapi_open_ap_iv2_enums.py @@ -76,19 +76,42 @@ class AnomalyValue(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): ANOMALY = "Anomaly" NOT_ANOMALY = "NotAnomaly" +class AuthenticationTypeEnum(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """authentication type for corresponding data source + """ + + BASIC = "Basic" + MANAGED_IDENTITY = "ManagedIdentity" + AZURE_SQL_CONNECTION_STRING = "AzureSQLConnectionString" + DATA_LAKE_GEN2_SHARED_KEY = "DataLakeGen2SharedKey" + SERVICE_PRINCIPAL = "ServicePrincipal" + SERVICE_PRINCIPAL_IN_KV = "ServicePrincipalInKV" + class ChangePointValue(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): AUTO_DETECT = "AutoDetect" CHANGE_POINT = "ChangePoint" NOT_CHANGE_POINT = "NotChangePoint" +class DataSourceCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Type of data source credential + """ + + AZURE_SQL_CONNECTION_STRING = "AzureSQLConnectionString" + DATA_LAKE_GEN2_SHARED_KEY = "DataLakeGen2SharedKey" + SERVICE_PRINCIPAL = "ServicePrincipal" + SERVICE_PRINCIPAL_IN_KV = "ServicePrincipalInKV" + class DataSourceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Type of data source credential + """ AZURE_APPLICATION_INSIGHTS = "AzureApplicationInsights" AZURE_BLOB = "AzureBlob" AZURE_COSMOS_DB = "AzureCosmosDB" AZURE_DATA_EXPLORER = "AzureDataExplorer" AZURE_DATA_LAKE_STORAGE_GEN2 = "AzureDataLakeStorageGen2" + AZURE_EVENT_HUBS = "AzureEventHubs" AZURE_TABLE = "AzureTable" ELASTICSEARCH = "Elasticsearch" HTTP_REQUEST = "HttpRequest" @@ -107,6 +130,8 @@ class Direction(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): UP = "Up" class EntityStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """data feed status + """ ACTIVE = "Active" PAUSED = "Paused" @@ -137,6 +162,8 @@ class FillMissingPointType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): NO_FILLING = "NoFilling" class Granularity(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """granularity of the time series + """ YEARLY = "Yearly" MONTHLY = "Monthly" @@ -225,6 +252,13 @@ class TimeMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): CREATED_TIME = "CreatedTime" MODIFIED_TIME = "ModifiedTime" +class ValueType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """data used to implement value filter + """ + + VALUE = "Value" + MEAN = "Mean" + class ViewMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """data feed access mode, default is Private """ diff --git a/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/models/_models.py 
b/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/models/_models.py index cb0b5a0f1ff3..858e3de08ad4 100644 --- a/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/models/_models.py +++ b/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/models/_models.py @@ -172,6 +172,8 @@ class AnomalyAlertingConfiguration(msrest.serialization.Model): include: "AND", "OR", "XOR". :type cross_metrics_operator: str or ~azure.ai.metricsadvisor.models.AnomalyAlertingConfigurationLogicType + :param split_alert_by_dimensions: dimensions used to split alert. + :type split_alert_by_dimensions: list[str] :param hook_ids: Required. hook unique ids. :type hook_ids: list[str] :param metric_alerting_configurations: Required. Anomaly alerting configurations. @@ -182,6 +184,7 @@ class AnomalyAlertingConfiguration(msrest.serialization.Model): _validation = { 'anomaly_alerting_configuration_id': {'readonly': True}, 'name': {'required': True}, + 'split_alert_by_dimensions': {'unique': True}, 'hook_ids': {'required': True, 'unique': True}, 'metric_alerting_configurations': {'required': True, 'unique': True}, } @@ -191,6 +194,7 @@ class AnomalyAlertingConfiguration(msrest.serialization.Model): 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'cross_metrics_operator': {'key': 'crossMetricsOperator', 'type': 'str'}, + 'split_alert_by_dimensions': {'key': 'splitAlertByDimensions', 'type': '[str]'}, 'hook_ids': {'key': 'hookIds', 'type': '[str]'}, 'metric_alerting_configurations': {'key': 'metricAlertingConfigurations', 'type': '[MetricAlertingConfiguration]'}, } @@ -202,8 +206,9 @@ def __init__( super(AnomalyAlertingConfiguration, self).__init__(**kwargs) self.anomaly_alerting_configuration_id = None self.name = kwargs['name'] - self.description = kwargs.get('description', None) + self.description = kwargs.get('description', "") self.cross_metrics_operator = kwargs.get('cross_metrics_operator', None) + self.split_alert_by_dimensions = kwargs.get('split_alert_by_dimensions', None) self.hook_ids = kwargs['hook_ids'] self.metric_alerting_configurations = kwargs['metric_alerting_configurations'] @@ -244,6 +249,8 @@ class AnomalyAlertingConfigurationPatch(msrest.serialization.Model): "XOR". :type cross_metrics_operator: str or ~azure.ai.metricsadvisor.models.AnomalyAlertingConfigurationLogicType + :param split_alert_by_dimensions: dimensions used to split alert. + :type split_alert_by_dimensions: list[str] :param hook_ids: hook unique ids. :type hook_ids: list[str] :param metric_alerting_configurations: Anomaly alerting configurations. 
@@ -252,6 +259,7 @@ class AnomalyAlertingConfigurationPatch(msrest.serialization.Model): """ _validation = { + 'split_alert_by_dimensions': {'unique': True}, 'hook_ids': {'unique': True}, 'metric_alerting_configurations': {'unique': True}, } @@ -260,6 +268,7 @@ class AnomalyAlertingConfigurationPatch(msrest.serialization.Model): 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'cross_metrics_operator': {'key': 'crossMetricsOperator', 'type': 'str'}, + 'split_alert_by_dimensions': {'key': 'splitAlertByDimensions', 'type': '[str]'}, 'hook_ids': {'key': 'hookIds', 'type': '[str]'}, 'metric_alerting_configurations': {'key': 'metricAlertingConfigurations', 'type': '[MetricAlertingConfiguration]'}, } @@ -270,8 +279,9 @@ def __init__( ): super(AnomalyAlertingConfigurationPatch, self).__init__(**kwargs) self.name = kwargs.get('name', None) - self.description = kwargs.get('description', None) + self.description = kwargs.get('description', "") self.cross_metrics_operator = kwargs.get('cross_metrics_operator', None) + self.split_alert_by_dimensions = kwargs.get('split_alert_by_dimensions', None) self.hook_ids = kwargs.get('hook_ids', None) self.metric_alerting_configurations = kwargs.get('metric_alerting_configurations', None) @@ -326,7 +336,7 @@ def __init__( super(AnomalyDetectionConfiguration, self).__init__(**kwargs) self.anomaly_detection_configuration_id = None self.name = kwargs['name'] - self.description = kwargs.get('description', None) + self.description = kwargs.get('description', "") self.metric_id = kwargs['metric_id'] self.whole_metric_configuration = kwargs['whole_metric_configuration'] self.dimension_group_override_configurations = kwargs.get('dimension_group_override_configurations', None) @@ -393,7 +403,7 @@ def __init__( ): super(AnomalyDetectionConfigurationPatch, self).__init__(**kwargs) self.name = kwargs.get('name', None) - self.description = kwargs.get('description', None) + self.description = kwargs.get('description', "") self.whole_metric_configuration = kwargs.get('whole_metric_configuration', None) self.dimension_group_override_configurations = kwargs.get('dimension_group_override_configurations', None) self.series_override_configurations = kwargs.get('series_override_configurations', None) @@ -550,12 +560,12 @@ class AnomalyFeedback(MetricFeedback): :type metric_id: str :param dimension_filter: Required. :type dimension_filter: ~azure.ai.metricsadvisor.models.FeedbackDimensionFilter - :param start_time: the start timestamp of feedback time range. + :param start_time: Required. the start timestamp of feedback time range. :type start_time: ~datetime.datetime - :param end_time: the end timestamp of feedback time range, when equals to startTime means only - one timestamp. + :param end_time: Required. the end timestamp of feedback time range, when equals to startTime + means only one timestamp. :type end_time: ~datetime.datetime - :param value: + :param value: Required. :type value: ~azure.ai.metricsadvisor.models.AnomalyFeedbackValue :param anomaly_detection_configuration_id: the corresponding anomaly detection configuration of this feedback. 
@@ -572,6 +582,9 @@ class AnomalyFeedback(MetricFeedback): 'user_principal': {'readonly': True}, 'metric_id': {'required': True}, 'dimension_filter': {'required': True}, + 'start_time': {'required': True}, + 'end_time': {'required': True}, + 'value': {'required': True}, } _attribute_map = { @@ -594,9 +607,9 @@ def __init__( ): super(AnomalyFeedback, self).__init__(**kwargs) self.feedback_type = 'Anomaly' # type: str - self.start_time = kwargs.get('start_time', None) - self.end_time = kwargs.get('end_time', None) - self.value = kwargs.get('value', None) + self.start_time = kwargs['start_time'] + self.end_time = kwargs['end_time'] + self.value = kwargs['value'] self.anomaly_detection_configuration_id = kwargs.get('anomaly_detection_configuration_id', None) self.anomaly_detection_configuration_snapshot = kwargs.get('anomaly_detection_configuration_snapshot', None) @@ -641,16 +654,24 @@ class AnomalyProperty(msrest.serialization.Model): only return for alerting anomaly result. Possible values include: "Active", "Resolved". :vartype anomaly_status: str or ~azure.ai.metricsadvisor.models.AnomalyStatus + :ivar value: value of the anomaly. + :vartype value: float + :ivar expected_value: expected value of the anomaly given by smart detector. + :vartype expected_value: float """ _validation = { 'anomaly_severity': {'required': True}, 'anomaly_status': {'readonly': True}, + 'value': {'readonly': True}, + 'expected_value': {'readonly': True}, } _attribute_map = { 'anomaly_severity': {'key': 'anomalySeverity', 'type': 'str'}, 'anomaly_status': {'key': 'anomalyStatus', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'float'}, + 'expected_value': {'key': 'expectedValue', 'type': 'float'}, } def __init__( @@ -660,6 +681,8 @@ def __init__( super(AnomalyProperty, self).__init__(**kwargs) self.anomaly_severity = kwargs['anomaly_severity'] self.anomaly_status = None + self.value = None + self.expected_value = None class AnomalyResult(msrest.serialization.Model): @@ -763,7 +786,7 @@ class DataFeedDetail(msrest.serialization.Model): """DataFeedDetail. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureApplicationInsightsDataFeed, AzureBlobDataFeed, AzureCosmosDBDataFeed, AzureDataExplorerDataFeed, AzureDataLakeStorageGen2DataFeed, AzureTableDataFeed, ElasticsearchDataFeed, HttpRequestDataFeed, InfluxDBDataFeed, MongoDBDataFeed, MySqlDataFeed, PostgreSqlDataFeed, SQLServerDataFeed. + sub-classes are: AzureApplicationInsightsDataFeed, AzureBlobDataFeed, AzureCosmosDBDataFeed, AzureDataExplorerDataFeed, AzureDataLakeStorageGen2DataFeed, AzureEventHubsDataFeed, AzureTableDataFeed, ElasticsearchDataFeed, HttpRequestDataFeed, InfluxDBDataFeed, MongoDBDataFeed, MySqlDataFeed, PostgreSqlDataFeed, SQLServerDataFeed. Variables are only populated by the server, and will be ignored when sending a request. @@ -771,8 +794,8 @@ class DataFeedDetail(msrest.serialization.Model): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. 
:vartype data_feed_id: str @@ -837,6 +860,12 @@ class DataFeedDetail(msrest.serialization.Model): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str """ _validation = { @@ -885,10 +914,12 @@ class DataFeedDetail(msrest.serialization.Model): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, } _subtype_map = { - 'data_source_type': {'AzureApplicationInsights': 'AzureApplicationInsightsDataFeed', 'AzureBlob': 'AzureBlobDataFeed', 'AzureCosmosDB': 'AzureCosmosDBDataFeed', 'AzureDataExplorer': 'AzureDataExplorerDataFeed', 'AzureDataLakeStorageGen2': 'AzureDataLakeStorageGen2DataFeed', 'AzureTable': 'AzureTableDataFeed', 'Elasticsearch': 'ElasticsearchDataFeed', 'HttpRequest': 'HttpRequestDataFeed', 'InfluxDB': 'InfluxDBDataFeed', 'MongoDB': 'MongoDBDataFeed', 'MySql': 'MySqlDataFeed', 'PostgreSql': 'PostgreSqlDataFeed', 'SqlServer': 'SQLServerDataFeed'} + 'data_source_type': {'AzureApplicationInsights': 'AzureApplicationInsightsDataFeed', 'AzureBlob': 'AzureBlobDataFeed', 'AzureCosmosDB': 'AzureCosmosDBDataFeed', 'AzureDataExplorer': 'AzureDataExplorerDataFeed', 'AzureDataLakeStorageGen2': 'AzureDataLakeStorageGen2DataFeed', 'AzureEventHubs': 'AzureEventHubsDataFeed', 'AzureTable': 'AzureTableDataFeed', 'Elasticsearch': 'ElasticsearchDataFeed', 'HttpRequest': 'HttpRequestDataFeed', 'InfluxDB': 'InfluxDBDataFeed', 'MongoDB': 'MongoDBDataFeed', 'MySql': 'MySqlDataFeed', 'PostgreSql': 'PostgreSqlDataFeed', 'SqlServer': 'SQLServerDataFeed'} } def __init__( @@ -899,12 +930,12 @@ def __init__( self.data_source_type = None # type: Optional[str] self.data_feed_id = None self.data_feed_name = kwargs['data_feed_name'] - self.data_feed_description = kwargs.get('data_feed_description', None) + self.data_feed_description = kwargs.get('data_feed_description', "") self.granularity_name = kwargs['granularity_name'] self.granularity_amount = kwargs.get('granularity_amount', None) self.metrics = kwargs['metrics'] self.dimension = kwargs.get('dimension', None) - self.timestamp_column = kwargs.get('timestamp_column', None) + self.timestamp_column = kwargs.get('timestamp_column', "") self.data_start_from = kwargs['data_start_from'] self.start_offset_in_seconds = kwargs.get('start_offset_in_seconds', 0) self.max_concurrency = kwargs.get('max_concurrency', -1) @@ -923,7 +954,9 @@ def __init__( self.creator = None self.status = None self.created_time = None - self.action_link_template = kwargs.get('action_link_template', None) + self.action_link_template = kwargs.get('action_link_template', "") + self.authentication_type = kwargs.get('authentication_type', None) + self.credential_id = kwargs.get('credential_id', None) class AzureApplicationInsightsDataFeed(DataFeedDetail): @@ -935,8 +968,8 @@ class AzureApplicationInsightsDataFeed(DataFeedDetail): :param data_source_type: Required. 
data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str @@ -1001,7 +1034,13 @@ class AzureApplicationInsightsDataFeed(DataFeedDetail): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. :type action_link_template: str - :param data_source_parameter: + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: Required. :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureApplicationInsightsParameter """ @@ -1020,6 +1059,7 @@ class AzureApplicationInsightsDataFeed(DataFeedDetail): 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, } _attribute_map = { @@ -1051,6 +1091,8 @@ class AzureApplicationInsightsDataFeed(DataFeedDetail): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureApplicationInsightsParameter'}, } @@ -1060,21 +1102,21 @@ def __init__( ): super(AzureApplicationInsightsDataFeed, self).__init__(**kwargs) self.data_source_type = 'AzureApplicationInsights' # type: str - self.data_source_parameter = kwargs.get('data_source_parameter', None) + self.data_source_parameter = kwargs['data_source_parameter'] class DataFeedDetailPatch(msrest.serialization.Model): """DataFeedDetailPatch. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureApplicationInsightsDataFeedPatch, AzureBlobDataFeedPatch, AzureCosmosDBDataFeedPatch, AzureDataExplorerDataFeedPatch, AzureDataLakeStorageGen2DataFeedPatch, AzureTableDataFeedPatch, ElasticsearchDataFeedPatch, HttpRequestDataFeedPatch, InfluxDBDataFeedPatch, MongoDBDataFeedPatch, MySqlDataFeedPatch, PostgreSqlDataFeedPatch, SQLServerDataFeedPatch. + sub-classes are: AzureApplicationInsightsDataFeedPatch, AzureBlobDataFeedPatch, AzureCosmosDBDataFeedPatch, AzureDataExplorerDataFeedPatch, AzureDataLakeStorageGen2DataFeedPatch, AzureEventHubsDataFeedPatch, AzureTableDataFeedPatch, ElasticsearchDataFeedPatch, HttpRequestDataFeedPatch, InfluxDBDataFeedPatch, MongoDBDataFeedPatch, MySqlDataFeedPatch, PostgreSqlDataFeedPatch, SQLServerDataFeedPatch. All required parameters must be populated in order to send to Azure. :param data_source_type: Required. data source type.Constant filled by server. 
Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -1122,6 +1164,12 @@ class DataFeedDetailPatch(msrest.serialization.Model): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str """ _validation = { @@ -1152,10 +1200,12 @@ class DataFeedDetailPatch(msrest.serialization.Model): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, } _subtype_map = { - 'data_source_type': {'AzureApplicationInsights': 'AzureApplicationInsightsDataFeedPatch', 'AzureBlob': 'AzureBlobDataFeedPatch', 'AzureCosmosDB': 'AzureCosmosDBDataFeedPatch', 'AzureDataExplorer': 'AzureDataExplorerDataFeedPatch', 'AzureDataLakeStorageGen2': 'AzureDataLakeStorageGen2DataFeedPatch', 'AzureTable': 'AzureTableDataFeedPatch', 'Elasticsearch': 'ElasticsearchDataFeedPatch', 'HttpRequest': 'HttpRequestDataFeedPatch', 'InfluxDB': 'InfluxDBDataFeedPatch', 'MongoDB': 'MongoDBDataFeedPatch', 'MySql': 'MySqlDataFeedPatch', 'PostgreSql': 'PostgreSqlDataFeedPatch', 'SqlServer': 'SQLServerDataFeedPatch'} + 'data_source_type': {'AzureApplicationInsights': 'AzureApplicationInsightsDataFeedPatch', 'AzureBlob': 'AzureBlobDataFeedPatch', 'AzureCosmosDB': 'AzureCosmosDBDataFeedPatch', 'AzureDataExplorer': 'AzureDataExplorerDataFeedPatch', 'AzureDataLakeStorageGen2': 'AzureDataLakeStorageGen2DataFeedPatch', 'AzureEventHubs': 'AzureEventHubsDataFeedPatch', 'AzureTable': 'AzureTableDataFeedPatch', 'Elasticsearch': 'ElasticsearchDataFeedPatch', 'HttpRequest': 'HttpRequestDataFeedPatch', 'InfluxDB': 'InfluxDBDataFeedPatch', 'MongoDB': 'MongoDBDataFeedPatch', 'MySql': 'MySqlDataFeedPatch', 'PostgreSql': 'PostgreSqlDataFeedPatch', 'SqlServer': 'SQLServerDataFeedPatch'} } def __init__( @@ -1183,6 +1233,8 @@ def __init__( self.viewers = kwargs.get('viewers', None) self.status = kwargs.get('status', None) self.action_link_template = kwargs.get('action_link_template', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.credential_id = kwargs.get('credential_id', None) class AzureApplicationInsightsDataFeedPatch(DataFeedDetailPatch): @@ -1192,8 +1244,8 @@ class AzureApplicationInsightsDataFeedPatch(DataFeedDetailPatch): :param data_source_type: Required. data source type.Constant filled by server. 
Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -1241,6 +1293,12 @@ class AzureApplicationInsightsDataFeedPatch(DataFeedDetailPatch): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureApplicationInsightsParameter """ @@ -1273,6 +1331,8 @@ class AzureApplicationInsightsDataFeedPatch(DataFeedDetailPatch): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureApplicationInsightsParameter'}, } @@ -1334,8 +1394,8 @@ class AzureBlobDataFeed(DataFeedDetail): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str @@ -1400,7 +1460,13 @@ class AzureBlobDataFeed(DataFeedDetail): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. :type action_link_template: str - :param data_source_parameter: + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: Required. 
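
The switch from kwargs.get('data_source_parameter', None) to kwargs['data_source_parameter'] makes the source parameter mandatory at construction time for every concrete data feed. A minimal sketch of the new failure mode, assuming these generated models are importable from azure.ai.metricsadvisor.models as the docstrings suggest (Metric's metric_name keyword is also an assumption, as its definition is outside this hunk):

import datetime
from azure.ai.metricsadvisor.models import AzureBlobDataFeed, Metric

try:
    AzureBlobDataFeed(
        data_feed_name="blob-feed",
        granularity_name="Daily",
        metrics=[Metric(metric_name="cost")],  # metric_name kwarg is assumed
        data_start_from=datetime.datetime(2021, 1, 1),
        # data_source_parameter deliberately omitted
    )
except KeyError as err:
    print("now a hard requirement:", err)  # 'data_source_parameter'
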
:type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureBlobParameter """ @@ -1419,6 +1485,7 @@ class AzureBlobDataFeed(DataFeedDetail): 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, } _attribute_map = { @@ -1450,6 +1517,8 @@ class AzureBlobDataFeed(DataFeedDetail): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureBlobParameter'}, } @@ -1459,7 +1528,7 @@ def __init__( ): super(AzureBlobDataFeed, self).__init__(**kwargs) self.data_source_type = 'AzureBlob' # type: str - self.data_source_parameter = kwargs.get('data_source_parameter', None) + self.data_source_parameter = kwargs['data_source_parameter'] class AzureBlobDataFeedPatch(DataFeedDetailPatch): @@ -1469,8 +1538,8 @@ class AzureBlobDataFeedPatch(DataFeedDetailPatch): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -1518,6 +1587,12 @@ class AzureBlobDataFeedPatch(DataFeedDetailPatch): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureBlobParameter """ @@ -1550,6 +1625,8 @@ class AzureBlobDataFeedPatch(DataFeedDetailPatch): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureBlobParameter'}, } @@ -1606,8 +1683,8 @@ class AzureCosmosDBDataFeed(DataFeedDetail): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". 
:type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str @@ -1672,7 +1749,13 @@ class AzureCosmosDBDataFeed(DataFeedDetail): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. :type action_link_template: str - :param data_source_parameter: + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: Required. :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureCosmosDBParameter """ @@ -1691,6 +1774,7 @@ class AzureCosmosDBDataFeed(DataFeedDetail): 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, } _attribute_map = { @@ -1722,6 +1806,8 @@ class AzureCosmosDBDataFeed(DataFeedDetail): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureCosmosDBParameter'}, } @@ -1731,7 +1817,7 @@ def __init__( ): super(AzureCosmosDBDataFeed, self).__init__(**kwargs) self.data_source_type = 'AzureCosmosDB' # type: str - self.data_source_parameter = kwargs.get('data_source_parameter', None) + self.data_source_parameter = kwargs['data_source_parameter'] class AzureCosmosDBDataFeedPatch(DataFeedDetailPatch): @@ -1741,8 +1827,8 @@ class AzureCosmosDBDataFeedPatch(DataFeedDetailPatch): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -1790,6 +1876,12 @@ class AzureCosmosDBDataFeedPatch(DataFeedDetailPatch): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. 
+ :type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureCosmosDBParameter """ @@ -1822,6 +1914,8 @@ class AzureCosmosDBDataFeedPatch(DataFeedDetailPatch): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureCosmosDBParameter'}, } @@ -1883,8 +1977,8 @@ class AzureDataExplorerDataFeed(DataFeedDetail): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str @@ -1949,7 +2043,13 @@ class AzureDataExplorerDataFeed(DataFeedDetail): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. :type action_link_template: str - :param data_source_parameter: + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: Required. :type data_source_parameter: ~azure.ai.metricsadvisor.models.SqlSourceParameter """ @@ -1968,6 +2068,7 @@ class AzureDataExplorerDataFeed(DataFeedDetail): 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, } _attribute_map = { @@ -1999,6 +2100,8 @@ class AzureDataExplorerDataFeed(DataFeedDetail): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'SqlSourceParameter'}, } @@ -2008,7 +2111,7 @@ def __init__( ): super(AzureDataExplorerDataFeed, self).__init__(**kwargs) self.data_source_type = 'AzureDataExplorer' # type: str - self.data_source_parameter = kwargs.get('data_source_parameter', None) + self.data_source_parameter = kwargs['data_source_parameter'] class AzureDataExplorerDataFeedPatch(DataFeedDetailPatch): @@ -2018,8 +2121,8 @@ class AzureDataExplorerDataFeedPatch(DataFeedDetailPatch): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". 
+ "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -2067,6 +2170,12 @@ class AzureDataExplorerDataFeedPatch(DataFeedDetailPatch): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.SqlSourceParameter """ @@ -2099,6 +2208,8 @@ class AzureDataExplorerDataFeedPatch(DataFeedDetailPatch): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'SqlSourceParameter'}, } @@ -2120,8 +2231,8 @@ class AzureDataLakeStorageGen2DataFeed(DataFeedDetail): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str @@ -2186,7 +2297,13 @@ class AzureDataLakeStorageGen2DataFeed(DataFeedDetail): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. :type action_link_template: str - :param data_source_parameter: + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: Required. 
:type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureDataLakeStorageGen2Parameter """ @@ -2205,6 +2322,7 @@ class AzureDataLakeStorageGen2DataFeed(DataFeedDetail): 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, } _attribute_map = { @@ -2236,6 +2354,8 @@ class AzureDataLakeStorageGen2DataFeed(DataFeedDetail): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureDataLakeStorageGen2Parameter'}, } @@ -2245,7 +2365,7 @@ def __init__( ): super(AzureDataLakeStorageGen2DataFeed, self).__init__(**kwargs) self.data_source_type = 'AzureDataLakeStorageGen2' # type: str - self.data_source_parameter = kwargs.get('data_source_parameter', None) + self.data_source_parameter = kwargs['data_source_parameter'] class AzureDataLakeStorageGen2DataFeedPatch(DataFeedDetailPatch): @@ -2255,8 +2375,8 @@ class AzureDataLakeStorageGen2DataFeedPatch(DataFeedDetailPatch): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -2304,6 +2424,12 @@ class AzureDataLakeStorageGen2DataFeedPatch(DataFeedDetailPatch): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureDataLakeStorageGen2Parameter """ @@ -2336,6 +2462,8 @@ class AzureDataLakeStorageGen2DataFeedPatch(DataFeedDetailPatch): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureDataLakeStorageGen2Parameter'}, } @@ -2355,7 +2483,7 @@ class AzureDataLakeStorageGen2Parameter(msrest.serialization.Model): :param account_name: Required. Account name. :type account_name: str - :param account_key: Required. Account key. + :param account_key: Account key. :type account_key: str :param file_system_name: Required. File system name (Container). 
:type file_system_name: str @@ -2366,31 +2494,517 @@ class AzureDataLakeStorageGen2Parameter(msrest.serialization.Model): """ _validation = { - 'account_name': {'required': True}, - 'account_key': {'required': True}, - 'file_system_name': {'required': True}, - 'directory_template': {'required': True}, - 'file_template': {'required': True}, + 'account_name': {'required': True}, + 'file_system_name': {'required': True}, + 'directory_template': {'required': True}, + 'file_template': {'required': True}, + } + + _attribute_map = { + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'account_key': {'key': 'accountKey', 'type': 'str'}, + 'file_system_name': {'key': 'fileSystemName', 'type': 'str'}, + 'directory_template': {'key': 'directoryTemplate', 'type': 'str'}, + 'file_template': {'key': 'fileTemplate', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDataLakeStorageGen2Parameter, self).__init__(**kwargs) + self.account_name = kwargs['account_name'] + self.account_key = kwargs.get('account_key', None) + self.file_system_name = kwargs['file_system_name'] + self.directory_template = kwargs['directory_template'] + self.file_template = kwargs['file_template'] + + +class AzureEventHubsDataFeed(DataFeedDetail): + """AzureEventHubsDataFeed. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param data_source_type: Required. data source type.Constant filled by server. Possible values + include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". + :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType + :ivar data_feed_id: data feed unique id. + :vartype data_feed_id: str + :param data_feed_name: Required. data feed name. + :type data_feed_name: str + :param data_feed_description: data feed description. + :type data_feed_description: str + :param granularity_name: Required. granularity of the time series. Possible values include: + "Yearly", "Monthly", "Weekly", "Daily", "Hourly", "Minutely", "Secondly", "Custom". + :type granularity_name: str or ~azure.ai.metricsadvisor.models.Granularity + :param granularity_amount: if granularity is custom,it is required. + :type granularity_amount: int + :param metrics: Required. measure list. + :type metrics: list[~azure.ai.metricsadvisor.models.Metric] + :param dimension: dimension list. + :type dimension: list[~azure.ai.metricsadvisor.models.Dimension] + :param timestamp_column: user-defined timestamp column. if timestampColumn is null, start time + of every time slice will be used as default value. + :type timestamp_column: str + :param data_start_from: Required. ingestion start time. + :type data_start_from: ~datetime.datetime + :param start_offset_in_seconds: the time that the beginning of data ingestion task will delay + for every data slice according to this offset. + :type start_offset_in_seconds: long + :param max_concurrency: the max concurrency of data ingestion queries against user data source. + 0 means no limitation. + :type max_concurrency: int + :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks. 
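
With 'account_key' dropped from the required set in _validation, an AzureDataLakeStorageGen2Parameter can now be built without embedding the key, e.g. when the shared key is supplied through a credential entity instead. A sketch under the same import assumption:

from azure.ai.metricsadvisor.models import AzureDataLakeStorageGen2Parameter

adls_param = AzureDataLakeStorageGen2Parameter(
    account_name="myaccount",
    file_system_name="metrics",       # the container
    directory_template="%Y/%m/%d",
    file_template="data.json",
    # account_key left unset; a DataLakeGen2SharedKey credential can supply it
)
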
+ :type min_retry_interval_in_seconds: long + :param stop_retry_after_in_seconds: stop retry data ingestion after the data slice first + schedule time in seconds. + :type stop_retry_after_in_seconds: long + :param need_rollup: mark if the data feed need rollup. Possible values include: "NoRollup", + "NeedRollup", "AlreadyRollup". + :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum + :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min", + "Avg", "Count". + :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod + :param roll_up_columns: roll up columns. + :type roll_up_columns: list[str] + :param all_up_identification: the identification value for the row of calculated all-up value. + :type all_up_identification: str + :param fill_missing_point_type: the type of fill missing point for anomaly detection. Possible + values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling". + :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType + :param fill_missing_point_value: the value of fill missing point for anomaly detection. + :type fill_missing_point_value: float + :param view_mode: data feed access mode, default is Private. Possible values include: + "Private", "Public". + :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode + :param admins: data feed administrator. + :type admins: list[str] + :param viewers: data feed viewer. + :type viewers: list[str] + :ivar is_admin: the query user is one of data feed administrator or not. + :vartype is_admin: bool + :ivar creator: data feed creator. + :vartype creator: str + :ivar status: data feed status. Possible values include: "Active", "Paused". + :vartype status: str or ~azure.ai.metricsadvisor.models.EntityStatus + :ivar created_time: data feed created time. + :vartype created_time: ~datetime.datetime + :param action_link_template: action link for alert. + :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: Required. 
+ :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureEventHubsParameter + """ + + _validation = { + 'data_source_type': {'required': True}, + 'data_feed_id': {'readonly': True}, + 'data_feed_name': {'required': True}, + 'granularity_name': {'required': True}, + 'metrics': {'required': True, 'unique': True}, + 'dimension': {'unique': True}, + 'data_start_from': {'required': True}, + 'roll_up_columns': {'unique': True}, + 'admins': {'unique': True}, + 'viewers': {'unique': True}, + 'is_admin': {'readonly': True}, + 'creator': {'readonly': True}, + 'status': {'readonly': True}, + 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, + } + + _attribute_map = { + 'data_source_type': {'key': 'dataSourceType', 'type': 'str'}, + 'data_feed_id': {'key': 'dataFeedId', 'type': 'str'}, + 'data_feed_name': {'key': 'dataFeedName', 'type': 'str'}, + 'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'}, + 'granularity_name': {'key': 'granularityName', 'type': 'str'}, + 'granularity_amount': {'key': 'granularityAmount', 'type': 'int'}, + 'metrics': {'key': 'metrics', 'type': '[Metric]'}, + 'dimension': {'key': 'dimension', 'type': '[Dimension]'}, + 'timestamp_column': {'key': 'timestampColumn', 'type': 'str'}, + 'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'}, + 'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'}, + 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, + 'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'}, + 'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'}, + 'need_rollup': {'key': 'needRollup', 'type': 'str'}, + 'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'}, + 'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'}, + 'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'}, + 'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'}, + 'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'}, + 'view_mode': {'key': 'viewMode', 'type': 'str'}, + 'admins': {'key': 'admins', 'type': '[str]'}, + 'viewers': {'key': 'viewers', 'type': '[str]'}, + 'is_admin': {'key': 'isAdmin', 'type': 'bool'}, + 'creator': {'key': 'creator', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, + 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, + 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureEventHubsParameter'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureEventHubsDataFeed, self).__init__(**kwargs) + self.data_source_type = 'AzureEventHubs' # type: str + self.data_source_parameter = kwargs['data_source_parameter'] + + +class AzureEventHubsDataFeedPatch(DataFeedDetailPatch): + """AzureEventHubsDataFeedPatch. + + All required parameters must be populated in order to send to Azure. + + :param data_source_type: Required. data source type.Constant filled by server. Possible values + include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". 
+ :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType + :param data_feed_name: data feed name. + :type data_feed_name: str + :param data_feed_description: data feed description. + :type data_feed_description: str + :param timestamp_column: user-defined timestamp column. if timestampColumn is null, start time + of every time slice will be used as default value. + :type timestamp_column: str + :param data_start_from: ingestion start time. + :type data_start_from: ~datetime.datetime + :param start_offset_in_seconds: the time that the beginning of data ingestion task will delay + for every data slice according to this offset. + :type start_offset_in_seconds: long + :param max_concurrency: the max concurrency of data ingestion queries against user data source. + 0 means no limitation. + :type max_concurrency: int + :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks. + :type min_retry_interval_in_seconds: long + :param stop_retry_after_in_seconds: stop retry data ingestion after the data slice first + schedule time in seconds. + :type stop_retry_after_in_seconds: long + :param need_rollup: mark if the data feed need rollup. Possible values include: "NoRollup", + "NeedRollup", "AlreadyRollup". + :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum + :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min", + "Avg", "Count". + :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod + :param roll_up_columns: roll up columns. + :type roll_up_columns: list[str] + :param all_up_identification: the identification value for the row of calculated all-up value. + :type all_up_identification: str + :param fill_missing_point_type: the type of fill missing point for anomaly detection. Possible + values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling". + :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType + :param fill_missing_point_value: the value of fill missing point for anomaly detection. + :type fill_missing_point_value: float + :param view_mode: data feed access mode, default is Private. Possible values include: + "Private", "Public". + :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode + :param admins: data feed administrator. + :type admins: list[str] + :param viewers: data feed viewer. + :type viewers: list[str] + :param status: data feed status. Possible values include: "Active", "Paused". + :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus + :param action_link_template: action link for alert. + :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. 
+ :type credential_id: str + :param data_source_parameter: + :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureEventHubsParameter + """ + + _validation = { + 'data_source_type': {'required': True}, + 'roll_up_columns': {'unique': True}, + 'admins': {'unique': True}, + 'viewers': {'unique': True}, + } + + _attribute_map = { + 'data_source_type': {'key': 'dataSourceType', 'type': 'str'}, + 'data_feed_name': {'key': 'dataFeedName', 'type': 'str'}, + 'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'}, + 'timestamp_column': {'key': 'timestampColumn', 'type': 'str'}, + 'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'}, + 'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'}, + 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, + 'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'}, + 'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'}, + 'need_rollup': {'key': 'needRollup', 'type': 'str'}, + 'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'}, + 'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'}, + 'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'}, + 'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'}, + 'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'}, + 'view_mode': {'key': 'viewMode', 'type': 'str'}, + 'admins': {'key': 'admins', 'type': '[str]'}, + 'viewers': {'key': 'viewers', 'type': '[str]'}, + 'status': {'key': 'status', 'type': 'str'}, + 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, + 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureEventHubsParameter'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureEventHubsDataFeedPatch, self).__init__(**kwargs) + self.data_source_type = 'AzureEventHubs' # type: str + self.data_source_parameter = kwargs.get('data_source_parameter', None) + + +class AzureEventHubsParameter(msrest.serialization.Model): + """AzureEventHubsParameter. + + All required parameters must be populated in order to send to Azure. + + :param connection_string: Required. Azure Event Hubs connection string. + :type connection_string: str + :param consumer_group: Required. Azure Event Hubs consumer group. + :type consumer_group: str + """ + + _validation = { + 'connection_string': {'required': True}, + 'consumer_group': {'required': True}, + } + + _attribute_map = { + 'connection_string': {'key': 'connectionString', 'type': 'str'}, + 'consumer_group': {'key': 'consumerGroup', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureEventHubsParameter, self).__init__(**kwargs) + self.connection_string = kwargs['connection_string'] + self.consumer_group = kwargs['consumer_group'] + + +class DataSourceCredential(msrest.serialization.Model): + """DataSourceCredential. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureSQLConnectionStringCredential, DataLakeGen2SharedKeyCredential, ServicePrincipalCredential, ServicePrincipalInKVCredential. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param data_source_credential_type: Required. 
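
AzureEventHubsParameter and its data feed round out the new AzureEventHubs source type; both required parameter fields are visible in the _validation maps above. A hedged construction sketch (import path assumed as before; Metric's metric_name kwarg is an assumption):

import datetime
from azure.ai.metricsadvisor.models import (
    AzureEventHubsDataFeed,
    AzureEventHubsParameter,
    Metric,
)

feed = AzureEventHubsDataFeed(
    data_feed_name="eventhubs-feed",
    granularity_name="Minutely",
    metrics=[Metric(metric_name="requests")],  # metric_name kwarg is assumed
    data_start_from=datetime.datetime(2021, 1, 1),
    data_source_parameter=AzureEventHubsParameter(
        connection_string="Endpoint=sb://<namespace>.servicebus.windows.net/;...",
        consumer_group="$Default",
    ),
)
print(feed.data_source_type)  # constant: 'AzureEventHubs'
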
Type of data source credential.Constant filled by + server. Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type data_source_credential_type: str or + ~azure.ai.metricsadvisor.models.DataSourceCredentialType + :ivar data_source_credential_id: Unique id of data source credential. + :vartype data_source_credential_id: str + :param data_source_credential_name: Required. Name of data source credential. + :type data_source_credential_name: str + :param data_source_credential_description: Description of data source credential. + :type data_source_credential_description: str + """ + + _validation = { + 'data_source_credential_type': {'required': True}, + 'data_source_credential_id': {'readonly': True}, + 'data_source_credential_name': {'required': True}, + } + + _attribute_map = { + 'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'}, + 'data_source_credential_id': {'key': 'dataSourceCredentialId', 'type': 'str'}, + 'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'}, + 'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'}, + } + + _subtype_map = { + 'data_source_credential_type': {'AzureSQLConnectionString': 'AzureSQLConnectionStringCredential', 'DataLakeGen2SharedKey': 'DataLakeGen2SharedKeyCredential', 'ServicePrincipal': 'ServicePrincipalCredential', 'ServicePrincipalInKV': 'ServicePrincipalInKVCredential'} + } + + def __init__( + self, + **kwargs + ): + super(DataSourceCredential, self).__init__(**kwargs) + self.data_source_credential_type = None # type: Optional[str] + self.data_source_credential_id = None + self.data_source_credential_name = kwargs['data_source_credential_name'] + self.data_source_credential_description = kwargs.get('data_source_credential_description', None) + + +class AzureSQLConnectionStringCredential(DataSourceCredential): + """AzureSQLConnectionStringCredential. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param data_source_credential_type: Required. Type of data source credential.Constant filled by + server. Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type data_source_credential_type: str or + ~azure.ai.metricsadvisor.models.DataSourceCredentialType + :ivar data_source_credential_id: Unique id of data source credential. + :vartype data_source_credential_id: str + :param data_source_credential_name: Required. Name of data source credential. + :type data_source_credential_name: str + :param data_source_credential_description: Description of data source credential. + :type data_source_credential_description: str + :param parameters: Required. 
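
The _subtype_map above is what lets msrest pick the concrete credential class from the dataSourceCredentialType discriminator during deserialization. A sketch, assuming msrest's standard Model.deserialize classmethod behaves as usual on these generated models:

from azure.ai.metricsadvisor.models import DataSourceCredential

raw = {
    "dataSourceCredentialType": "AzureSQLConnectionString",
    "dataSourceCredentialName": "sql-conn",
    "parameters": {"connectionString": "Server=tcp:myserver;..."},
}
cred = DataSourceCredential.deserialize(raw)
print(type(cred).__name__)  # AzureSQLConnectionStringCredential
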
+ :type parameters: ~azure.ai.metricsadvisor.models.AzureSQLConnectionStringParam + """ + + _validation = { + 'data_source_credential_type': {'required': True}, + 'data_source_credential_id': {'readonly': True}, + 'data_source_credential_name': {'required': True}, + 'parameters': {'required': True}, + } + + _attribute_map = { + 'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'}, + 'data_source_credential_id': {'key': 'dataSourceCredentialId', 'type': 'str'}, + 'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'}, + 'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': 'AzureSQLConnectionStringParam'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureSQLConnectionStringCredential, self).__init__(**kwargs) + self.data_source_credential_type = 'AzureSQLConnectionString' # type: str + self.parameters = kwargs['parameters'] + + +class DataSourceCredentialPatch(msrest.serialization.Model): + """DataSourceCredentialPatch. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureSQLConnectionStringCredentialPatch, DataLakeGen2SharedKeyCredentialPatch, ServicePrincipalCredentialPatch, ServicePrincipalInKVCredentialPatch. + + All required parameters must be populated in order to send to Azure. + + :param data_source_credential_type: Required. Type of data source credential.Constant filled by + server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", + "AzureDataExplorer", "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", + "Elasticsearch", "HttpRequest", "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". + :type data_source_credential_type: str or ~azure.ai.metricsadvisor.models.DataSourceType + :param data_source_credential_name: Name of data source credential. + :type data_source_credential_name: str + :param data_source_credential_description: Description of data source credential. + :type data_source_credential_description: str + """ + + _validation = { + 'data_source_credential_type': {'required': True}, + } + + _attribute_map = { + 'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'}, + 'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'}, + 'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'}, + } + + _subtype_map = { + 'data_source_credential_type': {'AzureSQLConnectionString': 'AzureSQLConnectionStringCredentialPatch', 'DataLakeGen2SharedKey': 'DataLakeGen2SharedKeyCredentialPatch', 'ServicePrincipal': 'ServicePrincipalCredentialPatch', 'ServicePrincipalInKV': 'ServicePrincipalInKVCredentialPatch'} + } + + def __init__( + self, + **kwargs + ): + super(DataSourceCredentialPatch, self).__init__(**kwargs) + self.data_source_credential_type = None # type: Optional[str] + self.data_source_credential_name = kwargs.get('data_source_credential_name', None) + self.data_source_credential_description = kwargs.get('data_source_credential_description', None) + + +class AzureSQLConnectionStringCredentialPatch(DataSourceCredentialPatch): + """AzureSQLConnectionStringCredentialPatch. + + All required parameters must be populated in order to send to Azure. + + :param data_source_credential_type: Required. Type of data source credential.Constant filled by + server. 
Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", + "AzureDataExplorer", "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", + "Elasticsearch", "HttpRequest", "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". + :type data_source_credential_type: str or ~azure.ai.metricsadvisor.models.DataSourceType + :param data_source_credential_name: Name of data source credential. + :type data_source_credential_name: str + :param data_source_credential_description: Description of data source credential. + :type data_source_credential_description: str + :param parameters: + :type parameters: ~azure.ai.metricsadvisor.models.AzureSQLConnectionStringParam + """ + + _validation = { + 'data_source_credential_type': {'required': True}, + } + + _attribute_map = { + 'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'}, + 'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'}, + 'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': 'AzureSQLConnectionStringParam'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureSQLConnectionStringCredentialPatch, self).__init__(**kwargs) + self.data_source_credential_type = 'AzureSQLConnectionString' # type: str + self.parameters = kwargs.get('parameters', None) + + +class AzureSQLConnectionStringParam(msrest.serialization.Model): + """AzureSQLConnectionStringParam. + + All required parameters must be populated in order to send to Azure. + + :param connection_string: Required. The connection string to access the Azure SQL. + :type connection_string: str + """ + + _validation = { + 'connection_string': {'required': True}, } _attribute_map = { - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'account_key': {'key': 'accountKey', 'type': 'str'}, - 'file_system_name': {'key': 'fileSystemName', 'type': 'str'}, - 'directory_template': {'key': 'directoryTemplate', 'type': 'str'}, - 'file_template': {'key': 'fileTemplate', 'type': 'str'}, + 'connection_string': {'key': 'connectionString', 'type': 'str'}, } def __init__( self, **kwargs ): - super(AzureDataLakeStorageGen2Parameter, self).__init__(**kwargs) - self.account_name = kwargs['account_name'] - self.account_key = kwargs['account_key'] - self.file_system_name = kwargs['file_system_name'] - self.directory_template = kwargs['directory_template'] - self.file_template = kwargs['file_template'] + super(AzureSQLConnectionStringParam, self).__init__(**kwargs) + self.connection_string = kwargs['connection_string'] class AzureTableDataFeed(DataFeedDetail): @@ -2402,8 +3016,8 @@ class AzureTableDataFeed(DataFeedDetail): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str @@ -2468,7 +3082,13 @@ class AzureTableDataFeed(DataFeedDetail): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. 
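
Creating the credential entity itself only needs a name and the parameters object; the id and type are server-populated readonly fields. A sketch under the same import assumption:

from azure.ai.metricsadvisor.models import (
    AzureSQLConnectionStringCredential,
    AzureSQLConnectionStringParam,
)

cred = AzureSQLConnectionStringCredential(
    data_source_credential_name="sql-conn",
    data_source_credential_description="connection string kept server-side",
    parameters=AzureSQLConnectionStringParam(
        connection_string="Server=tcp:myserver.database.windows.net;..."
    ),
)
print(cred.data_source_credential_type)  # constant: 'AzureSQLConnectionString'
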
:type action_link_template: str - :param data_source_parameter: + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: Required. :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureTableParameter """ @@ -2487,6 +3107,7 @@ class AzureTableDataFeed(DataFeedDetail): 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, } _attribute_map = { @@ -2518,6 +3139,8 @@ class AzureTableDataFeed(DataFeedDetail): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureTableParameter'}, } @@ -2527,7 +3150,7 @@ def __init__( ): super(AzureTableDataFeed, self).__init__(**kwargs) self.data_source_type = 'AzureTable' # type: str - self.data_source_parameter = kwargs.get('data_source_parameter', None) + self.data_source_parameter = kwargs['data_source_parameter'] class AzureTableDataFeedPatch(DataFeedDetailPatch): @@ -2537,8 +3160,8 @@ class AzureTableDataFeedPatch(DataFeedDetailPatch): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -2586,6 +3209,12 @@ class AzureTableDataFeedPatch(DataFeedDetailPatch): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. 
+ :type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureTableParameter """ @@ -2618,6 +3247,8 @@ class AzureTableDataFeedPatch(DataFeedDetailPatch): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureTableParameter'}, } @@ -2685,12 +3316,12 @@ class ChangePointFeedback(MetricFeedback): :type metric_id: str :param dimension_filter: Required. :type dimension_filter: ~azure.ai.metricsadvisor.models.FeedbackDimensionFilter - :param start_time: the start timestamp of feedback time range. + :param start_time: Required. the start timestamp of feedback time range. :type start_time: ~datetime.datetime - :param end_time: the end timestamp of feedback time range, when equals to startTime means only - one timestamp. + :param end_time: Required. the end timestamp of feedback time range, when equals to startTime + means only one timestamp. :type end_time: ~datetime.datetime - :param value: + :param value: Required. :type value: ~azure.ai.metricsadvisor.models.ChangePointFeedbackValue """ @@ -2701,6 +3332,9 @@ class ChangePointFeedback(MetricFeedback): 'user_principal': {'readonly': True}, 'metric_id': {'required': True}, 'dimension_filter': {'required': True}, + 'start_time': {'required': True}, + 'end_time': {'required': True}, + 'value': {'required': True}, } _attribute_map = { @@ -2721,9 +3355,9 @@ def __init__( ): super(ChangePointFeedback, self).__init__(**kwargs) self.feedback_type = 'ChangePoint' # type: str - self.start_time = kwargs.get('start_time', None) - self.end_time = kwargs.get('end_time', None) - self.value = kwargs.get('value', None) + self.start_time = kwargs['start_time'] + self.end_time = kwargs['end_time'] + self.value = kwargs['value'] class ChangePointFeedbackValue(msrest.serialization.Model): @@ -2826,7 +3460,7 @@ class CommentFeedback(MetricFeedback): :param end_time: the end timestamp of feedback time range, when equals to startTime means only one timestamp. :type end_time: ~datetime.datetime - :param value: + :param value: Required. :type value: ~azure.ai.metricsadvisor.models.CommentFeedbackValue """ @@ -2837,6 +3471,7 @@ class CommentFeedback(MetricFeedback): 'user_principal': {'readonly': True}, 'metric_id': {'required': True}, 'dimension_filter': {'required': True}, + 'value': {'required': True}, } _attribute_map = { @@ -2859,7 +3494,7 @@ def __init__( self.feedback_type = 'Comment' # type: str self.start_time = kwargs.get('start_time', None) self.end_time = kwargs.get('end_time', None) - self.value = kwargs.get('value', None) + self.value = kwargs['value'] class CommentFeedbackValue(msrest.serialization.Model): @@ -2949,6 +3584,145 @@ def __init__( self.value = None +class DataLakeGen2SharedKeyCredential(DataSourceCredential): + """DataLakeGen2SharedKeyCredential. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param data_source_credential_type: Required. Type of data source credential.Constant filled by + server. Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". 
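
ChangePointFeedback's start_time, end_time and value move into the required set, so its constructor now reads kwargs[...] for all three. A sketch; FeedbackDimensionFilter's dimension kwarg and ChangePointFeedbackValue's change_point_value kwarg are assumptions, since those definitions fall outside this hunk:

import datetime
from azure.ai.metricsadvisor.models import (
    ChangePointFeedback,
    ChangePointFeedbackValue,
    FeedbackDimensionFilter,
)

feedback = ChangePointFeedback(
    metric_id="00000000-0000-0000-0000-000000000000",
    dimension_filter=FeedbackDimensionFilter(dimension={"city": "Redmond"}),  # assumed kwarg
    start_time=datetime.datetime(2021, 1, 1),
    end_time=datetime.datetime(2021, 1, 2),  # equal to start_time would mean one timestamp
    value=ChangePointFeedbackValue(change_point_value="ChangePoint"),  # assumed kwarg
)
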
+ :type data_source_credential_type: str or + ~azure.ai.metricsadvisor.models.DataSourceCredentialType + :ivar data_source_credential_id: Unique id of data source credential. + :vartype data_source_credential_id: str + :param data_source_credential_name: Required. Name of data source credential. + :type data_source_credential_name: str + :param data_source_credential_description: Description of data source credential. + :type data_source_credential_description: str + :param parameters: Required. + :type parameters: ~azure.ai.metricsadvisor.models.DataLakeGen2SharedKeyParam + """ + + _validation = { + 'data_source_credential_type': {'required': True}, + 'data_source_credential_id': {'readonly': True}, + 'data_source_credential_name': {'required': True}, + 'parameters': {'required': True}, + } + + _attribute_map = { + 'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'}, + 'data_source_credential_id': {'key': 'dataSourceCredentialId', 'type': 'str'}, + 'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'}, + 'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': 'DataLakeGen2SharedKeyParam'}, + } + + def __init__( + self, + **kwargs + ): + super(DataLakeGen2SharedKeyCredential, self).__init__(**kwargs) + self.data_source_credential_type = 'DataLakeGen2SharedKey' # type: str + self.parameters = kwargs['parameters'] + + +class DataLakeGen2SharedKeyCredentialPatch(DataSourceCredentialPatch): + """DataLakeGen2SharedKeyCredentialPatch. + + All required parameters must be populated in order to send to Azure. + + :param data_source_credential_type: Required. Type of data source credential.Constant filled by + server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", + "AzureDataExplorer", "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", + "Elasticsearch", "HttpRequest", "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". + :type data_source_credential_type: str or ~azure.ai.metricsadvisor.models.DataSourceType + :param data_source_credential_name: Name of data source credential. + :type data_source_credential_name: str + :param data_source_credential_description: Description of data source credential. + :type data_source_credential_description: str + :param parameters: + :type parameters: ~azure.ai.metricsadvisor.models.DataLakeGen2SharedKeyParam + """ + + _validation = { + 'data_source_credential_type': {'required': True}, + } + + _attribute_map = { + 'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'}, + 'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'}, + 'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': 'DataLakeGen2SharedKeyParam'}, + } + + def __init__( + self, + **kwargs + ): + super(DataLakeGen2SharedKeyCredentialPatch, self).__init__(**kwargs) + self.data_source_credential_type = 'DataLakeGen2SharedKey' # type: str + self.parameters = kwargs.get('parameters', None) + + +class DataLakeGen2SharedKeyParam(msrest.serialization.Model): + """DataLakeGen2SharedKeyParam. + + All required parameters must be populated in order to send to Azure. + + :param account_key: Required. The account key to access the Azure Data Lake Storage Gen2. 
+ :type account_key: str + """ + + _validation = { + 'account_key': {'required': True}, + } + + _attribute_map = { + 'account_key': {'key': 'accountKey', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DataLakeGen2SharedKeyParam, self).__init__(**kwargs) + self.account_key = kwargs['account_key'] + + +class DataSourceCredentialList(msrest.serialization.Model): + """DataSourceCredentialList. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar next_link: + :vartype next_link: str + :ivar value: + :vartype value: list[~azure.ai.metricsadvisor.models.DataSourceCredential] + """ + + _validation = { + 'next_link': {'readonly': True}, + 'value': {'readonly': True, 'unique': True}, + } + + _attribute_map = { + 'next_link': {'key': '@nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[DataSourceCredential]'}, + } + + def __init__( + self, + **kwargs + ): + super(DataSourceCredentialList, self).__init__(**kwargs) + self.next_link = None + self.value = None + + class DetectionAnomalyFilterCondition(msrest.serialization.Model): """DetectionAnomalyFilterCondition. @@ -3211,8 +3985,8 @@ class ElasticsearchDataFeed(DataFeedDetail): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str @@ -3277,7 +4051,13 @@ class ElasticsearchDataFeed(DataFeedDetail): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. :type action_link_template: str - :param data_source_parameter: + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: Required. 
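Since data_source_parameter is required on ElasticsearchDataFeed, a feed has to be built with it up front. A rough sketch; the ElasticsearchParameter and Metric field names are assumptions, as neither model's fields appear in this diff:

from datetime import datetime
from azure.ai.metricsadvisor._generated import models  # import path assumed

feed = models.ElasticsearchDataFeed(
    data_feed_name="es-revenue",
    granularity_name="Daily",
    metrics=[models.Metric(metric_name="revenue")],  # field name assumed
    data_start_from=datetime(2021, 1, 1),
    # omitting data_source_parameter now raises KeyError in __init__
    data_source_parameter=models.ElasticsearchParameter(  # field names assumed
        host="<es-host>", port="9200", auth_header="<auth-header>", query="<query>"
    ),
    authentication_type="Basic",  # an AuthenticationTypeEnum value
    credential_id=None,           # or the id of a DataSourceCredential entity
)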
:type data_source_parameter: ~azure.ai.metricsadvisor.models.ElasticsearchParameter """ @@ -3296,6 +4076,7 @@ class ElasticsearchDataFeed(DataFeedDetail): 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, } _attribute_map = { @@ -3327,6 +4108,8 @@ class ElasticsearchDataFeed(DataFeedDetail): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'ElasticsearchParameter'}, } @@ -3336,7 +4119,7 @@ def __init__( ): super(ElasticsearchDataFeed, self).__init__(**kwargs) self.data_source_type = 'Elasticsearch' # type: str - self.data_source_parameter = kwargs.get('data_source_parameter', None) + self.data_source_parameter = kwargs['data_source_parameter'] class ElasticsearchDataFeedPatch(DataFeedDetailPatch): @@ -3346,8 +4129,8 @@ class ElasticsearchDataFeedPatch(DataFeedDetailPatch): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -3395,6 +4178,12 @@ class ElasticsearchDataFeedPatch(DataFeedDetailPatch): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.ElasticsearchParameter """ @@ -3427,6 +4216,8 @@ class ElasticsearchDataFeedPatch(DataFeedDetailPatch): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'ElasticsearchParameter'}, } @@ -3532,8 +4323,8 @@ def __init__( self.hook_type = None # type: Optional[str] self.hook_id = None self.hook_name = kwargs['hook_name'] - self.description = kwargs.get('description', None) - self.external_link = kwargs.get('external_link', None) + self.description = kwargs.get('description', "") + self.external_link = kwargs.get('external_link', "") self.admins = None @@ -3557,7 +4348,7 @@ class EmailHookInfo(HookInfo): :type external_link: str :ivar admins: hook administrators. 
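hook_parameter is mandatory for EmailHookInfo (read via kwargs['hook_parameter']), while description and external_link fall back to empty strings when omitted. A sketch, with the EmailHookParameter field name assumed:

from azure.ai.metricsadvisor._generated import models  # import path assumed

hook = models.EmailHookInfo(
    hook_name="on-call-email",
    # description/external_link default to "" rather than None when omitted
    hook_parameter=models.EmailHookParameter(to_list=["alerts@contoso.com"]),  # field assumed
)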
:vartype admins: list[str] - :param hook_parameter: + :param hook_parameter: Required. :type hook_parameter: ~azure.ai.metricsadvisor.models.EmailHookParameter """ @@ -3566,6 +4357,7 @@ class EmailHookInfo(HookInfo): 'hook_id': {'readonly': True}, 'hook_name': {'required': True}, 'admins': {'readonly': True, 'unique': True}, + 'hook_parameter': {'required': True}, } _attribute_map = { @@ -3584,7 +4376,7 @@ def __init__( ): super(EmailHookInfo, self).__init__(**kwargs) self.hook_type = 'Email' # type: str - self.hook_parameter = kwargs.get('hook_parameter', None) + self.hook_parameter = kwargs['hook_parameter'] class HookInfoPatch(msrest.serialization.Model): @@ -3935,8 +4727,8 @@ class HttpRequestDataFeed(DataFeedDetail): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str @@ -4001,7 +4793,13 @@ class HttpRequestDataFeed(DataFeedDetail): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. :type action_link_template: str - :param data_source_parameter: + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: Required. :type data_source_parameter: ~azure.ai.metricsadvisor.models.HttpRequestParameter """ @@ -4020,6 +4818,7 @@ class HttpRequestDataFeed(DataFeedDetail): 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, } _attribute_map = { @@ -4051,6 +4850,8 @@ class HttpRequestDataFeed(DataFeedDetail): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'HttpRequestParameter'}, } @@ -4060,7 +4861,7 @@ def __init__( ): super(HttpRequestDataFeed, self).__init__(**kwargs) self.data_source_type = 'HttpRequest' # type: str - self.data_source_parameter = kwargs.get('data_source_parameter', None) + self.data_source_parameter = kwargs['data_source_parameter'] class HttpRequestDataFeedPatch(DataFeedDetailPatch): @@ -4070,8 +4871,8 @@ class HttpRequestDataFeedPatch(DataFeedDetailPatch): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". 
+ "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -4119,6 +4920,12 @@ class HttpRequestDataFeedPatch(DataFeedDetailPatch): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.HttpRequestParameter """ @@ -4151,6 +4958,8 @@ class HttpRequestDataFeedPatch(DataFeedDetailPatch): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'HttpRequestParameter'}, } @@ -4217,16 +5026,24 @@ class IncidentProperty(msrest.serialization.Model): only return for alerting incident result. Possible values include: "Active", "Resolved". :vartype incident_status: str or ~azure.ai.metricsadvisor.models.IncidentStatus + :ivar value_of_root_node: value of the root node. + :vartype value_of_root_node: float + :ivar expected_value_of_root_node: expected value of the root node given by smart detector. + :vartype expected_value_of_root_node: float """ _validation = { 'max_severity': {'required': True}, 'incident_status': {'readonly': True}, + 'value_of_root_node': {'readonly': True}, + 'expected_value_of_root_node': {'readonly': True}, } _attribute_map = { 'max_severity': {'key': 'maxSeverity', 'type': 'str'}, 'incident_status': {'key': 'incidentStatus', 'type': 'str'}, + 'value_of_root_node': {'key': 'valueOfRootNode', 'type': 'float'}, + 'expected_value_of_root_node': {'key': 'expectedValueOfRootNode', 'type': 'float'}, } def __init__( @@ -4236,6 +5053,8 @@ def __init__( super(IncidentProperty, self).__init__(**kwargs) self.max_severity = kwargs['max_severity'] self.incident_status = None + self.value_of_root_node = None + self.expected_value_of_root_node = None class IncidentResult(msrest.serialization.Model): @@ -4340,8 +5159,8 @@ class InfluxDBDataFeed(DataFeedDetail): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. 
:vartype data_feed_id: str @@ -4406,7 +5225,13 @@ class InfluxDBDataFeed(DataFeedDetail): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. :type action_link_template: str - :param data_source_parameter: + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: Required. :type data_source_parameter: ~azure.ai.metricsadvisor.models.InfluxDBParameter """ @@ -4425,6 +5250,7 @@ class InfluxDBDataFeed(DataFeedDetail): 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, } _attribute_map = { @@ -4456,6 +5282,8 @@ class InfluxDBDataFeed(DataFeedDetail): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'InfluxDBParameter'}, } @@ -4465,7 +5293,7 @@ def __init__( ): super(InfluxDBDataFeed, self).__init__(**kwargs) self.data_source_type = 'InfluxDB' # type: str - self.data_source_parameter = kwargs.get('data_source_parameter', None) + self.data_source_parameter = kwargs['data_source_parameter'] class InfluxDBDataFeedPatch(DataFeedDetailPatch): @@ -4475,8 +5303,8 @@ class InfluxDBDataFeedPatch(DataFeedDetailPatch): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -4524,6 +5352,12 @@ class InfluxDBDataFeedPatch(DataFeedDetailPatch): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. 
+ :type credential_id: str
 :param data_source_parameter:
 :type data_source_parameter: ~azure.ai.metricsadvisor.models.InfluxDBParameter
 """
@@ -4556,6 +5390,8 @@ class InfluxDBDataFeedPatch(DataFeedDetailPatch):
 'viewers': {'key': 'viewers', 'type': '[str]'},
 'status': {'key': 'status', 'type': 'str'},
 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'},
+ 'authentication_type': {'key': 'authenticationType', 'type': 'str'},
+ 'credential_id': {'key': 'credentialId', 'type': 'str'},
 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'InfluxDBParameter'},
 }
@@ -4827,7 +5663,7 @@ def __init__(
 super(MetricAlertingConfiguration, self).__init__(**kwargs)
 self.anomaly_detection_configuration_id = kwargs['anomaly_detection_configuration_id']
 self.anomaly_scope_type = kwargs['anomaly_scope_type']
- self.negation_operation = kwargs.get('negation_operation', None)
+ self.negation_operation = kwargs.get('negation_operation', False)
 self.dimension_anomaly_scope = kwargs.get('dimension_anomaly_scope', None)
 self.top_n_anomaly_scope = kwargs.get('top_n_anomaly_scope', None)
 self.severity_filter = kwargs.get('severity_filter', None)
@@ -4899,11 +5735,11 @@ class MetricDataQueryOptions(msrest.serialization.Model):
 All required parameters must be populated in order to send to Azure.
- :param start_time: Required. start time of query a time series data, and format should be yyyy-
- MM-ddThh:mm:ssZ.
+ :param start_time: Required. start time of query a time series data, and format should be
+ yyyy-MM-ddThh:mm:ssZ.
 :type start_time: ~datetime.datetime
- :param end_time: Required. start time of query a time series data, and format should be yyyy-
- MM-ddThh:mm:ssZ.
+ :param end_time: Required. end time of query a time series data, and format should be
+ yyyy-MM-ddThh:mm:ssZ.
 :type end_time: ~datetime.datetime
 :param series: Required. query specific series.
 :type series: list[dict[str, str]]
@@ -5166,8 +6002,8 @@ class MongoDBDataFeed(DataFeedDetail):
 :param data_source_type: Required. data source type.Constant filled by server. Possible values
 include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer",
- "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB",
- "MongoDB", "MySql", "PostgreSql", "SqlServer".
+ "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest",
+ "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer".
 :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType
 :ivar data_feed_id: data feed unique id.
 :vartype data_feed_id: str
@@ -5232,7 +6068,13 @@
 :vartype created_time: ~datetime.datetime
 :param action_link_template: action link for alert.
 :type action_link_template: str
- :param data_source_parameter:
+ :param authentication_type: authentication type for corresponding data source. Possible values
+ include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey",
+ "ServicePrincipal", "ServicePrincipalInKV".
+ :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum
+ :param credential_id: The credential entity id.
+ :type credential_id: str
+ :param data_source_parameter: Required.
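All three MetricDataQueryOptions fields are shown above, so a construction sketch is well determined (only the import path is assumed); msrest serializes the datetimes into the documented yyyy-MM-ddThh:mm:ssZ form:

from datetime import datetime
from azure.ai.metricsadvisor._generated import models  # import path assumed

options = models.MetricDataQueryOptions(
    start_time=datetime(2021, 1, 1),
    end_time=datetime(2021, 2, 1),
    # each dict picks one concrete series by its dimension values
    series=[{"city": "Seattle", "category": "Shoes"}],
)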
:type data_source_parameter: ~azure.ai.metricsadvisor.models.MongoDBParameter """ @@ -5251,6 +6093,7 @@ class MongoDBDataFeed(DataFeedDetail): 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, } _attribute_map = { @@ -5282,6 +6125,8 @@ class MongoDBDataFeed(DataFeedDetail): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'MongoDBParameter'}, } @@ -5291,7 +6136,7 @@ def __init__( ): super(MongoDBDataFeed, self).__init__(**kwargs) self.data_source_type = 'MongoDB' # type: str - self.data_source_parameter = kwargs.get('data_source_parameter', None) + self.data_source_parameter = kwargs['data_source_parameter'] class MongoDBDataFeedPatch(DataFeedDetailPatch): @@ -5301,8 +6146,8 @@ class MongoDBDataFeedPatch(DataFeedDetailPatch): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -5350,6 +6195,12 @@ class MongoDBDataFeedPatch(DataFeedDetailPatch): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.MongoDBParameter """ @@ -5382,6 +6233,8 @@ class MongoDBDataFeedPatch(DataFeedDetailPatch): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'MongoDBParameter'}, } @@ -5438,8 +6291,8 @@ class MySqlDataFeed(DataFeedDetail): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". 
:type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str @@ -5504,7 +6357,13 @@ class MySqlDataFeed(DataFeedDetail): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. :type action_link_template: str - :param data_source_parameter: + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: Required. :type data_source_parameter: ~azure.ai.metricsadvisor.models.SqlSourceParameter """ @@ -5523,6 +6382,7 @@ class MySqlDataFeed(DataFeedDetail): 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, } _attribute_map = { @@ -5554,6 +6414,8 @@ class MySqlDataFeed(DataFeedDetail): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'SqlSourceParameter'}, } @@ -5563,7 +6425,7 @@ def __init__( ): super(MySqlDataFeed, self).__init__(**kwargs) self.data_source_type = 'MySql' # type: str - self.data_source_parameter = kwargs.get('data_source_parameter', None) + self.data_source_parameter = kwargs['data_source_parameter'] class MySqlDataFeedPatch(DataFeedDetailPatch): @@ -5573,8 +6435,8 @@ class MySqlDataFeedPatch(DataFeedDetailPatch): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -5622,6 +6484,12 @@ class MySqlDataFeedPatch(DataFeedDetailPatch): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. 
+ :type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.SqlSourceParameter """ @@ -5654,6 +6522,8 @@ class MySqlDataFeedPatch(DataFeedDetailPatch): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'SqlSourceParameter'}, } @@ -5686,7 +6556,7 @@ class PeriodFeedback(MetricFeedback): :type metric_id: str :param dimension_filter: Required. :type dimension_filter: ~azure.ai.metricsadvisor.models.FeedbackDimensionFilter - :param value: + :param value: Required. :type value: ~azure.ai.metricsadvisor.models.PeriodFeedbackValue """ @@ -5697,6 +6567,7 @@ class PeriodFeedback(MetricFeedback): 'user_principal': {'readonly': True}, 'metric_id': {'required': True}, 'dimension_filter': {'required': True}, + 'value': {'required': True}, } _attribute_map = { @@ -5715,7 +6586,7 @@ def __init__( ): super(PeriodFeedback, self).__init__(**kwargs) self.feedback_type = 'Period' # type: str - self.value = kwargs.get('value', None) + self.value = kwargs['value'] class PeriodFeedbackValue(msrest.serialization.Model): @@ -5759,8 +6630,8 @@ class PostgreSqlDataFeed(DataFeedDetail): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str @@ -5825,7 +6696,13 @@ class PostgreSqlDataFeed(DataFeedDetail): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. :type action_link_template: str - :param data_source_parameter: + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: Required. 
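PeriodFeedback requires value at construction time. A sketch; the FeedbackDimensionFilter and PeriodFeedbackValue field names are assumptions, since neither model's fields appear in this section:

from azure.ai.metricsadvisor._generated import models  # import path assumed

feedback = models.PeriodFeedback(
    metric_id="<metric-id>",
    dimension_filter=models.FeedbackDimensionFilter(dimension={"city": "Seattle"}),  # field assumed
    # omitting value now raises KeyError in __init__
    value=models.PeriodFeedbackValue(period_type="AssignValue", period_value=7),  # fields assumed
)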
:type data_source_parameter: ~azure.ai.metricsadvisor.models.SqlSourceParameter """ @@ -5844,6 +6721,7 @@ class PostgreSqlDataFeed(DataFeedDetail): 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, } _attribute_map = { @@ -5875,6 +6753,8 @@ class PostgreSqlDataFeed(DataFeedDetail): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'SqlSourceParameter'}, } @@ -5884,7 +6764,7 @@ def __init__( ): super(PostgreSqlDataFeed, self).__init__(**kwargs) self.data_source_type = 'PostgreSql' # type: str - self.data_source_parameter = kwargs.get('data_source_parameter', None) + self.data_source_parameter = kwargs['data_source_parameter'] class PostgreSqlDataFeedPatch(DataFeedDetailPatch): @@ -5894,8 +6774,8 @@ class PostgreSqlDataFeedPatch(DataFeedDetailPatch): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -5943,6 +6823,12 @@ class PostgreSqlDataFeedPatch(DataFeedDetailPatch): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.SqlSourceParameter """ @@ -5975,6 +6861,8 @@ class PostgreSqlDataFeedPatch(DataFeedDetailPatch): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'SqlSourceParameter'}, } @@ -6209,6 +7097,261 @@ def __init__( self.value = kwargs['value'] +class ServicePrincipalCredential(DataSourceCredential): + """ServicePrincipalCredential. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param data_source_credential_type: Required. Type of data source credential.Constant filled by + server. Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". 
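A construction sketch for the plain service-principal credential; the three ServicePrincipalParam fields are all documented further below, so only the import path is assumed:

from azure.ai.metricsadvisor._generated import models  # import path assumed

sp_credential = models.ServicePrincipalCredential(
    data_source_credential_name="sqlserver-service-principal",
    parameters=models.ServicePrincipalParam(
        client_id="<aad-app-client-id>",
        client_secret="<aad-app-client-secret>",
        tenant_id="<aad-tenant-id>",
    ),
)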
+ :type data_source_credential_type: str or
+ ~azure.ai.metricsadvisor.models.DataSourceCredentialType
+ :ivar data_source_credential_id: Unique id of data source credential.
+ :vartype data_source_credential_id: str
+ :param data_source_credential_name: Required. Name of data source credential.
+ :type data_source_credential_name: str
+ :param data_source_credential_description: Description of data source credential.
+ :type data_source_credential_description: str
+ :param parameters: Required.
+ :type parameters: ~azure.ai.metricsadvisor.models.ServicePrincipalParam
+ """
+
+ _validation = {
+ 'data_source_credential_type': {'required': True},
+ 'data_source_credential_id': {'readonly': True},
+ 'data_source_credential_name': {'required': True},
+ 'parameters': {'required': True},
+ }
+
+ _attribute_map = {
+ 'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'},
+ 'data_source_credential_id': {'key': 'dataSourceCredentialId', 'type': 'str'},
+ 'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'},
+ 'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': 'ServicePrincipalParam'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ServicePrincipalCredential, self).__init__(**kwargs)
+ self.data_source_credential_type = 'ServicePrincipal' # type: str
+ self.parameters = kwargs['parameters']
+
+
+class ServicePrincipalCredentialPatch(DataSourceCredentialPatch):
+ """ServicePrincipalCredentialPatch.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param data_source_credential_type: Required. Type of data source credential.Constant filled by
+ server. Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey",
+ "ServicePrincipal", "ServicePrincipalInKV".
+ :type data_source_credential_type: str or
+ ~azure.ai.metricsadvisor.models.DataSourceCredentialType
+ :param data_source_credential_name: Name of data source credential.
+ :type data_source_credential_name: str
+ :param data_source_credential_description: Description of data source credential.
+ :type data_source_credential_description: str
+ :param parameters:
+ :type parameters: ~azure.ai.metricsadvisor.models.ServicePrincipalParam
+ """
+
+ _validation = {
+ 'data_source_credential_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'},
+ 'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'},
+ 'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': 'ServicePrincipalParam'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ServicePrincipalCredentialPatch, self).__init__(**kwargs)
+ self.data_source_credential_type = 'ServicePrincipal' # type: str
+ self.parameters = kwargs.get('parameters', None)
+
+
+class ServicePrincipalInKVCredential(DataSourceCredential):
+ """ServicePrincipalInKVCredential.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param data_source_credential_type: Required. Type of data source credential.Constant filled by
+ server. Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey",
+ "ServicePrincipal", "ServicePrincipalInKV".
+ :type data_source_credential_type: str or
+ ~azure.ai.metricsadvisor.models.DataSourceCredentialType
+ :ivar data_source_credential_id: Unique id of data source credential.
+ :vartype data_source_credential_id: str
+ :param data_source_credential_name: Required. Name of data source credential.
+ :type data_source_credential_name: str
+ :param data_source_credential_description: Description of data source credential.
+ :type data_source_credential_description: str
+ :param parameters: Required.
+ :type parameters: ~azure.ai.metricsadvisor.models.ServicePrincipalInKVParam
+ """
+
+ _validation = {
+ 'data_source_credential_type': {'required': True},
+ 'data_source_credential_id': {'readonly': True},
+ 'data_source_credential_name': {'required': True},
+ 'parameters': {'required': True},
+ }
+
+ _attribute_map = {
+ 'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'},
+ 'data_source_credential_id': {'key': 'dataSourceCredentialId', 'type': 'str'},
+ 'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'},
+ 'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': 'ServicePrincipalInKVParam'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ServicePrincipalInKVCredential, self).__init__(**kwargs)
+ self.data_source_credential_type = 'ServicePrincipalInKV' # type: str
+ self.parameters = kwargs['parameters']
+
+
+class ServicePrincipalInKVCredentialPatch(DataSourceCredentialPatch):
+ """ServicePrincipalInKVCredentialPatch.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param data_source_credential_type: Required. Type of data source credential.Constant filled by
+ server. Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey",
+ "ServicePrincipal", "ServicePrincipalInKV".
+ :type data_source_credential_type: str or
+ ~azure.ai.metricsadvisor.models.DataSourceCredentialType
+ :param data_source_credential_name: Name of data source credential.
+ :type data_source_credential_name: str
+ :param data_source_credential_description: Description of data source credential.
+ :type data_source_credential_description: str
+ :param parameters:
+ :type parameters: ~azure.ai.metricsadvisor.models.ServicePrincipalInKVParam
+ """
+
+ _validation = {
+ 'data_source_credential_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'},
+ 'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'},
+ 'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': 'ServicePrincipalInKVParam'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ServicePrincipalInKVCredentialPatch, self).__init__(**kwargs)
+ self.data_source_credential_type = 'ServicePrincipalInKV' # type: str
+ self.parameters = kwargs.get('parameters', None)
+
+
+class ServicePrincipalInKVParam(msrest.serialization.Model):
+ """ServicePrincipalInKVParam.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param key_vault_endpoint: Required. The Key Vault endpoint that stores the service principal.
+ :type key_vault_endpoint: str + :param key_vault_client_id: Required. The Client Id to access the Key Vault. + :type key_vault_client_id: str + :param key_vault_client_secret: Required. The Client Secret to access the Key Vault. + :type key_vault_client_secret: str + :param service_principal_id_name_in_kv: Required. The secret name of the service principal's + client Id in the Key Vault. + :type service_principal_id_name_in_kv: str + :param service_principal_secret_name_in_kv: Required. The secret name of the service + principal's client secret in the Key Vault. + :type service_principal_secret_name_in_kv: str + :param tenant_id: Required. The tenant id of your service principal. + :type tenant_id: str + """ + + _validation = { + 'key_vault_endpoint': {'required': True}, + 'key_vault_client_id': {'required': True}, + 'key_vault_client_secret': {'required': True}, + 'service_principal_id_name_in_kv': {'required': True}, + 'service_principal_secret_name_in_kv': {'required': True}, + 'tenant_id': {'required': True}, + } + + _attribute_map = { + 'key_vault_endpoint': {'key': 'keyVaultEndpoint', 'type': 'str'}, + 'key_vault_client_id': {'key': 'keyVaultClientId', 'type': 'str'}, + 'key_vault_client_secret': {'key': 'keyVaultClientSecret', 'type': 'str'}, + 'service_principal_id_name_in_kv': {'key': 'servicePrincipalIdNameInKV', 'type': 'str'}, + 'service_principal_secret_name_in_kv': {'key': 'servicePrincipalSecretNameInKV', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ServicePrincipalInKVParam, self).__init__(**kwargs) + self.key_vault_endpoint = kwargs['key_vault_endpoint'] + self.key_vault_client_id = kwargs['key_vault_client_id'] + self.key_vault_client_secret = kwargs['key_vault_client_secret'] + self.service_principal_id_name_in_kv = kwargs['service_principal_id_name_in_kv'] + self.service_principal_secret_name_in_kv = kwargs['service_principal_secret_name_in_kv'] + self.tenant_id = kwargs['tenant_id'] + + +class ServicePrincipalParam(msrest.serialization.Model): + """ServicePrincipalParam. + + All required parameters must be populated in order to send to Azure. + + :param client_id: Required. The client id of the service principal. + :type client_id: str + :param client_secret: Required. The client secret of the service principal. + :type client_secret: str + :param tenant_id: Required. The tenant id of the service principal. + :type tenant_id: str + """ + + _validation = { + 'client_id': {'required': True}, + 'client_secret': {'required': True}, + 'tenant_id': {'required': True}, + } + + _attribute_map = { + 'client_id': {'key': 'clientId', 'type': 'str'}, + 'client_secret': {'key': 'clientSecret', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ServicePrincipalParam, self).__init__(**kwargs) + self.client_id = kwargs['client_id'] + self.client_secret = kwargs['client_secret'] + self.tenant_id = kwargs['tenant_id'] + + class SeverityCondition(msrest.serialization.Model): """SeverityCondition. @@ -6317,8 +7460,8 @@ class SQLServerDataFeed(DataFeedDetail): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". 
+ "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str @@ -6383,7 +7526,13 @@ class SQLServerDataFeed(DataFeedDetail): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. :type action_link_template: str - :param data_source_parameter: + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: Required. :type data_source_parameter: ~azure.ai.metricsadvisor.models.SqlSourceParameter """ @@ -6402,6 +7551,7 @@ class SQLServerDataFeed(DataFeedDetail): 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, } _attribute_map = { @@ -6433,6 +7583,8 @@ class SQLServerDataFeed(DataFeedDetail): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'SqlSourceParameter'}, } @@ -6442,7 +7594,7 @@ def __init__( ): super(SQLServerDataFeed, self).__init__(**kwargs) self.data_source_type = 'SqlServer' # type: str - self.data_source_parameter = kwargs.get('data_source_parameter', None) + self.data_source_parameter = kwargs['data_source_parameter'] class SQLServerDataFeedPatch(DataFeedDetailPatch): @@ -6452,8 +7604,8 @@ class SQLServerDataFeedPatch(DataFeedDetailPatch): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -6501,6 +7653,12 @@ class SQLServerDataFeedPatch(DataFeedDetailPatch): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. 
+ :type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.SqlSourceParameter """ @@ -6533,6 +7691,8 @@ class SQLServerDataFeedPatch(DataFeedDetailPatch): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'SqlSourceParameter'}, } @@ -6550,14 +7710,13 @@ class SqlSourceParameter(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param connection_string: Required. Database connection string. + :param connection_string: Database connection string. :type connection_string: str :param query: Required. Query script. :type query: str """ _validation = { - 'connection_string': {'required': True}, 'query': {'required': True}, } @@ -6571,7 +7730,7 @@ def __init__( **kwargs ): super(SqlSourceParameter, self).__init__(**kwargs) - self.connection_string = kwargs['connection_string'] + self.connection_string = kwargs.get('connection_string', None) self.query = kwargs['query'] @@ -6703,6 +7862,9 @@ class ValueCondition(msrest.serialization.Model): :param direction: Required. value filter direction. Possible values include: "Both", "Down", "Up". :type direction: str or ~azure.ai.metricsadvisor.models.Direction + :param type: data used to implement value filter. Possible values include: "Value", "Mean". + Default value: "Value". + :type type: str or ~azure.ai.metricsadvisor.models.ValueType :param metric_id: the other metric unique id used for value filter. :type metric_id: str :param trigger_for_missing: trigger alert when the corresponding point is missing in the other @@ -6720,6 +7882,7 @@ class ValueCondition(msrest.serialization.Model): 'lower': {'key': 'lower', 'type': 'float'}, 'upper': {'key': 'upper', 'type': 'float'}, 'direction': {'key': 'direction', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, 'metric_id': {'key': 'metricId', 'type': 'str'}, 'trigger_for_missing': {'key': 'triggerForMissing', 'type': 'bool'}, } @@ -6732,6 +7895,7 @@ def __init__( self.lower = kwargs.get('lower', None) self.upper = kwargs.get('upper', None) self.direction = kwargs['direction'] + self.type = kwargs.get('type', "Value") self.metric_id = kwargs.get('metric_id', None) self.trigger_for_missing = kwargs.get('trigger_for_missing', None) @@ -6756,7 +7920,7 @@ class WebhookHookInfo(HookInfo): :type external_link: str :ivar admins: hook administrators. :vartype admins: list[str] - :param hook_parameter: + :param hook_parameter: Required. 
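The new type field on ValueCondition defaults to "Value"; "Mean" presumably applies the filter to the series mean rather than the raw point value. A sketch (import path assumed):

from azure.ai.metricsadvisor._generated import models  # import path assumed

condition = models.ValueCondition(
    lower=10.0,
    upper=100.0,
    direction="Both",  # required; "Both", "Down" or "Up"
    type="Mean",       # optional; defaults to "Value"
)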
:type hook_parameter: ~azure.ai.metricsadvisor.models.WebhookHookParameter """ @@ -6765,6 +7929,7 @@ class WebhookHookInfo(HookInfo): 'hook_id': {'readonly': True}, 'hook_name': {'required': True}, 'admins': {'readonly': True, 'unique': True}, + 'hook_parameter': {'required': True}, } _attribute_map = { @@ -6783,7 +7948,7 @@ def __init__( ): super(WebhookHookInfo, self).__init__(**kwargs) self.hook_type = 'Webhook' # type: str - self.hook_parameter = kwargs.get('hook_parameter', None) + self.hook_parameter = kwargs['hook_parameter'] class WebhookHookInfoPatch(HookInfoPatch): diff --git a/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/models/_models_py3.py b/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/models/_models_py3.py index 4e6b138da926..f8d3d68e7a5b 100644 --- a/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/models/_models_py3.py +++ b/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/models/_models_py3.py @@ -187,6 +187,8 @@ class AnomalyAlertingConfiguration(msrest.serialization.Model): include: "AND", "OR", "XOR". :type cross_metrics_operator: str or ~azure.ai.metricsadvisor.models.AnomalyAlertingConfigurationLogicType + :param split_alert_by_dimensions: dimensions used to split alert. + :type split_alert_by_dimensions: list[str] :param hook_ids: Required. hook unique ids. :type hook_ids: list[str] :param metric_alerting_configurations: Required. Anomaly alerting configurations. @@ -197,6 +199,7 @@ class AnomalyAlertingConfiguration(msrest.serialization.Model): _validation = { 'anomaly_alerting_configuration_id': {'readonly': True}, 'name': {'required': True}, + 'split_alert_by_dimensions': {'unique': True}, 'hook_ids': {'required': True, 'unique': True}, 'metric_alerting_configurations': {'required': True, 'unique': True}, } @@ -206,6 +209,7 @@ class AnomalyAlertingConfiguration(msrest.serialization.Model): 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'cross_metrics_operator': {'key': 'crossMetricsOperator', 'type': 'str'}, + 'split_alert_by_dimensions': {'key': 'splitAlertByDimensions', 'type': '[str]'}, 'hook_ids': {'key': 'hookIds', 'type': '[str]'}, 'metric_alerting_configurations': {'key': 'metricAlertingConfigurations', 'type': '[MetricAlertingConfiguration]'}, } @@ -216,8 +220,9 @@ def __init__( name: str, hook_ids: List[str], metric_alerting_configurations: List["MetricAlertingConfiguration"], - description: Optional[str] = None, + description: Optional[str] = "", cross_metrics_operator: Optional[Union[str, "AnomalyAlertingConfigurationLogicType"]] = None, + split_alert_by_dimensions: Optional[List[str]] = None, **kwargs ): super(AnomalyAlertingConfiguration, self).__init__(**kwargs) @@ -225,6 +230,7 @@ def __init__( self.name = name self.description = description self.cross_metrics_operator = cross_metrics_operator + self.split_alert_by_dimensions = split_alert_by_dimensions self.hook_ids = hook_ids self.metric_alerting_configurations = metric_alerting_configurations @@ -267,6 +273,8 @@ class AnomalyAlertingConfigurationPatch(msrest.serialization.Model): "XOR". :type cross_metrics_operator: str or ~azure.ai.metricsadvisor.models.AnomalyAlertingConfigurationLogicType + :param split_alert_by_dimensions: dimensions used to split alert. + :type split_alert_by_dimensions: list[str] :param hook_ids: hook unique ids. 
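split_alert_by_dimensions (a unique-item list) lets one configuration fan out alerts per dimension value. A sketch using the py3 keyword signature above; the anomaly_scope_type value is an assumption, as that enum is not listed in this diff:

from azure.ai.metricsadvisor._generated import models  # import path assumed

config = models.AnomalyAlertingConfiguration(
    name="alert-split-by-city",
    hook_ids=["<hook-id>"],
    metric_alerting_configurations=[
        models.MetricAlertingConfiguration(
            anomaly_detection_configuration_id="<detection-config-id>",
            anomaly_scope_type="All",  # enum value assumed
        )
    ],
    split_alert_by_dimensions=["city"],  # presumably one alert per city value
)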
:type hook_ids: list[str] :param metric_alerting_configurations: Anomaly alerting configurations. @@ -275,6 +283,7 @@ class AnomalyAlertingConfigurationPatch(msrest.serialization.Model): """ _validation = { + 'split_alert_by_dimensions': {'unique': True}, 'hook_ids': {'unique': True}, 'metric_alerting_configurations': {'unique': True}, } @@ -283,6 +292,7 @@ class AnomalyAlertingConfigurationPatch(msrest.serialization.Model): 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'cross_metrics_operator': {'key': 'crossMetricsOperator', 'type': 'str'}, + 'split_alert_by_dimensions': {'key': 'splitAlertByDimensions', 'type': '[str]'}, 'hook_ids': {'key': 'hookIds', 'type': '[str]'}, 'metric_alerting_configurations': {'key': 'metricAlertingConfigurations', 'type': '[MetricAlertingConfiguration]'}, } @@ -291,8 +301,9 @@ def __init__( self, *, name: Optional[str] = None, - description: Optional[str] = None, + description: Optional[str] = "", cross_metrics_operator: Optional[Union[str, "AnomalyAlertingConfigurationLogicType"]] = None, + split_alert_by_dimensions: Optional[List[str]] = None, hook_ids: Optional[List[str]] = None, metric_alerting_configurations: Optional[List["MetricAlertingConfiguration"]] = None, **kwargs @@ -301,6 +312,7 @@ def __init__( self.name = name self.description = description self.cross_metrics_operator = cross_metrics_operator + self.split_alert_by_dimensions = split_alert_by_dimensions self.hook_ids = hook_ids self.metric_alerting_configurations = metric_alerting_configurations @@ -354,7 +366,7 @@ def __init__( name: str, metric_id: str, whole_metric_configuration: "WholeMetricConfiguration", - description: Optional[str] = None, + description: Optional[str] = "", dimension_group_override_configurations: Optional[List["DimensionGroupConfiguration"]] = None, series_override_configurations: Optional[List["SeriesConfiguration"]] = None, **kwargs @@ -429,7 +441,7 @@ def __init__( self, *, name: Optional[str] = None, - description: Optional[str] = None, + description: Optional[str] = "", whole_metric_configuration: Optional["WholeMetricConfiguration"] = None, dimension_group_override_configurations: Optional[List["DimensionGroupConfiguration"]] = None, series_override_configurations: Optional[List["SeriesConfiguration"]] = None, @@ -604,12 +616,12 @@ class AnomalyFeedback(MetricFeedback): :type metric_id: str :param dimension_filter: Required. :type dimension_filter: ~azure.ai.metricsadvisor.models.FeedbackDimensionFilter - :param start_time: the start timestamp of feedback time range. + :param start_time: Required. the start timestamp of feedback time range. :type start_time: ~datetime.datetime - :param end_time: the end timestamp of feedback time range, when equals to startTime means only - one timestamp. + :param end_time: Required. the end timestamp of feedback time range, when equals to startTime + means only one timestamp. :type end_time: ~datetime.datetime - :param value: + :param value: Required. :type value: ~azure.ai.metricsadvisor.models.AnomalyFeedbackValue :param anomaly_detection_configuration_id: the corresponding anomaly detection configuration of this feedback. 
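AnomalyFeedback's py3 signature makes start_time, end_time and value required keywords, as the next hunk shows. A sketch; the FeedbackDimensionFilter and AnomalyFeedbackValue field names are assumptions:

from datetime import datetime
from azure.ai.metricsadvisor._generated import models  # import path assumed

feedback = models.AnomalyFeedback(
    metric_id="<metric-id>",
    dimension_filter=models.FeedbackDimensionFilter(dimension={"city": "Seattle"}),  # field assumed
    start_time=datetime(2021, 1, 5),
    end_time=datetime(2021, 1, 5),  # equal to start_time: a single timestamp
    value=models.AnomalyFeedbackValue(anomaly_value="NotAnomaly"),  # field/value assumed
)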
@@ -626,6 +638,9 @@ class AnomalyFeedback(MetricFeedback): 'user_principal': {'readonly': True}, 'metric_id': {'required': True}, 'dimension_filter': {'required': True}, + 'start_time': {'required': True}, + 'end_time': {'required': True}, + 'value': {'required': True}, } _attribute_map = { @@ -647,9 +662,9 @@ def __init__( *, metric_id: str, dimension_filter: "FeedbackDimensionFilter", - start_time: Optional[datetime.datetime] = None, - end_time: Optional[datetime.datetime] = None, - value: Optional["AnomalyFeedbackValue"] = None, + start_time: datetime.datetime, + end_time: datetime.datetime, + value: "AnomalyFeedbackValue", anomaly_detection_configuration_id: Optional[str] = None, anomaly_detection_configuration_snapshot: Optional["AnomalyDetectionConfiguration"] = None, **kwargs @@ -705,16 +720,24 @@ class AnomalyProperty(msrest.serialization.Model): only return for alerting anomaly result. Possible values include: "Active", "Resolved". :vartype anomaly_status: str or ~azure.ai.metricsadvisor.models.AnomalyStatus + :ivar value: value of the anomaly. + :vartype value: float + :ivar expected_value: expected value of the anomaly given by smart detector. + :vartype expected_value: float """ _validation = { 'anomaly_severity': {'required': True}, 'anomaly_status': {'readonly': True}, + 'value': {'readonly': True}, + 'expected_value': {'readonly': True}, } _attribute_map = { 'anomaly_severity': {'key': 'anomalySeverity', 'type': 'str'}, 'anomaly_status': {'key': 'anomalyStatus', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'float'}, + 'expected_value': {'key': 'expectedValue', 'type': 'float'}, } def __init__( @@ -726,6 +749,8 @@ def __init__( super(AnomalyProperty, self).__init__(**kwargs) self.anomaly_severity = anomaly_severity self.anomaly_status = None + self.value = None + self.expected_value = None class AnomalyResult(msrest.serialization.Model): @@ -835,7 +860,7 @@ class DataFeedDetail(msrest.serialization.Model): """DataFeedDetail. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureApplicationInsightsDataFeed, AzureBlobDataFeed, AzureCosmosDBDataFeed, AzureDataExplorerDataFeed, AzureDataLakeStorageGen2DataFeed, AzureTableDataFeed, ElasticsearchDataFeed, HttpRequestDataFeed, InfluxDBDataFeed, MongoDBDataFeed, MySqlDataFeed, PostgreSqlDataFeed, SQLServerDataFeed. + sub-classes are: AzureApplicationInsightsDataFeed, AzureBlobDataFeed, AzureCosmosDBDataFeed, AzureDataExplorerDataFeed, AzureDataLakeStorageGen2DataFeed, AzureEventHubsDataFeed, AzureTableDataFeed, ElasticsearchDataFeed, HttpRequestDataFeed, InfluxDBDataFeed, MongoDBDataFeed, MySqlDataFeed, PostgreSqlDataFeed, SQLServerDataFeed. Variables are only populated by the server, and will be ignored when sending a request. @@ -843,8 +868,8 @@ class DataFeedDetail(msrest.serialization.Model): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. 
:vartype data_feed_id: str @@ -909,6 +934,12 @@ class DataFeedDetail(msrest.serialization.Model): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str """ _validation = { @@ -957,10 +988,12 @@ class DataFeedDetail(msrest.serialization.Model): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, } _subtype_map = { - 'data_source_type': {'AzureApplicationInsights': 'AzureApplicationInsightsDataFeed', 'AzureBlob': 'AzureBlobDataFeed', 'AzureCosmosDB': 'AzureCosmosDBDataFeed', 'AzureDataExplorer': 'AzureDataExplorerDataFeed', 'AzureDataLakeStorageGen2': 'AzureDataLakeStorageGen2DataFeed', 'AzureTable': 'AzureTableDataFeed', 'Elasticsearch': 'ElasticsearchDataFeed', 'HttpRequest': 'HttpRequestDataFeed', 'InfluxDB': 'InfluxDBDataFeed', 'MongoDB': 'MongoDBDataFeed', 'MySql': 'MySqlDataFeed', 'PostgreSql': 'PostgreSqlDataFeed', 'SqlServer': 'SQLServerDataFeed'} + 'data_source_type': {'AzureApplicationInsights': 'AzureApplicationInsightsDataFeed', 'AzureBlob': 'AzureBlobDataFeed', 'AzureCosmosDB': 'AzureCosmosDBDataFeed', 'AzureDataExplorer': 'AzureDataExplorerDataFeed', 'AzureDataLakeStorageGen2': 'AzureDataLakeStorageGen2DataFeed', 'AzureEventHubs': 'AzureEventHubsDataFeed', 'AzureTable': 'AzureTableDataFeed', 'Elasticsearch': 'ElasticsearchDataFeed', 'HttpRequest': 'HttpRequestDataFeed', 'InfluxDB': 'InfluxDBDataFeed', 'MongoDB': 'MongoDBDataFeed', 'MySql': 'MySqlDataFeed', 'PostgreSql': 'PostgreSqlDataFeed', 'SqlServer': 'SQLServerDataFeed'} } def __init__( @@ -970,10 +1003,10 @@ def __init__( granularity_name: Union[str, "Granularity"], metrics: List["Metric"], data_start_from: datetime.datetime, - data_feed_description: Optional[str] = None, + data_feed_description: Optional[str] = "", granularity_amount: Optional[int] = None, dimension: Optional[List["Dimension"]] = None, - timestamp_column: Optional[str] = None, + timestamp_column: Optional[str] = "", start_offset_in_seconds: Optional[int] = 0, max_concurrency: Optional[int] = -1, min_retry_interval_in_seconds: Optional[int] = -1, @@ -987,7 +1020,9 @@ def __init__( view_mode: Optional[Union[str, "ViewMode"]] = None, admins: Optional[List[str]] = None, viewers: Optional[List[str]] = None, - action_link_template: Optional[str] = None, + action_link_template: Optional[str] = "", + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, **kwargs ): super(DataFeedDetail, self).__init__(**kwargs) @@ -1019,6 +1054,8 @@ def __init__( self.status = None self.created_time = None self.action_link_template = action_link_template + self.authentication_type = authentication_type + self.credential_id = credential_id class AzureApplicationInsightsDataFeed(DataFeedDetail): @@ -1030,8 +1067,8 @@ class AzureApplicationInsightsDataFeed(DataFeedDetail): :param data_source_type: Required. 
data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str @@ -1096,7 +1133,13 @@ class AzureApplicationInsightsDataFeed(DataFeedDetail): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. :type action_link_template: str - :param data_source_parameter: + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: Required. :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureApplicationInsightsParameter """ @@ -1115,6 +1158,7 @@ class AzureApplicationInsightsDataFeed(DataFeedDetail): 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, } _attribute_map = { @@ -1146,6 +1190,8 @@ class AzureApplicationInsightsDataFeed(DataFeedDetail): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureApplicationInsightsParameter'}, } @@ -1156,10 +1202,11 @@ def __init__( granularity_name: Union[str, "Granularity"], metrics: List["Metric"], data_start_from: datetime.datetime, - data_feed_description: Optional[str] = None, + data_source_parameter: "AzureApplicationInsightsParameter", + data_feed_description: Optional[str] = "", granularity_amount: Optional[int] = None, dimension: Optional[List["Dimension"]] = None, - timestamp_column: Optional[str] = None, + timestamp_column: Optional[str] = "", start_offset_in_seconds: Optional[int] = 0, max_concurrency: Optional[int] = -1, min_retry_interval_in_seconds: Optional[int] = -1, @@ -1173,11 +1220,12 @@ def __init__( view_mode: Optional[Union[str, "ViewMode"]] = None, admins: Optional[List[str]] = None, viewers: Optional[List[str]] = None, - action_link_template: Optional[str] = None, - data_source_parameter: Optional["AzureApplicationInsightsParameter"] = None, + action_link_template: Optional[str] = "", + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, **kwargs ): - super(AzureApplicationInsightsDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, 
min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, **kwargs) + super(AzureApplicationInsightsDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 'AzureApplicationInsights' # type: str self.data_source_parameter = data_source_parameter @@ -1186,14 +1234,14 @@ class DataFeedDetailPatch(msrest.serialization.Model): """DataFeedDetailPatch. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureApplicationInsightsDataFeedPatch, AzureBlobDataFeedPatch, AzureCosmosDBDataFeedPatch, AzureDataExplorerDataFeedPatch, AzureDataLakeStorageGen2DataFeedPatch, AzureTableDataFeedPatch, ElasticsearchDataFeedPatch, HttpRequestDataFeedPatch, InfluxDBDataFeedPatch, MongoDBDataFeedPatch, MySqlDataFeedPatch, PostgreSqlDataFeedPatch, SQLServerDataFeedPatch. + sub-classes are: AzureApplicationInsightsDataFeedPatch, AzureBlobDataFeedPatch, AzureCosmosDBDataFeedPatch, AzureDataExplorerDataFeedPatch, AzureDataLakeStorageGen2DataFeedPatch, AzureEventHubsDataFeedPatch, AzureTableDataFeedPatch, ElasticsearchDataFeedPatch, HttpRequestDataFeedPatch, InfluxDBDataFeedPatch, MongoDBDataFeedPatch, MySqlDataFeedPatch, PostgreSqlDataFeedPatch, SQLServerDataFeedPatch. All required parameters must be populated in order to send to Azure. :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -1241,6 +1289,12 @@ class DataFeedDetailPatch(msrest.serialization.Model): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. 
Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str """ _validation = { @@ -1271,10 +1325,12 @@ class DataFeedDetailPatch(msrest.serialization.Model): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, } _subtype_map = { - 'data_source_type': {'AzureApplicationInsights': 'AzureApplicationInsightsDataFeedPatch', 'AzureBlob': 'AzureBlobDataFeedPatch', 'AzureCosmosDB': 'AzureCosmosDBDataFeedPatch', 'AzureDataExplorer': 'AzureDataExplorerDataFeedPatch', 'AzureDataLakeStorageGen2': 'AzureDataLakeStorageGen2DataFeedPatch', 'AzureTable': 'AzureTableDataFeedPatch', 'Elasticsearch': 'ElasticsearchDataFeedPatch', 'HttpRequest': 'HttpRequestDataFeedPatch', 'InfluxDB': 'InfluxDBDataFeedPatch', 'MongoDB': 'MongoDBDataFeedPatch', 'MySql': 'MySqlDataFeedPatch', 'PostgreSql': 'PostgreSqlDataFeedPatch', 'SqlServer': 'SQLServerDataFeedPatch'} + 'data_source_type': {'AzureApplicationInsights': 'AzureApplicationInsightsDataFeedPatch', 'AzureBlob': 'AzureBlobDataFeedPatch', 'AzureCosmosDB': 'AzureCosmosDBDataFeedPatch', 'AzureDataExplorer': 'AzureDataExplorerDataFeedPatch', 'AzureDataLakeStorageGen2': 'AzureDataLakeStorageGen2DataFeedPatch', 'AzureEventHubs': 'AzureEventHubsDataFeedPatch', 'AzureTable': 'AzureTableDataFeedPatch', 'Elasticsearch': 'ElasticsearchDataFeedPatch', 'HttpRequest': 'HttpRequestDataFeedPatch', 'InfluxDB': 'InfluxDBDataFeedPatch', 'MongoDB': 'MongoDBDataFeedPatch', 'MySql': 'MySqlDataFeedPatch', 'PostgreSql': 'PostgreSqlDataFeedPatch', 'SqlServer': 'SQLServerDataFeedPatch'} } def __init__( @@ -1299,6 +1355,8 @@ def __init__( viewers: Optional[List[str]] = None, status: Optional[Union[str, "EntityStatus"]] = None, action_link_template: Optional[str] = None, + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, **kwargs ): super(DataFeedDetailPatch, self).__init__(**kwargs) @@ -1322,6 +1380,8 @@ def __init__( self.viewers = viewers self.status = status self.action_link_template = action_link_template + self.authentication_type = authentication_type + self.credential_id = credential_id class AzureApplicationInsightsDataFeedPatch(DataFeedDetailPatch): @@ -1331,8 +1391,8 @@ class AzureApplicationInsightsDataFeedPatch(DataFeedDetailPatch): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. 
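# Taken together, the DataFeedDetail changes above alter construction in two
# ways: data_source_parameter is now a required keyword argument on every
# concrete feed class, and authentication_type / credential_id can be set at
# creation time. A sketch using the Application Insights feed from the hunk
# above; the AzureApplicationInsightsParameter and Metric keyword names are
# assumptions, since those models are not part of this diff.
import datetime

from azure.ai.metricsadvisor.models import (
    AzureApplicationInsightsDataFeed,
    AzureApplicationInsightsParameter,
    Metric,
)

feed = AzureApplicationInsightsDataFeed(
    data_feed_name="app-insights-feed",
    granularity_name="Daily",
    metrics=[Metric(metric_name="cost")],  # keyword assumed
    data_start_from=datetime.datetime(2021, 1, 1),
    data_source_parameter=AzureApplicationInsightsParameter(  # now required
        application_id="<app-id>",  # keyword names assumed
        api_key="<api-key>",
        query="<query>",
    ),
    authentication_type="Basic",  # new optional field, defaults to None
)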
:type data_feed_name: str @@ -1380,6 +1440,12 @@ class AzureApplicationInsightsDataFeedPatch(DataFeedDetailPatch): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureApplicationInsightsParameter """ @@ -1412,6 +1478,8 @@ class AzureApplicationInsightsDataFeedPatch(DataFeedDetailPatch): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureApplicationInsightsParameter'}, } @@ -1437,10 +1505,12 @@ def __init__( viewers: Optional[List[str]] = None, status: Optional[Union[str, "EntityStatus"]] = None, action_link_template: Optional[str] = None, + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, data_source_parameter: Optional["AzureApplicationInsightsParameter"] = None, **kwargs ): - super(AzureApplicationInsightsDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, **kwargs) + super(AzureApplicationInsightsDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 'AzureApplicationInsights' # type: str self.data_source_parameter = data_source_parameter @@ -1499,8 +1569,8 @@ class AzureBlobDataFeed(DataFeedDetail): :param data_source_type: Required. data source type.Constant filled by server. 
Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str @@ -1565,7 +1635,13 @@ class AzureBlobDataFeed(DataFeedDetail): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. :type action_link_template: str - :param data_source_parameter: + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: Required. :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureBlobParameter """ @@ -1584,6 +1660,7 @@ class AzureBlobDataFeed(DataFeedDetail): 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, } _attribute_map = { @@ -1615,6 +1692,8 @@ class AzureBlobDataFeed(DataFeedDetail): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureBlobParameter'}, } @@ -1625,10 +1704,11 @@ def __init__( granularity_name: Union[str, "Granularity"], metrics: List["Metric"], data_start_from: datetime.datetime, - data_feed_description: Optional[str] = None, + data_source_parameter: "AzureBlobParameter", + data_feed_description: Optional[str] = "", granularity_amount: Optional[int] = None, dimension: Optional[List["Dimension"]] = None, - timestamp_column: Optional[str] = None, + timestamp_column: Optional[str] = "", start_offset_in_seconds: Optional[int] = 0, max_concurrency: Optional[int] = -1, min_retry_interval_in_seconds: Optional[int] = -1, @@ -1642,11 +1722,12 @@ def __init__( view_mode: Optional[Union[str, "ViewMode"]] = None, admins: Optional[List[str]] = None, viewers: Optional[List[str]] = None, - action_link_template: Optional[str] = None, - data_source_parameter: Optional["AzureBlobParameter"] = None, + action_link_template: Optional[str] = "", + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, **kwargs ): - super(AzureBlobDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, 
roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, **kwargs) + super(AzureBlobDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 'AzureBlob' # type: str self.data_source_parameter = data_source_parameter @@ -1658,8 +1739,8 @@ class AzureBlobDataFeedPatch(DataFeedDetailPatch): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -1707,6 +1788,12 @@ class AzureBlobDataFeedPatch(DataFeedDetailPatch): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. 
+ :type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureBlobParameter """ @@ -1739,6 +1826,8 @@ class AzureBlobDataFeedPatch(DataFeedDetailPatch): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureBlobParameter'}, } @@ -1764,10 +1853,12 @@ def __init__( viewers: Optional[List[str]] = None, status: Optional[Union[str, "EntityStatus"]] = None, action_link_template: Optional[str] = None, + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, data_source_parameter: Optional["AzureBlobParameter"] = None, **kwargs ): - super(AzureBlobDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, **kwargs) + super(AzureBlobDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 'AzureBlob' # type: str self.data_source_parameter = data_source_parameter @@ -1820,8 +1911,8 @@ class AzureCosmosDBDataFeed(DataFeedDetail): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str @@ -1886,7 +1977,13 @@ class AzureCosmosDBDataFeed(DataFeedDetail): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. 
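# Unlike the full models, the *Patch classes keep every field optional, so the
# new authentication fields can be updated in isolation. Sketch using the blob
# patch model from the hunk above (the client method that actually sends the
# patch lives elsewhere and is not part of this file):
from azure.ai.metricsadvisor.models import AzureBlobDataFeedPatch

patch = AzureBlobDataFeedPatch(authentication_type="ManagedIdentity")
# data_source_type is a constant filled in by the subclass constructor:
assert patch.data_source_type == "AzureBlob"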
:type action_link_template: str - :param data_source_parameter: + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: Required. :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureCosmosDBParameter """ @@ -1905,6 +2002,7 @@ class AzureCosmosDBDataFeed(DataFeedDetail): 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, } _attribute_map = { @@ -1936,6 +2034,8 @@ class AzureCosmosDBDataFeed(DataFeedDetail): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureCosmosDBParameter'}, } @@ -1946,10 +2046,11 @@ def __init__( granularity_name: Union[str, "Granularity"], metrics: List["Metric"], data_start_from: datetime.datetime, - data_feed_description: Optional[str] = None, + data_source_parameter: "AzureCosmosDBParameter", + data_feed_description: Optional[str] = "", granularity_amount: Optional[int] = None, dimension: Optional[List["Dimension"]] = None, - timestamp_column: Optional[str] = None, + timestamp_column: Optional[str] = "", start_offset_in_seconds: Optional[int] = 0, max_concurrency: Optional[int] = -1, min_retry_interval_in_seconds: Optional[int] = -1, @@ -1963,11 +2064,12 @@ def __init__( view_mode: Optional[Union[str, "ViewMode"]] = None, admins: Optional[List[str]] = None, viewers: Optional[List[str]] = None, - action_link_template: Optional[str] = None, - data_source_parameter: Optional["AzureCosmosDBParameter"] = None, + action_link_template: Optional[str] = "", + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, **kwargs ): - super(AzureCosmosDBDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, **kwargs) + super(AzureCosmosDBDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, 
max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 'AzureCosmosDB' # type: str self.data_source_parameter = data_source_parameter @@ -1979,8 +2081,8 @@ class AzureCosmosDBDataFeedPatch(DataFeedDetailPatch): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -2028,6 +2130,12 @@ class AzureCosmosDBDataFeedPatch(DataFeedDetailPatch): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. 
+ :type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureCosmosDBParameter """ @@ -2060,6 +2168,8 @@ class AzureCosmosDBDataFeedPatch(DataFeedDetailPatch): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureCosmosDBParameter'}, } @@ -2085,10 +2195,12 @@ def __init__( viewers: Optional[List[str]] = None, status: Optional[Union[str, "EntityStatus"]] = None, action_link_template: Optional[str] = None, + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, data_source_parameter: Optional["AzureCosmosDBParameter"] = None, **kwargs ): - super(AzureCosmosDBDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, **kwargs) + super(AzureCosmosDBDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 'AzureCosmosDB' # type: str self.data_source_parameter = data_source_parameter @@ -2147,8 +2259,8 @@ class AzureDataExplorerDataFeed(DataFeedDetail): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str @@ -2213,7 +2325,13 @@ class AzureDataExplorerDataFeed(DataFeedDetail): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. 
:type action_link_template: str - :param data_source_parameter: + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: Required. :type data_source_parameter: ~azure.ai.metricsadvisor.models.SqlSourceParameter """ @@ -2232,6 +2350,7 @@ class AzureDataExplorerDataFeed(DataFeedDetail): 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, } _attribute_map = { @@ -2263,6 +2382,8 @@ class AzureDataExplorerDataFeed(DataFeedDetail): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'SqlSourceParameter'}, } @@ -2273,10 +2394,11 @@ def __init__( granularity_name: Union[str, "Granularity"], metrics: List["Metric"], data_start_from: datetime.datetime, - data_feed_description: Optional[str] = None, + data_source_parameter: "SqlSourceParameter", + data_feed_description: Optional[str] = "", granularity_amount: Optional[int] = None, dimension: Optional[List["Dimension"]] = None, - timestamp_column: Optional[str] = None, + timestamp_column: Optional[str] = "", start_offset_in_seconds: Optional[int] = 0, max_concurrency: Optional[int] = -1, min_retry_interval_in_seconds: Optional[int] = -1, @@ -2290,11 +2412,12 @@ def __init__( view_mode: Optional[Union[str, "ViewMode"]] = None, admins: Optional[List[str]] = None, viewers: Optional[List[str]] = None, - action_link_template: Optional[str] = None, - data_source_parameter: Optional["SqlSourceParameter"] = None, + action_link_template: Optional[str] = "", + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, **kwargs ): - super(AzureDataExplorerDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, **kwargs) + super(AzureDataExplorerDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, 
max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 'AzureDataExplorer' # type: str self.data_source_parameter = data_source_parameter @@ -2306,8 +2429,8 @@ class AzureDataExplorerDataFeedPatch(DataFeedDetailPatch): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -2355,6 +2478,12 @@ class AzureDataExplorerDataFeedPatch(DataFeedDetailPatch): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. 
+ :type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.SqlSourceParameter """ @@ -2387,6 +2516,8 @@ class AzureDataExplorerDataFeedPatch(DataFeedDetailPatch): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'SqlSourceParameter'}, } @@ -2412,10 +2543,12 @@ def __init__( viewers: Optional[List[str]] = None, status: Optional[Union[str, "EntityStatus"]] = None, action_link_template: Optional[str] = None, + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, data_source_parameter: Optional["SqlSourceParameter"] = None, **kwargs ): - super(AzureDataExplorerDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, **kwargs) + super(AzureDataExplorerDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 'AzureDataExplorer' # type: str self.data_source_parameter = data_source_parameter @@ -2429,8 +2562,8 @@ class AzureDataLakeStorageGen2DataFeed(DataFeedDetail): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str @@ -2495,7 +2628,13 @@ class AzureDataLakeStorageGen2DataFeed(DataFeedDetail): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. 
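# For authentication types that reference a credential entity, the new
# credential_id field carries that entity's id. A hypothetical rotation to a
# Key Vault backed service principal via the Data Explorer patch model above
# ("<credential-entity-id>" is a placeholder):
from azure.ai.metricsadvisor.models import AzureDataExplorerDataFeedPatch

patch = AzureDataExplorerDataFeedPatch(
    authentication_type="ServicePrincipalInKV",
    credential_id="<credential-entity-id>",
)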
:type action_link_template: str - :param data_source_parameter: + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: Required. :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureDataLakeStorageGen2Parameter """ @@ -2514,6 +2653,7 @@ class AzureDataLakeStorageGen2DataFeed(DataFeedDetail): 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, } _attribute_map = { @@ -2545,6 +2685,8 @@ class AzureDataLakeStorageGen2DataFeed(DataFeedDetail): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureDataLakeStorageGen2Parameter'}, } @@ -2555,10 +2697,11 @@ def __init__( granularity_name: Union[str, "Granularity"], metrics: List["Metric"], data_start_from: datetime.datetime, - data_feed_description: Optional[str] = None, + data_source_parameter: "AzureDataLakeStorageGen2Parameter", + data_feed_description: Optional[str] = "", granularity_amount: Optional[int] = None, dimension: Optional[List["Dimension"]] = None, - timestamp_column: Optional[str] = None, + timestamp_column: Optional[str] = "", start_offset_in_seconds: Optional[int] = 0, max_concurrency: Optional[int] = -1, min_retry_interval_in_seconds: Optional[int] = -1, @@ -2572,11 +2715,12 @@ def __init__( view_mode: Optional[Union[str, "ViewMode"]] = None, admins: Optional[List[str]] = None, viewers: Optional[List[str]] = None, - action_link_template: Optional[str] = None, - data_source_parameter: Optional["AzureDataLakeStorageGen2Parameter"] = None, + action_link_template: Optional[str] = "", + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, **kwargs ): - super(AzureDataLakeStorageGen2DataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, **kwargs) + super(AzureDataLakeStorageGen2DataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, 
data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 'AzureDataLakeStorageGen2' # type: str self.data_source_parameter = data_source_parameter @@ -2588,8 +2732,8 @@ class AzureDataLakeStorageGen2DataFeedPatch(DataFeedDetailPatch): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -2637,6 +2781,12 @@ class AzureDataLakeStorageGen2DataFeedPatch(DataFeedDetailPatch): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. 
+ :type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureDataLakeStorageGen2Parameter """ @@ -2669,6 +2819,8 @@ class AzureDataLakeStorageGen2DataFeedPatch(DataFeedDetailPatch): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureDataLakeStorageGen2Parameter'}, } @@ -2694,10 +2846,12 @@ def __init__( viewers: Optional[List[str]] = None, status: Optional[Union[str, "EntityStatus"]] = None, action_link_template: Optional[str] = None, + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, data_source_parameter: Optional["AzureDataLakeStorageGen2Parameter"] = None, **kwargs ): - super(AzureDataLakeStorageGen2DataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, **kwargs) + super(AzureDataLakeStorageGen2DataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 'AzureDataLakeStorageGen2' # type: str self.data_source_parameter = data_source_parameter @@ -2709,7 +2863,7 @@ class AzureDataLakeStorageGen2Parameter(msrest.serialization.Model): :param account_name: Required. Account name. :type account_name: str - :param account_key: Required. Account key. + :param account_key: Account key. :type account_key: str :param file_system_name: Required. File system name (Container). 
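# As the docstring change above and the validation change just below show,
# account_key is no longer required on AzureDataLakeStorageGen2Parameter, so a
# Gen2 source can be declared without embedding a key, e.g. when the feed
# delegates auth through the new authentication_type / credential_id fields.
# Minimal sketch (all values are placeholders):
from azure.ai.metricsadvisor.models import AzureDataLakeStorageGen2Parameter

param = AzureDataLakeStorageGen2Parameter(
    account_name="<storage-account>",
    file_system_name="<container>",
    directory_template="<directory-template>",
    file_template="<file-template>",
    # account_key omitted: it now defaults to None
)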
     :type file_system_name: str
@@ -2721,7 +2875,6 @@
     _validation = {
         'account_name': {'required': True},
-        'account_key': {'required': True},
         'file_system_name': {'required': True},
         'directory_template': {'required': True},
         'file_template': {'required': True},
@@ -2739,10 +2892,10 @@ def __init__(
         self,
         *,
         account_name: str,
-        account_key: str,
         file_system_name: str,
         directory_template: str,
         file_template: str,
+        account_key: Optional[str] = None,
         **kwargs
     ):
         super(AzureDataLakeStorageGen2Parameter, self).__init__(**kwargs)
         self.account_name = account_name
@@ -2753,6 +2906,561 @@ def __init__(
         self.file_template = file_template
+
+
+class AzureEventHubsDataFeed(DataFeedDetail):
+    """AzureEventHubsDataFeed.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param data_source_type: Required. data source type.Constant filled by server. Possible values
+     include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer",
+     "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest",
+     "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer".
+    :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType
+    :ivar data_feed_id: data feed unique id.
+    :vartype data_feed_id: str
+    :param data_feed_name: Required. data feed name.
+    :type data_feed_name: str
+    :param data_feed_description: data feed description.
+    :type data_feed_description: str
+    :param granularity_name: Required. granularity of the time series. Possible values include:
+     "Yearly", "Monthly", "Weekly", "Daily", "Hourly", "Minutely", "Secondly", "Custom".
+    :type granularity_name: str or ~azure.ai.metricsadvisor.models.Granularity
+    :param granularity_amount: if granularity is custom,it is required.
+    :type granularity_amount: int
+    :param metrics: Required. measure list.
+    :type metrics: list[~azure.ai.metricsadvisor.models.Metric]
+    :param dimension: dimension list.
+    :type dimension: list[~azure.ai.metricsadvisor.models.Dimension]
+    :param timestamp_column: user-defined timestamp column. if timestampColumn is null, start time
+     of every time slice will be used as default value.
+    :type timestamp_column: str
+    :param data_start_from: Required. ingestion start time.
+    :type data_start_from: ~datetime.datetime
+    :param start_offset_in_seconds: the time that the beginning of data ingestion task will delay
+     for every data slice according to this offset.
+    :type start_offset_in_seconds: long
+    :param max_concurrency: the max concurrency of data ingestion queries against user data source.
+     0 means no limitation.
+    :type max_concurrency: int
+    :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks.
+    :type min_retry_interval_in_seconds: long
+    :param stop_retry_after_in_seconds: stop retry data ingestion after the data slice first
+     schedule time in seconds.
+    :type stop_retry_after_in_seconds: long
+    :param need_rollup: mark if the data feed need rollup. Possible values include: "NoRollup",
+     "NeedRollup", "AlreadyRollup".
+    :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum
+    :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min",
+     "Avg", "Count".
+    :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod
+    :param roll_up_columns: roll up columns.
+    :type roll_up_columns: list[str]
+    :param all_up_identification: the identification value for the row of calculated all-up value.
+    :type all_up_identification: str
+    :param fill_missing_point_type: the type of fill missing point for anomaly detection. Possible
+     values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling".
+    :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType
+    :param fill_missing_point_value: the value of fill missing point for anomaly detection.
+    :type fill_missing_point_value: float
+    :param view_mode: data feed access mode, default is Private. Possible values include:
+     "Private", "Public".
+    :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode
+    :param admins: data feed administrator.
+    :type admins: list[str]
+    :param viewers: data feed viewer.
+    :type viewers: list[str]
+    :ivar is_admin: the query user is one of data feed administrator or not.
+    :vartype is_admin: bool
+    :ivar creator: data feed creator.
+    :vartype creator: str
+    :ivar status: data feed status. Possible values include: "Active", "Paused".
+    :vartype status: str or ~azure.ai.metricsadvisor.models.EntityStatus
+    :ivar created_time: data feed created time.
+    :vartype created_time: ~datetime.datetime
+    :param action_link_template: action link for alert.
+    :type action_link_template: str
+    :param authentication_type: authentication type for corresponding data source. Possible values
+     include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey",
+     "ServicePrincipal", "ServicePrincipalInKV".
+    :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum
+    :param credential_id: The credential entity id.
+    :type credential_id: str
+    :param data_source_parameter: Required.
+    :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureEventHubsParameter
+    """
+
+    _validation = {
+        'data_source_type': {'required': True},
+        'data_feed_id': {'readonly': True},
+        'data_feed_name': {'required': True},
+        'granularity_name': {'required': True},
+        'metrics': {'required': True, 'unique': True},
+        'dimension': {'unique': True},
+        'data_start_from': {'required': True},
+        'roll_up_columns': {'unique': True},
+        'admins': {'unique': True},
+        'viewers': {'unique': True},
+        'is_admin': {'readonly': True},
+        'creator': {'readonly': True},
+        'status': {'readonly': True},
+        'created_time': {'readonly': True},
+        'data_source_parameter': {'required': True},
+    }
+
+    _attribute_map = {
+        'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
+        'data_feed_id': {'key': 'dataFeedId', 'type': 'str'},
+        'data_feed_name': {'key': 'dataFeedName', 'type': 'str'},
+        'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'},
+        'granularity_name': {'key': 'granularityName', 'type': 'str'},
+        'granularity_amount': {'key': 'granularityAmount', 'type': 'int'},
+        'metrics': {'key': 'metrics', 'type': '[Metric]'},
+        'dimension': {'key': 'dimension', 'type': '[Dimension]'},
+        'timestamp_column': {'key': 'timestampColumn', 'type': 'str'},
+        'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'},
+        'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'},
+        'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'},
+        'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'},
+        'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'},
+        'need_rollup': {'key': 'needRollup', 'type': 'str'},
+        'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'},
+        'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'},
+        'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'},
+        'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'},
+        'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'},
+        'view_mode': {'key': 'viewMode', 'type': 'str'},
+        'admins': {'key': 'admins', 'type': '[str]'},
+        'viewers': {'key': 'viewers', 'type': '[str]'},
+        'is_admin': {'key': 'isAdmin', 'type': 'bool'},
+        'creator': {'key': 'creator', 'type': 'str'},
+        'status': {'key': 'status', 'type': 'str'},
+        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
+        'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'},
+        'authentication_type': {'key': 'authenticationType', 'type': 'str'},
+        'credential_id': {'key': 'credentialId', 'type': 'str'},
+        'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureEventHubsParameter'},
+    }
+
+    def __init__(
+        self,
+        *,
+        data_feed_name: str,
+        granularity_name: Union[str, "Granularity"],
+        metrics: List["Metric"],
+        data_start_from: datetime.datetime,
+        data_source_parameter: "AzureEventHubsParameter",
+        data_feed_description: Optional[str] = "",
+        granularity_amount: Optional[int] = None,
+        dimension: Optional[List["Dimension"]] = None,
+        timestamp_column: Optional[str] = "",
+        start_offset_in_seconds: Optional[int] = 0,
+        max_concurrency: Optional[int] = -1,
+        min_retry_interval_in_seconds: Optional[int] = -1,
+        stop_retry_after_in_seconds: Optional[int] = -1,
+        need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None,
+        roll_up_method: Optional[Union[str, "RollUpMethod"]] = None,
+        roll_up_columns: Optional[List[str]] = None,
+        all_up_identification: Optional[str] = None,
+        fill_missing_point_type:
Optional[Union[str, "FillMissingPointType"]] = None, + fill_missing_point_value: Optional[float] = None, + view_mode: Optional[Union[str, "ViewMode"]] = None, + admins: Optional[List[str]] = None, + viewers: Optional[List[str]] = None, + action_link_template: Optional[str] = "", + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, + **kwargs + ): + super(AzureEventHubsDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) + self.data_source_type = 'AzureEventHubs' # type: str + self.data_source_parameter = data_source_parameter + + +class AzureEventHubsDataFeedPatch(DataFeedDetailPatch): + """AzureEventHubsDataFeedPatch. + + All required parameters must be populated in order to send to Azure. + + :param data_source_type: Required. data source type.Constant filled by server. Possible values + include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". + :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType + :param data_feed_name: data feed name. + :type data_feed_name: str + :param data_feed_description: data feed description. + :type data_feed_description: str + :param timestamp_column: user-defined timestamp column. if timestampColumn is null, start time + of every time slice will be used as default value. + :type timestamp_column: str + :param data_start_from: ingestion start time. + :type data_start_from: ~datetime.datetime + :param start_offset_in_seconds: the time that the beginning of data ingestion task will delay + for every data slice according to this offset. + :type start_offset_in_seconds: long + :param max_concurrency: the max concurrency of data ingestion queries against user data source. + 0 means no limitation. + :type max_concurrency: int + :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks. + :type min_retry_interval_in_seconds: long + :param stop_retry_after_in_seconds: stop retry data ingestion after the data slice first + schedule time in seconds. + :type stop_retry_after_in_seconds: long + :param need_rollup: mark if the data feed need rollup. Possible values include: "NoRollup", + "NeedRollup", "AlreadyRollup". + :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum + :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min", + "Avg", "Count". + :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod + :param roll_up_columns: roll up columns. 
+ :type roll_up_columns: list[str] + :param all_up_identification: the identification value for the row of calculated all-up value. + :type all_up_identification: str + :param fill_missing_point_type: the type of fill missing point for anomaly detection. Possible + values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling". + :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType + :param fill_missing_point_value: the value of fill missing point for anomaly detection. + :type fill_missing_point_value: float + :param view_mode: data feed access mode, default is Private. Possible values include: + "Private", "Public". + :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode + :param admins: data feed administrator. + :type admins: list[str] + :param viewers: data feed viewer. + :type viewers: list[str] + :param status: data feed status. Possible values include: "Active", "Paused". + :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus + :param action_link_template: action link for alert. + :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: + :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureEventHubsParameter + """ + + _validation = { + 'data_source_type': {'required': True}, + 'roll_up_columns': {'unique': True}, + 'admins': {'unique': True}, + 'viewers': {'unique': True}, + } + + _attribute_map = { + 'data_source_type': {'key': 'dataSourceType', 'type': 'str'}, + 'data_feed_name': {'key': 'dataFeedName', 'type': 'str'}, + 'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'}, + 'timestamp_column': {'key': 'timestampColumn', 'type': 'str'}, + 'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'}, + 'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'}, + 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, + 'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'}, + 'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'}, + 'need_rollup': {'key': 'needRollup', 'type': 'str'}, + 'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'}, + 'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'}, + 'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'}, + 'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'}, + 'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'}, + 'view_mode': {'key': 'viewMode', 'type': 'str'}, + 'admins': {'key': 'admins', 'type': '[str]'}, + 'viewers': {'key': 'viewers', 'type': '[str]'}, + 'status': {'key': 'status', 'type': 'str'}, + 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, + 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureEventHubsParameter'}, + } + + def __init__( + self, + *, + data_feed_name: Optional[str] = None, + data_feed_description: Optional[str] = None, + timestamp_column: Optional[str] = None, + 
data_start_from: Optional[datetime.datetime] = None, + start_offset_in_seconds: Optional[int] = None, + max_concurrency: Optional[int] = None, + min_retry_interval_in_seconds: Optional[int] = None, + stop_retry_after_in_seconds: Optional[int] = None, + need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None, + roll_up_method: Optional[Union[str, "RollUpMethod"]] = None, + roll_up_columns: Optional[List[str]] = None, + all_up_identification: Optional[str] = None, + fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None, + fill_missing_point_value: Optional[float] = None, + view_mode: Optional[Union[str, "ViewMode"]] = None, + admins: Optional[List[str]] = None, + viewers: Optional[List[str]] = None, + status: Optional[Union[str, "EntityStatus"]] = None, + action_link_template: Optional[str] = None, + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, + data_source_parameter: Optional["AzureEventHubsParameter"] = None, + **kwargs + ): + super(AzureEventHubsDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) + self.data_source_type = 'AzureEventHubs' # type: str + self.data_source_parameter = data_source_parameter + + +class AzureEventHubsParameter(msrest.serialization.Model): + """AzureEventHubsParameter. + + All required parameters must be populated in order to send to Azure. + + :param connection_string: Required. Azure Event Hubs connection string. + :type connection_string: str + :param consumer_group: Required. Azure Event Hubs consumer group. + :type consumer_group: str + """ + + _validation = { + 'connection_string': {'required': True}, + 'consumer_group': {'required': True}, + } + + _attribute_map = { + 'connection_string': {'key': 'connectionString', 'type': 'str'}, + 'consumer_group': {'key': 'consumerGroup', 'type': 'str'}, + } + + def __init__( + self, + *, + connection_string: str, + consumer_group: str, + **kwargs + ): + super(AzureEventHubsParameter, self).__init__(**kwargs) + self.connection_string = connection_string + self.consumer_group = consumer_group + + +class DataSourceCredential(msrest.serialization.Model): + """DataSourceCredential. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureSQLConnectionStringCredential, DataLakeGen2SharedKeyCredential, ServicePrincipalCredential, ServicePrincipalInKVCredential. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param data_source_credential_type: Required. Type of data source credential.Constant filled by + server. 
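# A minimal sketch of wiring up the new feed type: AzureEventHubsParameter carries only a
# connection string and consumer group. Metric is assumed to accept metric_name as in the
# rest of this models module; all values are placeholders.
import datetime

from azure.ai.metricsadvisor.models import (
    AzureEventHubsDataFeed,
    AzureEventHubsParameter,
    Metric,
)

feed = AzureEventHubsDataFeed(
    data_feed_name="eventhubs-feed",
    granularity_name="Hourly",
    metrics=[Metric(metric_name="cost")],
    data_start_from=datetime.datetime(2021, 1, 1),
    data_source_parameter=AzureEventHubsParameter(
        connection_string="<event-hubs-connection-string>",
        consumer_group="$Default",
    ),
)
assert feed.data_source_type == "AzureEventHubs"  # discriminator set by the subclass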
Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type data_source_credential_type: str or + ~azure.ai.metricsadvisor.models.DataSourceCredentialType + :ivar data_source_credential_id: Unique id of data source credential. + :vartype data_source_credential_id: str + :param data_source_credential_name: Required. Name of data source credential. + :type data_source_credential_name: str + :param data_source_credential_description: Description of data source credential. + :type data_source_credential_description: str + """ + + _validation = { + 'data_source_credential_type': {'required': True}, + 'data_source_credential_id': {'readonly': True}, + 'data_source_credential_name': {'required': True}, + } + + _attribute_map = { + 'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'}, + 'data_source_credential_id': {'key': 'dataSourceCredentialId', 'type': 'str'}, + 'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'}, + 'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'}, + } + + _subtype_map = { + 'data_source_credential_type': {'AzureSQLConnectionString': 'AzureSQLConnectionStringCredential', 'DataLakeGen2SharedKey': 'DataLakeGen2SharedKeyCredential', 'ServicePrincipal': 'ServicePrincipalCredential', 'ServicePrincipalInKV': 'ServicePrincipalInKVCredential'} + } + + def __init__( + self, + *, + data_source_credential_name: str, + data_source_credential_description: Optional[str] = None, + **kwargs + ): + super(DataSourceCredential, self).__init__(**kwargs) + self.data_source_credential_type = None # type: Optional[str] + self.data_source_credential_id = None + self.data_source_credential_name = data_source_credential_name + self.data_source_credential_description = data_source_credential_description + + +class AzureSQLConnectionStringCredential(DataSourceCredential): + """AzureSQLConnectionStringCredential. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param data_source_credential_type: Required. Type of data source credential.Constant filled by + server. Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type data_source_credential_type: str or + ~azure.ai.metricsadvisor.models.DataSourceCredentialType + :ivar data_source_credential_id: Unique id of data source credential. + :vartype data_source_credential_id: str + :param data_source_credential_name: Required. Name of data source credential. + :type data_source_credential_name: str + :param data_source_credential_description: Description of data source credential. + :type data_source_credential_description: str + :param parameters: Required. 
+ :type parameters: ~azure.ai.metricsadvisor.models.AzureSQLConnectionStringParam + """ + + _validation = { + 'data_source_credential_type': {'required': True}, + 'data_source_credential_id': {'readonly': True}, + 'data_source_credential_name': {'required': True}, + 'parameters': {'required': True}, + } + + _attribute_map = { + 'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'}, + 'data_source_credential_id': {'key': 'dataSourceCredentialId', 'type': 'str'}, + 'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'}, + 'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': 'AzureSQLConnectionStringParam'}, + } + + def __init__( + self, + *, + data_source_credential_name: str, + parameters: "AzureSQLConnectionStringParam", + data_source_credential_description: Optional[str] = None, + **kwargs + ): + super(AzureSQLConnectionStringCredential, self).__init__(data_source_credential_name=data_source_credential_name, data_source_credential_description=data_source_credential_description, **kwargs) + self.data_source_credential_type = 'AzureSQLConnectionString' # type: str + self.parameters = parameters + + +class DataSourceCredentialPatch(msrest.serialization.Model): + """DataSourceCredentialPatch. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureSQLConnectionStringCredentialPatch, DataLakeGen2SharedKeyCredentialPatch, ServicePrincipalCredentialPatch, ServicePrincipalInKVCredentialPatch. + + All required parameters must be populated in order to send to Azure. + + :param data_source_credential_type: Required. Type of data source credential.Constant filled by + server. Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type data_source_credential_type: str or + ~azure.ai.metricsadvisor.models.DataSourceCredentialType + :param data_source_credential_name: Name of data source credential. + :type data_source_credential_name: str + :param data_source_credential_description: Description of data source credential.
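# The _subtype_map above is what drives msrest's polymorphic handling: the wire value of
# dataSourceCredentialType selects the concrete subclass on deserialization, and each
# subclass pins the discriminator on construction. A small sketch with a placeholder
# connection string (AzureSQLConnectionStringParam is defined just below):
from azure.ai.metricsadvisor.models import (
    AzureSQLConnectionStringCredential,
    AzureSQLConnectionStringParam,
)

credential = AzureSQLConnectionStringCredential(
    data_source_credential_name="sql-conn",
    parameters=AzureSQLConnectionStringParam(connection_string="<connection-string>"),
)
assert credential.data_source_credential_type == "AzureSQLConnectionString"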
+ :type data_source_credential_description: str + """ + + _validation = { + 'data_source_credential_type': {'required': True}, + } + + _attribute_map = { + 'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'}, + 'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'}, + 'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'}, + } + + _subtype_map = { + 'data_source_credential_type': {'AzureSQLConnectionString': 'AzureSQLConnectionStringCredentialPatch', 'DataLakeGen2SharedKey': 'DataLakeGen2SharedKeyCredentialPatch', 'ServicePrincipal': 'ServicePrincipalCredentialPatch', 'ServicePrincipalInKV': 'ServicePrincipalInKVCredentialPatch'} + } + + def __init__( + self, + *, + data_source_credential_name: Optional[str] = None, + data_source_credential_description: Optional[str] = None, + **kwargs + ): + super(DataSourceCredentialPatch, self).__init__(**kwargs) + self.data_source_credential_type = None # type: Optional[str] + self.data_source_credential_name = data_source_credential_name + self.data_source_credential_description = data_source_credential_description + + +class AzureSQLConnectionStringCredentialPatch(DataSourceCredentialPatch): + """AzureSQLConnectionStringCredentialPatch. + + All required parameters must be populated in order to send to Azure. + + :param data_source_credential_type: Required. Type of data source credential.Constant filled by + server. Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type data_source_credential_type: str or + ~azure.ai.metricsadvisor.models.DataSourceCredentialType + :param data_source_credential_name: Name of data source credential. + :type data_source_credential_name: str + :param data_source_credential_description: Description of data source credential. + :type data_source_credential_description: str + :param parameters: + :type parameters: ~azure.ai.metricsadvisor.models.AzureSQLConnectionStringParam + """ + + _validation = { + 'data_source_credential_type': {'required': True}, + } + + _attribute_map = { + 'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'}, + 'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'}, + 'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': 'AzureSQLConnectionStringParam'}, + } + + def __init__( + self, + *, + data_source_credential_name: Optional[str] = None, + data_source_credential_description: Optional[str] = None, + parameters: Optional["AzureSQLConnectionStringParam"] = None, + **kwargs + ): + super(AzureSQLConnectionStringCredentialPatch, self).__init__(data_source_credential_name=data_source_credential_name, data_source_credential_description=data_source_credential_description, **kwargs) + self.data_source_credential_type = 'AzureSQLConnectionString' # type: str + self.parameters = parameters + + +class AzureSQLConnectionStringParam(msrest.serialization.Model): + """AzureSQLConnectionStringParam. + + All required parameters must be populated in order to send to Azure. + + :param connection_string: Required. The connection string to access the Azure SQL.
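# Because every field on the Patch variant is optional, an update can rotate just the
# connection string without resending the name or description. Sketch with a placeholder value:
from azure.ai.metricsadvisor.models import (
    AzureSQLConnectionStringCredentialPatch,
    AzureSQLConnectionStringParam,
)

patch = AzureSQLConnectionStringCredentialPatch(
    parameters=AzureSQLConnectionStringParam(connection_string="<rotated-connection-string>"),
)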
+ :type connection_string: str + """ + + _validation = { + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'connection_string': {'key': 'connectionString', 'type': 'str'}, + } + + def __init__( + self, + *, + connection_string: str, + **kwargs + ): + super(AzureSQLConnectionStringParam, self).__init__(**kwargs) + self.connection_string = connection_string + + class AzureTableDataFeed(DataFeedDetail): """AzureTableDataFeed. @@ -2762,8 +3470,8 @@ class AzureTableDataFeed(DataFeedDetail): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str @@ -2828,7 +3536,13 @@ class AzureTableDataFeed(DataFeedDetail): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. :type action_link_template: str - :param data_source_parameter: + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: Required. 
:type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureTableParameter """ @@ -2847,6 +3561,7 @@ class AzureTableDataFeed(DataFeedDetail): 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, } _attribute_map = { @@ -2878,6 +3593,8 @@ class AzureTableDataFeed(DataFeedDetail): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureTableParameter'}, } @@ -2888,10 +3605,11 @@ def __init__( granularity_name: Union[str, "Granularity"], metrics: List["Metric"], data_start_from: datetime.datetime, - data_feed_description: Optional[str] = None, + data_source_parameter: "AzureTableParameter", + data_feed_description: Optional[str] = "", granularity_amount: Optional[int] = None, dimension: Optional[List["Dimension"]] = None, - timestamp_column: Optional[str] = None, + timestamp_column: Optional[str] = "", start_offset_in_seconds: Optional[int] = 0, max_concurrency: Optional[int] = -1, min_retry_interval_in_seconds: Optional[int] = -1, @@ -2905,11 +3623,12 @@ def __init__( view_mode: Optional[Union[str, "ViewMode"]] = None, admins: Optional[List[str]] = None, viewers: Optional[List[str]] = None, - action_link_template: Optional[str] = None, - data_source_parameter: Optional["AzureTableParameter"] = None, + action_link_template: Optional[str] = "", + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, **kwargs ): - super(AzureTableDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, **kwargs) + super(AzureTableDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) 
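# With data_source_parameter now a required keyword argument, a malformed AzureTableDataFeed
# fails at model construction rather than at the service. A sketch, assuming AzureTableParameter
# takes connection_string/table/query as in the full models file; values are placeholders.
import datetime

from azure.ai.metricsadvisor.models import AzureTableDataFeed, AzureTableParameter, Metric

feed = AzureTableDataFeed(
    data_feed_name="table-feed",
    granularity_name="Daily",
    metrics=[Metric(metric_name="revenue")],
    data_start_from=datetime.datetime(2021, 1, 1),
    data_source_parameter=AzureTableParameter(  # assumption: fields per the full model
        connection_string="<storage-connection-string>",
        table="mytable",
        query="<query>",
    ),
)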
self.data_source_type = 'AzureTable' # type: str self.data_source_parameter = data_source_parameter @@ -2921,8 +3640,8 @@ class AzureTableDataFeedPatch(DataFeedDetailPatch): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -2970,6 +3689,12 @@ class AzureTableDataFeedPatch(DataFeedDetailPatch): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureTableParameter """ @@ -3002,6 +3727,8 @@ class AzureTableDataFeedPatch(DataFeedDetailPatch): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureTableParameter'}, } @@ -3027,10 +3754,12 @@ def __init__( viewers: Optional[List[str]] = None, status: Optional[Union[str, "EntityStatus"]] = None, action_link_template: Optional[str] = None, + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, data_source_parameter: Optional["AzureTableParameter"] = None, **kwargs ): - super(AzureTableDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, **kwargs) + super(AzureTableDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, 
roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 'AzureTable' # type: str self.data_source_parameter = data_source_parameter @@ -3094,12 +3823,12 @@ class ChangePointFeedback(MetricFeedback): :type metric_id: str :param dimension_filter: Required. :type dimension_filter: ~azure.ai.metricsadvisor.models.FeedbackDimensionFilter - :param start_time: the start timestamp of feedback time range. + :param start_time: Required. the start timestamp of feedback time range. :type start_time: ~datetime.datetime - :param end_time: the end timestamp of feedback time range, when equals to startTime means only - one timestamp. + :param end_time: Required. the end timestamp of feedback time range, when equals to startTime + means only one timestamp. :type end_time: ~datetime.datetime - :param value: + :param value: Required. :type value: ~azure.ai.metricsadvisor.models.ChangePointFeedbackValue """ @@ -3110,6 +3839,9 @@ class ChangePointFeedback(MetricFeedback): 'user_principal': {'readonly': True}, 'metric_id': {'required': True}, 'dimension_filter': {'required': True}, + 'start_time': {'required': True}, + 'end_time': {'required': True}, + 'value': {'required': True}, } _attribute_map = { @@ -3129,9 +3861,9 @@ def __init__( *, metric_id: str, dimension_filter: "FeedbackDimensionFilter", - start_time: Optional[datetime.datetime] = None, - end_time: Optional[datetime.datetime] = None, - value: Optional["ChangePointFeedbackValue"] = None, + start_time: datetime.datetime, + end_time: datetime.datetime, + value: "ChangePointFeedbackValue", **kwargs ): super(ChangePointFeedback, self).__init__(metric_id=metric_id, dimension_filter=dimension_filter, **kwargs) @@ -3249,7 +3981,7 @@ class CommentFeedback(MetricFeedback): :param end_time: the end timestamp of feedback time range, when equals to startTime means only one timestamp. :type end_time: ~datetime.datetime - :param value: + :param value: Required. :type value: ~azure.ai.metricsadvisor.models.CommentFeedbackValue """ @@ -3260,6 +3992,7 @@ class CommentFeedback(MetricFeedback): 'user_principal': {'readonly': True}, 'metric_id': {'required': True}, 'dimension_filter': {'required': True}, + 'value': {'required': True}, } _attribute_map = { @@ -3279,9 +4012,9 @@ def __init__( *, metric_id: str, dimension_filter: "FeedbackDimensionFilter", + value: "CommentFeedbackValue", start_time: Optional[datetime.datetime] = None, end_time: Optional[datetime.datetime] = None, - value: Optional["CommentFeedbackValue"] = None, **kwargs ): super(CommentFeedback, self).__init__(metric_id=metric_id, dimension_filter=dimension_filter, **kwargs) @@ -3380,6 +4113,155 @@ def __init__( self.value = None +class DataLakeGen2SharedKeyCredential(DataSourceCredential): + """DataLakeGen2SharedKeyCredential. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param data_source_credential_type: Required. Type of data source credential.Constant filled by + server. Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". 
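# Making start_time/end_time/value required on ChangePointFeedback means feedback always
# names a concrete window. A sketch; the FeedbackDimensionFilter(dimension=...) and
# ChangePointFeedbackValue(change_point_value=...) shapes are assumed from the full models file.
import datetime

from azure.ai.metricsadvisor.models import (
    ChangePointFeedback,
    ChangePointFeedbackValue,
    FeedbackDimensionFilter,
)

feedback = ChangePointFeedback(
    metric_id="<metric-id>",
    dimension_filter=FeedbackDimensionFilter(dimension={"region": "us-west"}),
    start_time=datetime.datetime(2021, 3, 1),
    end_time=datetime.datetime(2021, 3, 1),  # equal to start_time: a single timestamp
    value=ChangePointFeedbackValue(change_point_value="ChangePoint"),
)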
+ :type data_source_credential_type: str or + ~azure.ai.metricsadvisor.models.DataSourceCredentialType + :ivar data_source_credential_id: Unique id of data source credential. + :vartype data_source_credential_id: str + :param data_source_credential_name: Required. Name of data source credential. + :type data_source_credential_name: str + :param data_source_credential_description: Description of data source credential. + :type data_source_credential_description: str + :param parameters: Required. + :type parameters: ~azure.ai.metricsadvisor.models.DataLakeGen2SharedKeyParam + """ + + _validation = { + 'data_source_credential_type': {'required': True}, + 'data_source_credential_id': {'readonly': True}, + 'data_source_credential_name': {'required': True}, + 'parameters': {'required': True}, + } + + _attribute_map = { + 'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'}, + 'data_source_credential_id': {'key': 'dataSourceCredentialId', 'type': 'str'}, + 'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'}, + 'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': 'DataLakeGen2SharedKeyParam'}, + } + + def __init__( + self, + *, + data_source_credential_name: str, + parameters: "DataLakeGen2SharedKeyParam", + data_source_credential_description: Optional[str] = None, + **kwargs + ): + super(DataLakeGen2SharedKeyCredential, self).__init__(data_source_credential_name=data_source_credential_name, data_source_credential_description=data_source_credential_description, **kwargs) + self.data_source_credential_type = 'DataLakeGen2SharedKey' # type: str + self.parameters = parameters + + +class DataLakeGen2SharedKeyCredentialPatch(DataSourceCredentialPatch): + """DataLakeGen2SharedKeyCredentialPatch. + + All required parameters must be populated in order to send to Azure. + + :param data_source_credential_type: Required. Type of data source credential.Constant filled by + server. Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type data_source_credential_type: str or + ~azure.ai.metricsadvisor.models.DataSourceCredentialType + :param data_source_credential_name: Name of data source credential. + :type data_source_credential_name: str + :param data_source_credential_description: Description of data source credential.
+ :type data_source_credential_description: str + :param parameters: + :type parameters: ~azure.ai.metricsadvisor.models.DataLakeGen2SharedKeyParam + """ + + _validation = { + 'data_source_credential_type': {'required': True}, + } + + _attribute_map = { + 'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'}, + 'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'}, + 'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': 'DataLakeGen2SharedKeyParam'}, + } + + def __init__( + self, + *, + data_source_credential_name: Optional[str] = None, + data_source_credential_description: Optional[str] = None, + parameters: Optional["DataLakeGen2SharedKeyParam"] = None, + **kwargs + ): + super(DataLakeGen2SharedKeyCredentialPatch, self).__init__(data_source_credential_name=data_source_credential_name, data_source_credential_description=data_source_credential_description, **kwargs) + self.data_source_credential_type = 'DataLakeGen2SharedKey' # type: str + self.parameters = parameters + + +class DataLakeGen2SharedKeyParam(msrest.serialization.Model): + """DataLakeGen2SharedKeyParam. + + All required parameters must be populated in order to send to Azure. + + :param account_key: Required. The account key to access the Azure Data Lake Storage Gen2. + :type account_key: str + """ + + _validation = { + 'account_key': {'required': True}, + } + + _attribute_map = { + 'account_key': {'key': 'accountKey', 'type': 'str'}, + } + + def __init__( + self, + *, + account_key: str, + **kwargs + ): + super(DataLakeGen2SharedKeyParam, self).__init__(**kwargs) + self.account_key = account_key + + +class DataSourceCredentialList(msrest.serialization.Model): + """DataSourceCredentialList. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar next_link: + :vartype next_link: str + :ivar value: + :vartype value: list[~azure.ai.metricsadvisor.models.DataSourceCredential] + """ + + _validation = { + 'next_link': {'readonly': True}, + 'value': {'readonly': True, 'unique': True}, + } + + _attribute_map = { + 'next_link': {'key': '@nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[DataSourceCredential]'}, + } + + def __init__( + self, + **kwargs + ): + super(DataSourceCredentialList, self).__init__(**kwargs) + self.next_link = None + self.value = None + + class DetectionAnomalyFilterCondition(msrest.serialization.Model): """DetectionAnomalyFilterCondition. @@ -3670,8 +4552,8 @@ class ElasticsearchDataFeed(DataFeedDetail): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str @@ -3736,7 +4618,13 @@ class ElasticsearchDataFeed(DataFeedDetail): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. 
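# DataLakeGen2SharedKeyParam is the counterpart of the now-optional account_key on the Gen2
# feed: the key lives on a credential entity, and feeds reference it by credential_id.
# Sketch with a placeholder key:
from azure.ai.metricsadvisor.models import (
    DataLakeGen2SharedKeyCredential,
    DataLakeGen2SharedKeyParam,
)

credential = DataLakeGen2SharedKeyCredential(
    data_source_credential_name="gen2-shared-key",
    parameters=DataLakeGen2SharedKeyParam(account_key="<storage-account-key>"),
)
# data_source_credential_id is read-only and assigned by the service on creation;
# that id is what a data feed's credential_id should reference.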
:type action_link_template: str - :param data_source_parameter: + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: Required. :type data_source_parameter: ~azure.ai.metricsadvisor.models.ElasticsearchParameter """ @@ -3755,6 +4643,7 @@ class ElasticsearchDataFeed(DataFeedDetail): 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, } _attribute_map = { @@ -3786,6 +4675,8 @@ class ElasticsearchDataFeed(DataFeedDetail): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'ElasticsearchParameter'}, } @@ -3796,10 +4687,11 @@ def __init__( granularity_name: Union[str, "Granularity"], metrics: List["Metric"], data_start_from: datetime.datetime, - data_feed_description: Optional[str] = None, + data_source_parameter: "ElasticsearchParameter", + data_feed_description: Optional[str] = "", granularity_amount: Optional[int] = None, dimension: Optional[List["Dimension"]] = None, - timestamp_column: Optional[str] = None, + timestamp_column: Optional[str] = "", start_offset_in_seconds: Optional[int] = 0, max_concurrency: Optional[int] = -1, min_retry_interval_in_seconds: Optional[int] = -1, @@ -3813,11 +4705,12 @@ def __init__( view_mode: Optional[Union[str, "ViewMode"]] = None, admins: Optional[List[str]] = None, viewers: Optional[List[str]] = None, - action_link_template: Optional[str] = None, - data_source_parameter: Optional["ElasticsearchParameter"] = None, + action_link_template: Optional[str] = "", + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, **kwargs ): - super(ElasticsearchDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, **kwargs) + super(ElasticsearchDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, 
max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 'Elasticsearch' # type: str self.data_source_parameter = data_source_parameter @@ -3829,8 +4722,8 @@ class ElasticsearchDataFeedPatch(DataFeedDetailPatch): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -3878,6 +4771,12 @@ class ElasticsearchDataFeedPatch(DataFeedDetailPatch): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. 
+ :type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.ElasticsearchParameter """ @@ -3910,6 +4809,8 @@ class ElasticsearchDataFeedPatch(DataFeedDetailPatch): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'ElasticsearchParameter'}, } @@ -3935,10 +4836,12 @@ def __init__( viewers: Optional[List[str]] = None, status: Optional[Union[str, "EntityStatus"]] = None, action_link_template: Optional[str] = None, + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, data_source_parameter: Optional["ElasticsearchParameter"] = None, **kwargs ): - super(ElasticsearchDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, **kwargs) + super(ElasticsearchDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 'Elasticsearch' # type: str self.data_source_parameter = data_source_parameter @@ -4037,8 +4940,8 @@ def __init__( self, *, hook_name: str, - description: Optional[str] = None, - external_link: Optional[str] = None, + description: Optional[str] = "", + external_link: Optional[str] = "", **kwargs ): super(HookInfo, self).__init__(**kwargs) @@ -4070,7 +4973,7 @@ class EmailHookInfo(HookInfo): :type external_link: str :ivar admins: hook administrators. :vartype admins: list[str] - :param hook_parameter: + :param hook_parameter: Required. 
:type hook_parameter: ~azure.ai.metricsadvisor.models.EmailHookParameter """ @@ -4079,6 +4982,7 @@ class EmailHookInfo(HookInfo): 'hook_id': {'readonly': True}, 'hook_name': {'required': True}, 'admins': {'readonly': True, 'unique': True}, + 'hook_parameter': {'required': True}, } _attribute_map = { @@ -4095,9 +4999,9 @@ def __init__( self, *, hook_name: str, - description: Optional[str] = None, - external_link: Optional[str] = None, - hook_parameter: Optional["EmailHookParameter"] = None, + hook_parameter: "EmailHookParameter", + description: Optional[str] = "", + external_link: Optional[str] = "", **kwargs ): super(EmailHookInfo, self).__init__(hook_name=hook_name, description=description, external_link=external_link, **kwargs) @@ -4477,8 +5381,8 @@ class HttpRequestDataFeed(DataFeedDetail): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str @@ -4543,7 +5447,13 @@ class HttpRequestDataFeed(DataFeedDetail): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. :type action_link_template: str - :param data_source_parameter: + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: Required. 
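# Since hook_parameter is now required, an email hook always carries its recipients at
# construction time. A sketch, assuming EmailHookParameter exposes to_list as in the full
# models file; the address is a placeholder.
from azure.ai.metricsadvisor.models import EmailHookInfo, EmailHookParameter

hook = EmailHookInfo(
    hook_name="ops-email",
    hook_parameter=EmailHookParameter(to_list=["oncall@example.com"]),
    description="",  # description/external_link now default to "" rather than None
)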
:type data_source_parameter: ~azure.ai.metricsadvisor.models.HttpRequestParameter """ @@ -4562,6 +5472,7 @@ class HttpRequestDataFeed(DataFeedDetail): 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, } _attribute_map = { @@ -4593,6 +5504,8 @@ class HttpRequestDataFeed(DataFeedDetail): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'HttpRequestParameter'}, } @@ -4603,10 +5516,11 @@ def __init__( granularity_name: Union[str, "Granularity"], metrics: List["Metric"], data_start_from: datetime.datetime, - data_feed_description: Optional[str] = None, + data_source_parameter: "HttpRequestParameter", + data_feed_description: Optional[str] = "", granularity_amount: Optional[int] = None, dimension: Optional[List["Dimension"]] = None, - timestamp_column: Optional[str] = None, + timestamp_column: Optional[str] = "", start_offset_in_seconds: Optional[int] = 0, max_concurrency: Optional[int] = -1, min_retry_interval_in_seconds: Optional[int] = -1, @@ -4620,11 +5534,12 @@ def __init__( view_mode: Optional[Union[str, "ViewMode"]] = None, admins: Optional[List[str]] = None, viewers: Optional[List[str]] = None, - action_link_template: Optional[str] = None, - data_source_parameter: Optional["HttpRequestParameter"] = None, + action_link_template: Optional[str] = "", + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, **kwargs ): - super(HttpRequestDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, **kwargs) + super(HttpRequestDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, 
**kwargs) self.data_source_type = 'HttpRequest' # type: str self.data_source_parameter = data_source_parameter @@ -4636,8 +5551,8 @@ class HttpRequestDataFeedPatch(DataFeedDetailPatch): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -4685,6 +5600,12 @@ class HttpRequestDataFeedPatch(DataFeedDetailPatch): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.HttpRequestParameter """ @@ -4717,6 +5638,8 @@ class HttpRequestDataFeedPatch(DataFeedDetailPatch): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'HttpRequestParameter'}, } @@ -4742,10 +5665,12 @@ def __init__( viewers: Optional[List[str]] = None, status: Optional[Union[str, "EntityStatus"]] = None, action_link_template: Optional[str] = None, + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, data_source_parameter: Optional["HttpRequestParameter"] = None, **kwargs ): - super(HttpRequestDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, **kwargs) + super(HttpRequestDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, 
roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 'HttpRequest' # type: str self.data_source_parameter = data_source_parameter @@ -4809,16 +5734,24 @@ class IncidentProperty(msrest.serialization.Model): only return for alerting incident result. Possible values include: "Active", "Resolved". :vartype incident_status: str or ~azure.ai.metricsadvisor.models.IncidentStatus + :ivar value_of_root_node: value of the root node. + :vartype value_of_root_node: float + :ivar expected_value_of_root_node: expected value of the root node given by smart detector. + :vartype expected_value_of_root_node: float """ _validation = { 'max_severity': {'required': True}, 'incident_status': {'readonly': True}, + 'value_of_root_node': {'readonly': True}, + 'expected_value_of_root_node': {'readonly': True}, } _attribute_map = { 'max_severity': {'key': 'maxSeverity', 'type': 'str'}, 'incident_status': {'key': 'incidentStatus', 'type': 'str'}, + 'value_of_root_node': {'key': 'valueOfRootNode', 'type': 'float'}, + 'expected_value_of_root_node': {'key': 'expectedValueOfRootNode', 'type': 'float'}, } def __init__( @@ -4830,6 +5763,8 @@ def __init__( super(IncidentProperty, self).__init__(**kwargs) self.max_severity = max_severity self.incident_status = None + self.value_of_root_node = None + self.expected_value_of_root_node = None class IncidentResult(msrest.serialization.Model): @@ -4942,8 +5877,8 @@ class InfluxDBDataFeed(DataFeedDetail): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str @@ -5008,7 +5943,13 @@ class InfluxDBDataFeed(DataFeedDetail): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. :type action_link_template: str - :param data_source_parameter: + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: Required. 
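The IncidentProperty hunk above adds two server-populated root-node fields. Both are marked readonly, so client code can only read them off results returned by the service; a locally constructed instance leaves them as None. A small sketch (import path per the docstring references; "High" assumes the usual severity literals):

from azure.ai.metricsadvisor.models import IncidentProperty

prop = IncidentProperty(max_severity="High")
print(prop.value_of_root_node)           # None locally; filled in by the server on query results
print(prop.expected_value_of_root_node)  # likewise, the smart detector's expected value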
:type data_source_parameter: ~azure.ai.metricsadvisor.models.InfluxDBParameter """ @@ -5027,6 +5968,7 @@ class InfluxDBDataFeed(DataFeedDetail): 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, } _attribute_map = { @@ -5058,6 +6000,8 @@ class InfluxDBDataFeed(DataFeedDetail): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'InfluxDBParameter'}, } @@ -5068,10 +6012,11 @@ def __init__( granularity_name: Union[str, "Granularity"], metrics: List["Metric"], data_start_from: datetime.datetime, - data_feed_description: Optional[str] = None, + data_source_parameter: "InfluxDBParameter", + data_feed_description: Optional[str] = "", granularity_amount: Optional[int] = None, dimension: Optional[List["Dimension"]] = None, - timestamp_column: Optional[str] = None, + timestamp_column: Optional[str] = "", start_offset_in_seconds: Optional[int] = 0, max_concurrency: Optional[int] = -1, min_retry_interval_in_seconds: Optional[int] = -1, @@ -5085,11 +6030,12 @@ def __init__( view_mode: Optional[Union[str, "ViewMode"]] = None, admins: Optional[List[str]] = None, viewers: Optional[List[str]] = None, - action_link_template: Optional[str] = None, - data_source_parameter: Optional["InfluxDBParameter"] = None, + action_link_template: Optional[str] = "", + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, **kwargs ): - super(InfluxDBDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, **kwargs) + super(InfluxDBDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) 
self.data_source_type = 'InfluxDB' # type: str self.data_source_parameter = data_source_parameter @@ -5101,8 +6047,8 @@ class InfluxDBDataFeedPatch(DataFeedDetailPatch): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -5150,6 +6096,12 @@ class InfluxDBDataFeedPatch(DataFeedDetailPatch): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.InfluxDBParameter """ @@ -5182,6 +6134,8 @@ class InfluxDBDataFeedPatch(DataFeedDetailPatch): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'InfluxDBParameter'}, } @@ -5207,10 +6161,12 @@ def __init__( viewers: Optional[List[str]] = None, status: Optional[Union[str, "EntityStatus"]] = None, action_link_template: Optional[str] = None, + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, data_source_parameter: Optional["InfluxDBParameter"] = None, **kwargs ): - super(InfluxDBDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, **kwargs) + super(InfluxDBDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, 
roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 'InfluxDB' # type: str self.data_source_parameter = data_source_parameter @@ -5488,7 +6444,7 @@ def __init__( *, anomaly_detection_configuration_id: str, anomaly_scope_type: Union[str, "AnomalyScope"], - negation_operation: Optional[bool] = None, + negation_operation: Optional[bool] = False, dimension_anomaly_scope: Optional["DimensionGroupIdentity"] = None, top_n_anomaly_scope: Optional["TopNGroupScope"] = None, severity_filter: Optional["SeverityCondition"] = None, @@ -5573,11 +6529,11 @@ class MetricDataQueryOptions(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param start_time: Required. start time of query a time series data, and format should be yyyy- MM-ddThh:mm:ssZ. + :param start_time: Required. start time of query a time series data, and format should be + yyyy-MM-ddThh:mm:ssZ. :type start_time: ~datetime.datetime - :param end_time: Required. start time of query a time series data, and format should be yyyy- MM-ddThh:mm:ssZ. + :param end_time: Required. end time of query a time series data, and format should be + yyyy-MM-ddThh:mm:ssZ. :type end_time: ~datetime.datetime :param series: Required. query specific series. :type series: list[dict[str, str]] @@ -5857,8 +6813,8 @@ class MongoDBDataFeed(DataFeedDetail): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str @@ -5923,7 +6879,13 @@ class MongoDBDataFeed(DataFeedDetail): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. :type action_link_template: str - :param data_source_parameter: + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: Required.
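The MetricDataQueryOptions hunks above rewrap the timestamp documentation (and fix the start-time description that had been copied onto end_time), and they pin down the expected shapes: start_time/end_time are datetimes serialized in the documented yyyy-MM-ddThh:mm:ssZ form, and series is a list of dimension dictionaries. A sketch (dimension names and values are placeholders):

import datetime
from azure.ai.metricsadvisor.models import MetricDataQueryOptions

options = MetricDataQueryOptions(
    start_time=datetime.datetime(2021, 1, 1, tzinfo=datetime.timezone.utc),  # serialized in the yyyy-MM-ddThh:mm:ssZ form
    end_time=datetime.datetime(2021, 1, 8, tzinfo=datetime.timezone.utc),
    series=[{"city": "Tokyo", "category": "Handmade"}],                      # placeholder dimension values
)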
:type data_source_parameter: ~azure.ai.metricsadvisor.models.MongoDBParameter """ @@ -5942,6 +6904,7 @@ class MongoDBDataFeed(DataFeedDetail): 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, } _attribute_map = { @@ -5973,6 +6936,8 @@ class MongoDBDataFeed(DataFeedDetail): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'MongoDBParameter'}, } @@ -5983,10 +6948,11 @@ def __init__( granularity_name: Union[str, "Granularity"], metrics: List["Metric"], data_start_from: datetime.datetime, - data_feed_description: Optional[str] = None, + data_source_parameter: "MongoDBParameter", + data_feed_description: Optional[str] = "", granularity_amount: Optional[int] = None, dimension: Optional[List["Dimension"]] = None, - timestamp_column: Optional[str] = None, + timestamp_column: Optional[str] = "", start_offset_in_seconds: Optional[int] = 0, max_concurrency: Optional[int] = -1, min_retry_interval_in_seconds: Optional[int] = -1, @@ -6000,11 +6966,12 @@ def __init__( view_mode: Optional[Union[str, "ViewMode"]] = None, admins: Optional[List[str]] = None, viewers: Optional[List[str]] = None, - action_link_template: Optional[str] = None, - data_source_parameter: Optional["MongoDBParameter"] = None, + action_link_template: Optional[str] = "", + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, **kwargs ): - super(MongoDBDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, **kwargs) + super(MongoDBDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 
'MongoDB' # type: str self.data_source_parameter = data_source_parameter @@ -6016,8 +6983,8 @@ class MongoDBDataFeedPatch(DataFeedDetailPatch): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -6065,6 +7032,12 @@ class MongoDBDataFeedPatch(DataFeedDetailPatch): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.MongoDBParameter """ @@ -6097,6 +7070,8 @@ class MongoDBDataFeedPatch(DataFeedDetailPatch): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'MongoDBParameter'}, } @@ -6122,10 +7097,12 @@ def __init__( viewers: Optional[List[str]] = None, status: Optional[Union[str, "EntityStatus"]] = None, action_link_template: Optional[str] = None, + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, data_source_parameter: Optional["MongoDBParameter"] = None, **kwargs ): - super(MongoDBDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, **kwargs) + super(MongoDBDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, 
all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 'MongoDB' # type: str self.data_source_parameter = data_source_parameter @@ -6178,8 +7155,8 @@ class MySqlDataFeed(DataFeedDetail): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str @@ -6244,7 +7221,13 @@ class MySqlDataFeed(DataFeedDetail): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. :type action_link_template: str - :param data_source_parameter: + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: Required. :type data_source_parameter: ~azure.ai.metricsadvisor.models.SqlSourceParameter """ @@ -6263,6 +7246,7 @@ class MySqlDataFeed(DataFeedDetail): 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, } _attribute_map = { @@ -6294,6 +7278,8 @@ class MySqlDataFeed(DataFeedDetail): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'SqlSourceParameter'}, } @@ -6304,10 +7290,11 @@ def __init__( granularity_name: Union[str, "Granularity"], metrics: List["Metric"], data_start_from: datetime.datetime, - data_feed_description: Optional[str] = None, + data_source_parameter: "SqlSourceParameter", + data_feed_description: Optional[str] = "", granularity_amount: Optional[int] = None, dimension: Optional[List["Dimension"]] = None, - timestamp_column: Optional[str] = None, + timestamp_column: Optional[str] = "", start_offset_in_seconds: Optional[int] = 0, max_concurrency: Optional[int] = -1, min_retry_interval_in_seconds: Optional[int] = -1, @@ -6321,11 +7308,12 @@ def __init__( view_mode: Optional[Union[str, "ViewMode"]] = None, admins: Optional[List[str]] = None, viewers: Optional[List[str]] = None, - action_link_template: Optional[str] = None, - data_source_parameter: Optional["SqlSourceParameter"] = None, + action_link_template: Optional[str] = "", + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = 
None, **kwargs ): - super(MySqlDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, **kwargs) + super(MySqlDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 'MySql' # type: str self.data_source_parameter = data_source_parameter @@ -6337,8 +7325,8 @@ class MySqlDataFeedPatch(DataFeedDetailPatch): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -6386,6 +7374,12 @@ class MySqlDataFeedPatch(DataFeedDetailPatch): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. 
+ :type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.SqlSourceParameter """ @@ -6418,6 +7412,8 @@ class MySqlDataFeedPatch(DataFeedDetailPatch): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'SqlSourceParameter'}, } @@ -6443,10 +7439,12 @@ def __init__( viewers: Optional[List[str]] = None, status: Optional[Union[str, "EntityStatus"]] = None, action_link_template: Optional[str] = None, + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, data_source_parameter: Optional["SqlSourceParameter"] = None, **kwargs ): - super(MySqlDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, **kwargs) + super(MySqlDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 'MySql' # type: str self.data_source_parameter = data_source_parameter @@ -6471,7 +7469,7 @@ class PeriodFeedback(MetricFeedback): :type metric_id: str :param dimension_filter: Required. :type dimension_filter: ~azure.ai.metricsadvisor.models.FeedbackDimensionFilter - :param value: + :param value: Required. :type value: ~azure.ai.metricsadvisor.models.PeriodFeedbackValue """ @@ -6482,6 +7480,7 @@ class PeriodFeedback(MetricFeedback): 'user_principal': {'readonly': True}, 'metric_id': {'required': True}, 'dimension_filter': {'required': True}, + 'value': {'required': True}, } _attribute_map = { @@ -6499,7 +7498,7 @@ def __init__( *, metric_id: str, dimension_filter: "FeedbackDimensionFilter", - value: Optional["PeriodFeedbackValue"] = None, + value: "PeriodFeedbackValue", **kwargs ): super(PeriodFeedback, self).__init__(metric_id=metric_id, dimension_filter=dimension_filter, **kwargs) @@ -6551,8 +7550,8 @@ class PostgreSqlDataFeed(DataFeedDetail): :param data_source_type: Required. 
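The PeriodFeedback hunk above promotes value to a required argument, matching the new 'value': {'required': True} validation entry. A sketch; the PeriodFeedbackValue and FeedbackDimensionFilter field names (period_type/period_value, dimension) are assumptions about the generated models rather than something this diff shows:

from azure.ai.metricsadvisor.models import (
    FeedbackDimensionFilter,
    PeriodFeedback,
    PeriodFeedbackValue,
)

feedback = PeriodFeedback(
    metric_id="<metric-id>",                                                # placeholder
    dimension_filter=FeedbackDimensionFilter(dimension={"city": "Tokyo"}),  # assumed field name
    value=PeriodFeedbackValue(period_type="AssignValue", period_value=7),   # now required; assumed fields
)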
data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str @@ -6617,7 +7616,13 @@ class PostgreSqlDataFeed(DataFeedDetail): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. :type action_link_template: str - :param data_source_parameter: + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: Required. :type data_source_parameter: ~azure.ai.metricsadvisor.models.SqlSourceParameter """ @@ -6636,6 +7641,7 @@ class PostgreSqlDataFeed(DataFeedDetail): 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, } _attribute_map = { @@ -6667,6 +7673,8 @@ class PostgreSqlDataFeed(DataFeedDetail): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'SqlSourceParameter'}, } @@ -6677,10 +7685,11 @@ def __init__( granularity_name: Union[str, "Granularity"], metrics: List["Metric"], data_start_from: datetime.datetime, - data_feed_description: Optional[str] = None, + data_source_parameter: "SqlSourceParameter", + data_feed_description: Optional[str] = "", granularity_amount: Optional[int] = None, dimension: Optional[List["Dimension"]] = None, - timestamp_column: Optional[str] = None, + timestamp_column: Optional[str] = "", start_offset_in_seconds: Optional[int] = 0, max_concurrency: Optional[int] = -1, min_retry_interval_in_seconds: Optional[int] = -1, @@ -6694,11 +7703,12 @@ def __init__( view_mode: Optional[Union[str, "ViewMode"]] = None, admins: Optional[List[str]] = None, viewers: Optional[List[str]] = None, - action_link_template: Optional[str] = None, - data_source_parameter: Optional["SqlSourceParameter"] = None, + action_link_template: Optional[str] = "", + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, **kwargs ): - super(PostgreSqlDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, 
need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, **kwargs) + super(PostgreSqlDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 'PostgreSql' # type: str self.data_source_parameter = data_source_parameter @@ -6710,8 +7720,8 @@ class PostgreSqlDataFeedPatch(DataFeedDetailPatch): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -6759,6 +7769,12 @@ class PostgreSqlDataFeedPatch(DataFeedDetailPatch): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. 
+ :type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.SqlSourceParameter """ @@ -6791,6 +7807,8 @@ class PostgreSqlDataFeedPatch(DataFeedDetailPatch): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'SqlSourceParameter'}, } @@ -6816,10 +7834,12 @@ def __init__( viewers: Optional[List[str]] = None, status: Optional[Union[str, "EntityStatus"]] = None, action_link_template: Optional[str] = None, + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, data_source_parameter: Optional["SqlSourceParameter"] = None, **kwargs ): - super(PostgreSqlDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, **kwargs) + super(PostgreSqlDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 'PostgreSql' # type: str self.data_source_parameter = data_source_parameter @@ -7072,6 +8092,288 @@ def __init__( self.value = value +class ServicePrincipalCredential(DataSourceCredential): + """ServicePrincipalCredential. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param data_source_credential_type: Required. Type of data source credential.Constant filled by + server. Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type data_source_credential_type: str or + ~azure.ai.metricsadvisor.models.DataSourceCredentialType + :ivar data_source_credential_id: Unique id of data source credential. + :vartype data_source_credential_id: str + :param data_source_credential_name: Required. Name of data source credential. 
+ :type data_source_credential_name: str + :param data_source_credential_description: Description of data source credential. + :type data_source_credential_description: str + :param parameters: Required. + :type parameters: ~azure.ai.metricsadvisor.models.ServicePrincipalParam + """ + + _validation = { + 'data_source_credential_type': {'required': True}, + 'data_source_credential_id': {'readonly': True}, + 'data_source_credential_name': {'required': True}, + 'parameters': {'required': True}, + } + + _attribute_map = { + 'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'}, + 'data_source_credential_id': {'key': 'dataSourceCredentialId', 'type': 'str'}, + 'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'}, + 'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': 'ServicePrincipalParam'}, + } + + def __init__( + self, + *, + data_source_credential_name: str, + parameters: "ServicePrincipalParam", + data_source_credential_description: Optional[str] = None, + **kwargs + ): + super(ServicePrincipalCredential, self).__init__(data_source_credential_name=data_source_credential_name, data_source_credential_description=data_source_credential_description, **kwargs) + self.data_source_credential_type = 'ServicePrincipal' # type: str + self.parameters = parameters + + + class ServicePrincipalCredentialPatch(DataSourceCredentialPatch): + """ServicePrincipalCredentialPatch. + + All required parameters must be populated in order to send to Azure. + + :param data_source_credential_type: Required. Type of data source credential.Constant filled by + server. Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type data_source_credential_type: str or + ~azure.ai.metricsadvisor.models.DataSourceCredentialType + :param data_source_credential_name: Name of data source credential. + :type data_source_credential_name: str + :param data_source_credential_description: Description of data source credential. + :type data_source_credential_description: str + :param parameters: + :type parameters: ~azure.ai.metricsadvisor.models.ServicePrincipalParam + """ + + _validation = { + 'data_source_credential_type': {'required': True}, + } + + _attribute_map = { + 'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'}, + 'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'}, + 'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': 'ServicePrincipalParam'}, + } + + def __init__( + self, + *, + data_source_credential_name: Optional[str] = None, + data_source_credential_description: Optional[str] = None, + parameters: Optional["ServicePrincipalParam"] = None, + **kwargs + ): + super(ServicePrincipalCredentialPatch, self).__init__(data_source_credential_name=data_source_credential_name, data_source_credential_description=data_source_credential_description, **kwargs) + self.data_source_credential_type = 'ServicePrincipal' # type: str + self.parameters = parameters + + + class ServicePrincipalInKVCredential(DataSourceCredential): + """ServicePrincipalInKVCredential. + + Variables are only populated by the server, and will be ignored when sending a request.
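ServicePrincipalCredential above requires both a name and a parameters block, while the id is server-assigned and the type discriminator is set by the constructor. A sketch built only from fields shown in this diff (import path per the docstring references):

from azure.ai.metricsadvisor.models import ServicePrincipalCredential, ServicePrincipalParam

credential = ServicePrincipalCredential(
    data_source_credential_name="sp-for-sql-feeds",   # required
    parameters=ServicePrincipalParam(                 # required
        client_id="<client-id>",
        client_secret="<client-secret>",
        tenant_id="<tenant-id>",
    ),
    data_source_credential_description="optional text",
)
# data_source_credential_type is fixed to 'ServicePrincipal' by __init__;
# data_source_credential_id is readonly and assigned by the service.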
+ + All required parameters must be populated in order to send to Azure. + + :param data_source_credential_type: Required. Type of data source credential.Constant filled by + server. Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type data_source_credential_type: str or + ~azure.ai.metricsadvisor.models.DataSourceCredentialType + :ivar data_source_credential_id: Unique id of data source credential. + :vartype data_source_credential_id: str + :param data_source_credential_name: Required. Name of data source credential. + :type data_source_credential_name: str + :param data_source_credential_description: Description of data source credential. + :type data_source_credential_description: str + :param parameters: Required. + :type parameters: ~azure.ai.metricsadvisor.models.ServicePrincipalInKVParam + """ + + _validation = { + 'data_source_credential_type': {'required': True}, + 'data_source_credential_id': {'readonly': True}, + 'data_source_credential_name': {'required': True}, + 'parameters': {'required': True}, + } + + _attribute_map = { + 'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'}, + 'data_source_credential_id': {'key': 'dataSourceCredentialId', 'type': 'str'}, + 'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'}, + 'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': 'ServicePrincipalInKVParam'}, + } + + def __init__( + self, + *, + data_source_credential_name: str, + parameters: "ServicePrincipalInKVParam", + data_source_credential_description: Optional[str] = None, + **kwargs + ): + super(ServicePrincipalInKVCredential, self).__init__(data_source_credential_name=data_source_credential_name, data_source_credential_description=data_source_credential_description, **kwargs) + self.data_source_credential_type = 'ServicePrincipalInKV' # type: str + self.parameters = parameters + + + class ServicePrincipalInKVCredentialPatch(DataSourceCredentialPatch): + """ServicePrincipalInKVCredentialPatch. + + All required parameters must be populated in order to send to Azure. + + :param data_source_credential_type: Required. Type of data source credential.Constant filled by + server. Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type data_source_credential_type: str or + ~azure.ai.metricsadvisor.models.DataSourceCredentialType + :param data_source_credential_name: Name of data source credential. + :type data_source_credential_name: str + :param data_source_credential_description: Description of data source credential.
+ :type data_source_credential_description: str + :param parameters: + :type parameters: ~azure.ai.metricsadvisor.models.ServicePrincipalInKVParam + """ + + _validation = { + 'data_source_credential_type': {'required': True}, + } + + _attribute_map = { + 'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'}, + 'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'}, + 'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': 'ServicePrincipalInKVParam'}, + } + + def __init__( + self, + *, + data_source_credential_name: Optional[str] = None, + data_source_credential_description: Optional[str] = None, + parameters: Optional["ServicePrincipalInKVParam"] = None, + **kwargs + ): + super(ServicePrincipalInKVCredentialPatch, self).__init__(data_source_credential_name=data_source_credential_name, data_source_credential_description=data_source_credential_description, **kwargs) + self.data_source_credential_type = 'ServicePrincipalInKV' # type: str + self.parameters = parameters + + + class ServicePrincipalInKVParam(msrest.serialization.Model): + """ServicePrincipalInKVParam. + + All required parameters must be populated in order to send to Azure. + + :param key_vault_endpoint: Required. The Key Vault endpoint that stores the service principal. + :type key_vault_endpoint: str + :param key_vault_client_id: Required. The Client Id to access the Key Vault. + :type key_vault_client_id: str + :param key_vault_client_secret: Required. The Client Secret to access the Key Vault. + :type key_vault_client_secret: str + :param service_principal_id_name_in_kv: Required. The secret name of the service principal's + client Id in the Key Vault. + :type service_principal_id_name_in_kv: str + :param service_principal_secret_name_in_kv: Required. The secret name of the service + principal's client secret in the Key Vault. + :type service_principal_secret_name_in_kv: str + :param tenant_id: Required. The tenant id of your service principal.
+ :type tenant_id: str + """ + + _validation = { + 'key_vault_endpoint': {'required': True}, + 'key_vault_client_id': {'required': True}, + 'key_vault_client_secret': {'required': True}, + 'service_principal_id_name_in_kv': {'required': True}, + 'service_principal_secret_name_in_kv': {'required': True}, + 'tenant_id': {'required': True}, + } + + _attribute_map = { + 'key_vault_endpoint': {'key': 'keyVaultEndpoint', 'type': 'str'}, + 'key_vault_client_id': {'key': 'keyVaultClientId', 'type': 'str'}, + 'key_vault_client_secret': {'key': 'keyVaultClientSecret', 'type': 'str'}, + 'service_principal_id_name_in_kv': {'key': 'servicePrincipalIdNameInKV', 'type': 'str'}, + 'service_principal_secret_name_in_kv': {'key': 'servicePrincipalSecretNameInKV', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + } + + def __init__( + self, + *, + key_vault_endpoint: str, + key_vault_client_id: str, + key_vault_client_secret: str, + service_principal_id_name_in_kv: str, + service_principal_secret_name_in_kv: str, + tenant_id: str, + **kwargs + ): + super(ServicePrincipalInKVParam, self).__init__(**kwargs) + self.key_vault_endpoint = key_vault_endpoint + self.key_vault_client_id = key_vault_client_id + self.key_vault_client_secret = key_vault_client_secret + self.service_principal_id_name_in_kv = service_principal_id_name_in_kv + self.service_principal_secret_name_in_kv = service_principal_secret_name_in_kv + self.tenant_id = tenant_id + + +class ServicePrincipalParam(msrest.serialization.Model): + """ServicePrincipalParam. + + All required parameters must be populated in order to send to Azure. + + :param client_id: Required. The client id of the service principal. + :type client_id: str + :param client_secret: Required. The client secret of the service principal. + :type client_secret: str + :param tenant_id: Required. The tenant id of the service principal. + :type tenant_id: str + """ + + _validation = { + 'client_id': {'required': True}, + 'client_secret': {'required': True}, + 'tenant_id': {'required': True}, + } + + _attribute_map = { + 'client_id': {'key': 'clientId', 'type': 'str'}, + 'client_secret': {'key': 'clientSecret', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + } + + def __init__( + self, + *, + client_id: str, + client_secret: str, + tenant_id: str, + **kwargs + ): + super(ServicePrincipalParam, self).__init__(**kwargs) + self.client_id = client_id + self.client_secret = client_secret + self.tenant_id = tenant_id + + class SeverityCondition(msrest.serialization.Model): """SeverityCondition. @@ -7190,8 +8492,8 @@ class SQLServerDataFeed(DataFeedDetail): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str @@ -7256,7 +8558,13 @@ class SQLServerDataFeed(DataFeedDetail): :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. 
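ServicePrincipalInKVParam above keeps the service principal itself in Key Vault: the credential entity stores only the vault endpoint, a client allowed to read the vault, and the secret names under which the principal's id and secret live. A sketch using only fields defined in this diff (vault URL and secret names are placeholders):

from azure.ai.metricsadvisor.models import (
    ServicePrincipalInKVCredential,
    ServicePrincipalInKVParam,
)

kv_credential = ServicePrincipalInKVCredential(
    data_source_credential_name="sp-in-keyvault",
    parameters=ServicePrincipalInKVParam(
        key_vault_endpoint="https://my-vault.vault.azure.net/",   # placeholder vault endpoint
        key_vault_client_id="<kv-reader-client-id>",
        key_vault_client_secret="<kv-reader-client-secret>",
        service_principal_id_name_in_kv="sp-client-id",           # secret name holding the client id
        service_principal_secret_name_in_kv="sp-client-secret",   # secret name holding the client secret
        tenant_id="<tenant-id>",
    ),
)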
:type action_link_template: str - :param data_source_parameter: + :param authentication_type: authentication type for corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id. + :type credential_id: str + :param data_source_parameter: Required. :type data_source_parameter: ~azure.ai.metricsadvisor.models.SqlSourceParameter """ @@ -7275,6 +8583,7 @@ class SQLServerDataFeed(DataFeedDetail): 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, + 'data_source_parameter': {'required': True}, } _attribute_map = { @@ -7306,6 +8615,8 @@ class SQLServerDataFeed(DataFeedDetail): 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'SqlSourceParameter'}, } @@ -7316,10 +8627,11 @@ def __init__( granularity_name: Union[str, "Granularity"], metrics: List["Metric"], data_start_from: datetime.datetime, - data_feed_description: Optional[str] = None, + data_source_parameter: "SqlSourceParameter", + data_feed_description: Optional[str] = "", granularity_amount: Optional[int] = None, dimension: Optional[List["Dimension"]] = None, - timestamp_column: Optional[str] = None, + timestamp_column: Optional[str] = "", start_offset_in_seconds: Optional[int] = 0, max_concurrency: Optional[int] = -1, min_retry_interval_in_seconds: Optional[int] = -1, @@ -7333,11 +8645,12 @@ def __init__( view_mode: Optional[Union[str, "ViewMode"]] = None, admins: Optional[List[str]] = None, viewers: Optional[List[str]] = None, - action_link_template: Optional[str] = None, - data_source_parameter: Optional["SqlSourceParameter"] = None, + action_link_template: Optional[str] = "", + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, **kwargs ): - super(SQLServerDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, **kwargs) + super(SQLServerDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, 
min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 'SqlServer' # type: str self.data_source_parameter = data_source_parameter @@ -7349,8 +8662,8 @@ class SQLServerDataFeedPatch(DataFeedDetailPatch): :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", - "AzureDataLakeStorageGen2", "AzureTable", "Elasticsearch", "HttpRequest", "InfluxDB", - "MongoDB", "MySql", "PostgreSql", "SqlServer". + "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureTable", "Elasticsearch", "HttpRequest", + "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str @@ -7398,6 +8711,12 @@ class SQLServerDataFeedPatch(DataFeedDetailPatch): :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str + :param authentication_type: authentication type for the corresponding data source. Possible values + include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", + "ServicePrincipal", "ServicePrincipalInKV". + :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum + :param credential_id: The credential entity id.
+ :type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.SqlSourceParameter """ @@ -7430,6 +8749,8 @@ class SQLServerDataFeedPatch(DataFeedDetailPatch): 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'SqlSourceParameter'}, } @@ -7455,10 +8776,12 @@ def __init__( viewers: Optional[List[str]] = None, status: Optional[Union[str, "EntityStatus"]] = None, action_link_template: Optional[str] = None, + authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, + credential_id: Optional[str] = None, data_source_parameter: Optional["SqlSourceParameter"] = None, **kwargs ): - super(SQLServerDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, **kwargs) + super(SQLServerDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 'SqlServer' # type: str self.data_source_parameter = data_source_parameter @@ -7468,14 +8791,13 @@ class SqlSourceParameter(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param connection_string: Required. Database connection string. + :param connection_string: Database connection string. :type connection_string: str :param query: Required. Query script. :type query: str """ _validation = { - 'connection_string': {'required': True}, 'query': {'required': True}, } @@ -7487,8 +8809,8 @@ class SqlSourceParameter(msrest.serialization.Model): def __init__( self, *, - connection_string: str, query: str, + connection_string: Optional[str] = None, **kwargs ): super(SqlSourceParameter, self).__init__(**kwargs) @@ -7631,6 +8953,9 @@ class ValueCondition(msrest.serialization.Model): :param direction: Required. value filter direction. Possible values include: "Both", "Down", "Up". 
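
The net effect of the SQLServerDataFeed changes above: data_source_parameter is now a required constructor argument, SqlSourceParameter.connection_string becomes optional, and the new authentication_type/credential_id pair lets the connection be resolved through a credential entity instead of an inline connection string. A sketch reusing _models from the earlier example; the Metric field name and the Granularity value are assumptions, since neither is defined in this hunk:

    import datetime

    feed = _models.SQLServerDataFeed(
        data_feed_name="sales-feed",
        granularity_name="Daily",                          # Granularity value assumed
        metrics=[_models.Metric(metric_name="revenue")],   # Metric field name assumed
        data_start_from=datetime.datetime(2021, 1, 1),
        data_source_parameter=_models.SqlSourceParameter(  # now required up front
            query="select * from sales where ts >= @StartTime",
            # connection_string omitted: it is optional once the connection
            # comes from a credential entity.
        ),
        authentication_type="ServicePrincipal",
        credential_id="<credential-entity-id>",
    )
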
:type direction: str or ~azure.ai.metricsadvisor.models.Direction + :param type: data used to implement value filter. Possible values include: "Value", "Mean". + Default value: "Value". + :type type: str or ~azure.ai.metricsadvisor.models.ValueType :param metric_id: the other metric unique id used for value filter. :type metric_id: str :param trigger_for_missing: trigger alert when the corresponding point is missing in the other @@ -7648,6 +8973,7 @@ class ValueCondition(msrest.serialization.Model): 'lower': {'key': 'lower', 'type': 'float'}, 'upper': {'key': 'upper', 'type': 'float'}, 'direction': {'key': 'direction', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, 'metric_id': {'key': 'metricId', 'type': 'str'}, 'trigger_for_missing': {'key': 'triggerForMissing', 'type': 'bool'}, } @@ -7658,6 +8984,7 @@ def __init__( direction: Union[str, "Direction"], lower: Optional[float] = None, upper: Optional[float] = None, + type: Optional[Union[str, "ValueType"]] = "Value", metric_id: Optional[str] = None, trigger_for_missing: Optional[bool] = None, **kwargs @@ -7666,6 +8993,7 @@ def __init__( self.lower = lower self.upper = upper self.direction = direction + self.type = type self.metric_id = metric_id self.trigger_for_missing = trigger_for_missing @@ -7690,7 +9018,7 @@ class WebhookHookInfo(HookInfo): :type external_link: str :ivar admins: hook administrators. :vartype admins: list[str] - :param hook_parameter: + :param hook_parameter: Required. :type hook_parameter: ~azure.ai.metricsadvisor.models.WebhookHookParameter """ @@ -7699,6 +9027,7 @@ class WebhookHookInfo(HookInfo): 'hook_id': {'readonly': True}, 'hook_name': {'required': True}, 'admins': {'readonly': True, 'unique': True}, + 'hook_parameter': {'required': True}, } _attribute_map = { @@ -7715,9 +9044,9 @@ def __init__( self, *, hook_name: str, - description: Optional[str] = None, - external_link: Optional[str] = None, - hook_parameter: Optional["WebhookHookParameter"] = None, + hook_parameter: "WebhookHookParameter", + description: Optional[str] = "", + external_link: Optional[str] = "", **kwargs ): super(WebhookHookInfo, self).__init__(hook_name=hook_name, description=description, external_link=external_link, **kwargs) diff --git a/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/operations/_azure_cognitive_service_metrics_advisor_restapi_open_ap_iv2_operations.py b/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/operations/_azure_cognitive_service_metrics_advisor_restapi_open_ap_iv2_operations.py index 91adf180e131..940f19c198e5 100644 --- a/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/operations/_azure_cognitive_service_metrics_advisor_restapi_open_ap_iv2_operations.py +++ b/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_generated/operations/_azure_cognitive_service_metrics_advisor_restapi_open_ap_iv2_operations.py @@ -311,7 +311,7 @@ def get_alerts_by_anomaly_alerting_configuration( configuration_id, # type: str body, # type: "_models.AlertingResultQuery" skip=None, # type: Optional[int] - top=None, # type: Optional[int] + maxpagesize=None, # type: Optional[int] **kwargs # type: Any ): # type: (...) -> Iterable["_models.AlertResultList"] @@ -325,8 +325,8 @@ def get_alerts_by_anomaly_alerting_configuration( :type body: ~azure.ai.metricsadvisor.models.AlertingResultQuery :param skip: for paging, skipped number. :type skip: int - :param top: for paging, item number in response. 
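
Two smaller model changes sit in the hunks just above, before the operations-file diff begins: ValueCondition gains a type field selecting whether the filter compares the raw value or the mean (defaulting to "Value"), and WebhookHookInfo now requires hook_parameter at construction time. A sketch; the WebhookHookParameter field name is an assumption, as that model is not part of this diff:

    # Trigger when the mean, rather than the raw value, rises above 100.
    condition = _models.ValueCondition(
        direction="Up",
        upper=100.0,
        type="Mean",  # overrides the "Value" default
    )

    # hook_parameter is now required and keyword-only.
    hook = _models.WebhookHookInfo(
        hook_name="ops-webhook",
        hook_parameter=_models.WebhookHookParameter(
            endpoint="https://contoso.example/alert-sink",  # field name assumed
        ),
    )
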
- :type top: int + :param maxpagesize: the maximum number of items in one page. + :type maxpagesize: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either AlertResultList or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.ai.metricsadvisor.models.AlertResultList] @@ -358,8 +358,8 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] if skip is not None: query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(body, 'AlertingResultQuery') @@ -411,7 +411,7 @@ def get_anomalies_from_alert_by_anomaly_alerting_configuration( configuration_id, # type: str alert_id, # type: str skip=None, # type: Optional[int] - top=None, # type: Optional[int] + maxpagesize=None, # type: Optional[int] **kwargs # type: Any ): # type: (...) -> Iterable["_models.AnomalyResultList"] @@ -425,8 +425,8 @@ def get_anomalies_from_alert_by_anomaly_alerting_configuration( :type alert_id: str :param skip: for paging, skipped number. :type skip: int - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. + :type maxpagesize: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either AnomalyResultList or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.ai.metricsadvisor.models.AnomalyResultList] @@ -457,8 +457,8 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] if skip is not None: query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') request = self._client.get(url, query_parameters, header_parameters) else: @@ -503,7 +503,7 @@ def get_incidents_from_alert_by_anomaly_alerting_configuration( configuration_id, # type: str alert_id, # type: str skip=None, # type: Optional[int] - top=None, # type: Optional[int] + maxpagesize=None, # type: Optional[int] **kwargs # type: Any ): # type: (...) -> Iterable["_models.IncidentResultList"] @@ -517,8 +517,8 @@ def get_incidents_from_alert_by_anomaly_alerting_configuration( :type alert_id: str :param skip: for paging, skipped number. :type skip: int - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. 
+ :type maxpagesize: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either IncidentResultList or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.ai.metricsadvisor.models.IncidentResultList] @@ -549,8 +549,8 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] if skip is not None: query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') request = self._client.get(url, query_parameters, header_parameters) else: @@ -990,7 +990,7 @@ def get_anomalies_by_anomaly_detection_configuration( configuration_id, # type: str body, # type: "_models.DetectionAnomalyResultQuery" skip=None, # type: Optional[int] - top=None, # type: Optional[int] + maxpagesize=None, # type: Optional[int] **kwargs # type: Any ): # type: (...) -> Iterable["_models.AnomalyResultList"] @@ -1004,8 +1004,8 @@ def get_anomalies_by_anomaly_detection_configuration( :type body: ~azure.ai.metricsadvisor.models.DetectionAnomalyResultQuery :param skip: for paging, skipped number. :type skip: int - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. + :type maxpagesize: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either AnomalyResultList or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.ai.metricsadvisor.models.AnomalyResultList] @@ -1037,8 +1037,8 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] if skip is not None: query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(body, 'DetectionAnomalyResultQuery') @@ -1090,7 +1090,7 @@ def get_dimension_of_anomalies_by_anomaly_detection_configuration( configuration_id, # type: str body, # type: "_models.AnomalyDimensionQuery" skip=None, # type: Optional[int] - top=None, # type: Optional[int] + maxpagesize=None, # type: Optional[int] **kwargs # type: Any ): # type: (...) -> Iterable["_models.AnomalyDimensionList"] @@ -1104,8 +1104,8 @@ def get_dimension_of_anomalies_by_anomaly_detection_configuration( :type body: ~azure.ai.metricsadvisor.models.AnomalyDimensionQuery :param skip: for paging, skipped number. :type skip: int - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. 
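
The top-to-maxpagesize rename repeated throughout these operations is mechanical, but the semantics shift: $top capped the item count in a response, while $maxpagesize caps the size of each page handed back by the pager, which still walks all pages transparently. A usage sketch; the client class name is inferred from this module's path, and the credential type and AlertingResultQuery field names are assumptions:

    import datetime
    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential  # credential type assumed
    from azure.ai.metricsadvisor._generated import (
        AzureCognitiveServiceMetricsAdvisorRESTAPIOpenAPIV2,  # name assumed from the module path
    )

    client = AzureCognitiveServiceMetricsAdvisorRESTAPIOpenAPIV2(
        credential=MetricsAdvisorKeyCredential("<subscription-key>", "<api-key>"),
        endpoint="https://<resource-name>.cognitiveservices.azure.com",
    )

    query = _models.AlertingResultQuery(  # field names assumed; the model is not in this diff
        start_time=datetime.datetime(2021, 1, 1),
        end_time=datetime.datetime(2021, 2, 1),
        time_mode="AnomalyTime",
    )

    pager = client.get_alerts_by_anomaly_alerting_configuration(
        configuration_id="<configuration-id>",
        body=query,
        skip=0,          # still sent as $skip
        maxpagesize=50,  # sent as $maxpagesize; bounds each page, not the total
    )
    for alert in pager:  # ItemPaged follows next_link across pages
        print(alert)
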
+ :type maxpagesize: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either AnomalyDimensionList or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.ai.metricsadvisor.models.AnomalyDimensionList] @@ -1137,8 +1137,8 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] if skip is not None: query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(body, 'AnomalyDimensionQuery') @@ -1189,7 +1189,7 @@ def get_incidents_by_anomaly_detection_configuration( self, configuration_id, # type: str body, # type: "_models.DetectionIncidentResultQuery" - top=None, # type: Optional[int] + maxpagesize=None, # type: Optional[int] **kwargs # type: Any ): # type: (...) -> Iterable["_models.IncidentResultList"] @@ -1201,8 +1201,8 @@ def get_incidents_by_anomaly_detection_configuration( :type configuration_id: str :param body: query detection incident result request. :type body: ~azure.ai.metricsadvisor.models.DetectionIncidentResultQuery - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. + :type maxpagesize: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either IncidentResultList or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.ai.metricsadvisor.models.IncidentResultList] @@ -1232,8 +1232,8 @@ def prepare_request(next_link=None): url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(body, 'DetectionIncidentResultQuery') @@ -1281,7 +1281,7 @@ def get_next(next_link=None): def get_incidents_by_anomaly_detection_configuration_next_pages( self, configuration_id, # type: str - top=None, # type: Optional[int] + maxpagesize=None, # type: Optional[int] token=None, # type: Optional[str] **kwargs # type: Any ): @@ -1292,8 +1292,8 @@ def get_incidents_by_anomaly_detection_configuration_next_pages( :param configuration_id: anomaly detection configuration unique id. :type configuration_id: str - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. + :type maxpagesize: int :param token: the token for getting the next page. 
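
Note that the two incident listings above differ from their siblings: there is no skip parameter, the first request takes only maxpagesize, and continuation runs through get_incidents_by_anomaly_detection_configuration_next_pages, which forwards a $token query parameter. Reusing client and _models from the earlier sketches; the DetectionIncidentResultQuery field names are assumptions:

    import datetime

    incident_query = _models.DetectionIncidentResultQuery(  # field names assumed
        start_time=datetime.datetime(2021, 1, 1),
        end_time=datetime.datetime(2021, 2, 1),
    )

    incidents = client.get_incidents_by_anomaly_detection_configuration(
        configuration_id="<configuration-id>",
        body=incident_query,
        maxpagesize=100,  # no $skip on this operation; paging is token-based
    )
    for incident in incidents:
        print(incident)
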
:type token: str :keyword callable cls: A custom type or function that will be passed the direct response @@ -1323,8 +1323,8 @@ def prepare_request(next_link=None): url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') if token is not None: query_parameters['$token'] = self._serialize.query("token", token, 'str') @@ -1447,6 +1447,318 @@ def get_next(next_link=None): ) get_root_cause_of_incident_by_anomaly_detection_configuration.metadata = {'url': '/enrichment/anomalyDetection/configurations/{configurationId}/incidents/{incidentId}/rootCause'} # type: ignore + def create_credential( + self, + body, # type: "_models.DataSourceCredential" + **kwargs # type: Any + ): + # type: (...) -> None + """Create a new data source credential. + + Create a new data source credential. + + :param body: Create data source credential request. + :type body: ~azure.ai.metricsadvisor.models.DataSourceCredential + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_credential.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(body, 'DataSourceCredential') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + + if cls: + return cls(pipeline_response, None, response_headers) + + create_credential.metadata = {'url': '/credentials'} # type: ignore + + def list_credentials( + self, + skip=None, # type: Optional[int] + maxpagesize=None, # type: Optional[int] + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.DataSourceCredentialList"] + """List all credentials. + + List all credentials. + + :param skip: for paging, skipped number. 
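
create_credential above POSTs a polymorphic DataSourceCredential and returns None; on a 201 the new entity's URL comes back in the Location header, which is deserialized into the response headers passed to cls. Only the base model name appears in this hunk, so the concrete subtype and its field names below are assumptions for illustration:

    body = _models.ServicePrincipalCredential(        # subtype name assumed
        data_source_credential_name="sp-credential",  # field name assumed
        parameters=sp,                                # the ServicePrincipalParam built earlier
    )
    client.create_credential(body=body)  # raises HttpResponseError on anything but 201
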
+ :type skip: int + :param maxpagesize: the maximum number of items in one page. + :type maxpagesize: int + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DataSourceCredentialList or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.metricsadvisor.models.DataSourceCredentialList] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.DataSourceCredentialList"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_credentials.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('DataSourceCredentialList', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_credentials.metadata = {'url': '/credentials'} # type: ignore + + def update_credential( + self, + credential_id, # type: str + body, # type: "_models.DataSourceCredentialPatch" + **kwargs # type: Any + ): + # type: (...) -> None + """Update a data source credential. + + Update a data source credential. + + :param credential_id: Data source credential unique ID. + :type credential_id: str + :param body: Update data source credential request. 
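
list_credentials follows the same paging pattern as the other listings: each page deserializes to a DataSourceCredentialList, items come from its value property, and the pager chases next_link until exhausted, so consumption is a plain loop:

    for credential in client.list_credentials(skip=0, maxpagesize=20):
        print(credential)  # each item is a DataSourceCredential
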
+ :type body: ~azure.ai.metricsadvisor.models.DataSourceCredentialPatch + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + content_type = kwargs.pop("content_type", "application/merge-patch+json") + accept = "application/json" + + # Construct URL + url = self.update_credential.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'credentialId': self._serialize.url("credential_id", credential_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(body, 'DataSourceCredentialPatch') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response) + raise HttpResponseError(response=response, model=error) + + if cls: + return cls(pipeline_response, None, {}) + + update_credential.metadata = {'url': '/credentials/{credentialId}'} # type: ignore + + def delete_credential( + self, + credential_id, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Delete a data source credential. + + Delete a data source credential. + + :param credential_id: Data source credential unique ID. 
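
update_credential above sends a PATCH with content type application/merge-patch+json, so the body needs to carry only the fields being changed; success is a bare 204. The patch model's field name below is an assumption, as DataSourceCredentialPatch is not defined in this hunk:

    patch = _models.DataSourceCredentialPatch(
        data_source_credential_name="sp-credential-renamed",  # field name assumed
    )
    client.update_credential(credential_id="<credential-id>", body=patch)  # returns None on 204
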
+ :type credential_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.delete_credential.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'credentialId': self._serialize.url("credential_id", credential_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response) + raise HttpResponseError(response=response, model=error) + + if cls: + return cls(pipeline_response, None, {}) + + delete_credential.metadata = {'url': '/credentials/{credentialId}'} # type: ignore + + def get_credential( + self, + credential_id, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.DataSourceCredential" + """Get a data source credential. + + Get a data source credential. + + :param credential_id: Data source credential unique ID. 
+ :type credential_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataSourceCredential, or the result of cls(response) + :rtype: ~azure.ai.metricsadvisor.models.DataSourceCredential + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.DataSourceCredential"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.get_credential.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'credentialId': self._serialize.url("credential_id", credential_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorCode, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('DataSourceCredential', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_credential.metadata = {'url': '/credentials/{credentialId}'} # type: ignore + def list_data_feeds( self, data_feed_name=None, # type: Optional[str] @@ -1455,7 +1767,7 @@ def list_data_feeds( status=None, # type: Optional[Union[str, "_models.EntityStatus"]] creator=None, # type: Optional[str] skip=None, # type: Optional[int] - top=None, # type: Optional[int] + maxpagesize=None, # type: Optional[int] **kwargs # type: Any ): # type: (...) -> Iterable["_models.DataFeedList"] @@ -1475,8 +1787,8 @@ def list_data_feeds( :type creator: str :param skip: for paging, skipped number. :type skip: int - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. 
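
The remaining credential operations are symmetric reads and deletes: get_credential deserializes a DataSourceCredential from a 200, while delete_credential expects a 204 and returns None. For example:

    credential = client.get_credential(credential_id="<credential-id>")
    print(credential)  # a deserialized DataSourceCredential model
    client.delete_credential(credential_id="<credential-id>")
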
+ :type maxpagesize: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either DataFeedList or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.ai.metricsadvisor.models.DataFeedList] @@ -1515,8 +1827,8 @@ def prepare_request(next_link=None): query_parameters['creator'] = self._serialize.query("creator", creator, 'str') if skip is not None: query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') request = self._client.get(url, query_parameters, header_parameters) else: @@ -1844,7 +2156,7 @@ def list_metric_feedbacks( self, body, # type: "_models.MetricFeedbackFilter" skip=None, # type: Optional[int] - top=None, # type: Optional[int] + maxpagesize=None, # type: Optional[int] **kwargs # type: Any ): # type: (...) -> Iterable["_models.MetricFeedbackList"] @@ -1856,8 +2168,8 @@ def list_metric_feedbacks( :type body: ~azure.ai.metricsadvisor.models.MetricFeedbackFilter :param skip: for paging, skipped number. :type skip: int - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. + :type maxpagesize: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either MetricFeedbackList or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.ai.metricsadvisor.models.MetricFeedbackList] @@ -1888,8 +2200,8 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] if skip is not None: query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(body, 'MetricFeedbackFilter') @@ -2000,7 +2312,7 @@ def list_hooks( self, hook_name=None, # type: Optional[str] skip=None, # type: Optional[int] - top=None, # type: Optional[int] + maxpagesize=None, # type: Optional[int] **kwargs # type: Any ): # type: (...) -> Iterable["_models.HookList"] @@ -2012,8 +2324,8 @@ def list_hooks( :type hook_name: str :param skip: for paging, skipped number. :type skip: int - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. 
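
list_data_feeds combines several optional plain filters (sent verbatim, like the creator query parameter visible above) with the $-prefixed paging pair. A sketch; the dataFeedName wire name and the "Active" status value are assumptions:

    for feed in client.list_data_feeds(
        data_feed_name="sales",       # assumed to be sent as dataFeedName
        status="Active",              # EntityStatus value assumed
        creator="alice@contoso.com",
        skip=0,
        maxpagesize=25,
    ):
        print(feed)
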
+ :type maxpagesize: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either HookList or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.ai.metricsadvisor.models.HookList] @@ -2044,8 +2356,8 @@ def prepare_request(next_link=None): query_parameters['hookName'] = self._serialize.query("hook_name", hook_name, 'str') if skip is not None: query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') request = self._client.get(url, query_parameters, header_parameters) else: @@ -2318,7 +2630,7 @@ def get_data_feed_ingestion_status( data_feed_id, # type: str body, # type: "_models.IngestionStatusQueryOptions" skip=None, # type: Optional[int] - top=None, # type: Optional[int] + maxpagesize=None, # type: Optional[int] **kwargs # type: Any ): # type: (...) -> Iterable["_models.IngestionStatusList"] @@ -2332,8 +2644,8 @@ def get_data_feed_ingestion_status( :type body: ~azure.ai.metricsadvisor.models.IngestionStatusQueryOptions :param skip: for paging, skipped number. :type skip: int - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. + :type maxpagesize: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either IngestionStatusList or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.ai.metricsadvisor.models.IngestionStatusList] @@ -2365,8 +2677,8 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] if skip is not None: query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(body, 'IngestionStatusQueryOptions') @@ -2623,7 +2935,7 @@ def get_metric_series( metric_id, # type: str body, # type: "_models.MetricSeriesQueryOptions" skip=None, # type: Optional[int] - top=None, # type: Optional[int] + maxpagesize=None, # type: Optional[int] **kwargs # type: Any ): # type: (...) -> Iterable["_models.MetricSeriesList"] @@ -2637,8 +2949,8 @@ def get_metric_series( :type body: ~azure.ai.metricsadvisor.models.MetricSeriesQueryOptions :param skip: for paging, skipped number. :type skip: int - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. 
+ :type maxpagesize: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either MetricSeriesList or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.ai.metricsadvisor.models.MetricSeriesList] @@ -2670,8 +2982,8 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] if skip is not None: query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(body, 'MetricSeriesQueryOptions') @@ -2723,7 +3035,7 @@ def get_metric_dimension( metric_id, # type: str body, # type: "_models.MetricDimensionQueryOptions" skip=None, # type: Optional[int] - top=None, # type: Optional[int] + maxpagesize=None, # type: Optional[int] **kwargs # type: Any ): # type: (...) -> Iterable["_models.MetricDimensionList"] @@ -2737,8 +3049,8 @@ def get_metric_dimension( :type body: ~azure.ai.metricsadvisor.models.MetricDimensionQueryOptions :param skip: for paging, skipped number. :type skip: int - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. + :type maxpagesize: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either MetricDimensionList or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.ai.metricsadvisor.models.MetricDimensionList] @@ -2770,8 +3082,8 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] if skip is not None: query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(body, 'MetricDimensionQueryOptions') @@ -2900,7 +3212,7 @@ def get_enrichment_status_by_metric( metric_id, # type: str body, # type: "_models.EnrichmentStatusQueryOption" skip=None, # type: Optional[int] - top=None, # type: Optional[int] + maxpagesize=None, # type: Optional[int] **kwargs # type: Any ): # type: (...) -> Iterable["_models.EnrichmentStatusList"] @@ -2914,8 +3226,8 @@ def get_enrichment_status_by_metric( :type body: ~azure.ai.metricsadvisor.models.EnrichmentStatusQueryOption :param skip: for paging, skipped number. :type skip: int - :param top: for paging, item number in response. - :type top: int + :param maxpagesize: the maximum number of items in one page. 
+ :type maxpagesize: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either EnrichmentStatusList or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.ai.metricsadvisor.models.EnrichmentStatusList] @@ -2947,8 +3259,8 @@ def prepare_request(next_link=None): query_parameters = {} # type: Dict[str, Any] if skip is not None: query_parameters['$skip'] = self._serialize.query("skip", skip, 'int') - if top is not None: - query_parameters['$top'] = self._serialize.query("top", top, 'int') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'int') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(body, 'EnrichmentStatusQueryOption')