From c2371e78b806f63e45fee171c37e84d98286c568 Mon Sep 17 00:00:00 2001 From: SDK Automation Date: Tue, 11 Feb 2020 18:58:27 +0000 Subject: [PATCH] Generated from ffc5d5bf5fa367715f96756aa040ade35f650cc4 Fixed prettier code style issue (I believe a whitespace change) --- .../_data_factory_management_client.py | 5 - .../azure/mgmt/datafactory/models/__init__.py | 34 +- .../_data_factory_management_client_enums.py | 90 ++-- .../azure/mgmt/datafactory/models/_models.py | 145 +++--- .../mgmt/datafactory/models/_models_py3.py | 147 +++--- .../mgmt/datafactory/models/_paged_models.py | 13 - .../mgmt/datafactory/operations/__init__.py | 2 - .../operations/_pipelines_operations.py | 8 +- .../operations/_rerun_triggers_operations.py | 453 ------------------ .../operations/_triggers_operations.py | 73 +++ 10 files changed, 260 insertions(+), 710 deletions(-) delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_rerun_triggers_operations.py diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py index ec8185523fbd..78ff241e7898 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py @@ -26,7 +26,6 @@ from .operations import ActivityRunsOperations from .operations import TriggersOperations from .operations import TriggerRunsOperations -from .operations import RerunTriggersOperations from .operations import DataFlowsOperations from .operations import DataFlowDebugSessionOperations from . import models @@ -64,8 +63,6 @@ class DataFactoryManagementClient(SDKClient): :vartype triggers: azure.mgmt.datafactory.operations.TriggersOperations :ivar trigger_runs: TriggerRuns operations :vartype trigger_runs: azure.mgmt.datafactory.operations.TriggerRunsOperations - :ivar rerun_triggers: RerunTriggers operations - :vartype rerun_triggers: azure.mgmt.datafactory.operations.RerunTriggersOperations :ivar data_flows: DataFlows operations :vartype data_flows: azure.mgmt.datafactory.operations.DataFlowsOperations :ivar data_flow_debug_session: DataFlowDebugSession operations @@ -116,8 +113,6 @@ def __init__( self._client, self.config, self._serialize, self._deserialize) self.trigger_runs = TriggerRunsOperations( self._client, self.config, self._serialize, self._deserialize) - self.rerun_triggers = RerunTriggersOperations( - self._client, self.config, self._serialize, self._deserialize) self.data_flows = DataFlowsOperations( self._client, self.config, self._serialize, self._deserialize) self.data_flow_debug_session = DataFlowDebugSessionOperations( diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index 4e9756b6aad1..e32f8ab31133 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -426,9 +426,7 @@ from ._models_py3 import RedshiftUnloadSettings from ._models_py3 import RelationalSource from ._models_py3 import RelationalTableDataset - from ._models_py3 import RerunTriggerResource from ._models_py3 import RerunTumblingWindowTrigger - from ._models_py3 import RerunTumblingWindowTriggerActionParameters from ._models_py3 import 
Resource from ._models_py3 import ResponsysLinkedService from ._models_py3 import ResponsysObjectDataset @@ -546,7 +544,9 @@ from ._models_py3 import Transformation from ._models_py3 import Trigger from ._models_py3 import TriggerDependencyReference + from ._models_py3 import TriggerFilterParameters from ._models_py3 import TriggerPipelineReference + from ._models_py3 import TriggerQueryResponse from ._models_py3 import TriggerReference from ._models_py3 import TriggerResource from ._models_py3 import TriggerRun @@ -998,9 +998,7 @@ from ._models import RedshiftUnloadSettings from ._models import RelationalSource from ._models import RelationalTableDataset - from ._models import RerunTriggerResource from ._models import RerunTumblingWindowTrigger - from ._models import RerunTumblingWindowTriggerActionParameters from ._models import Resource from ._models import ResponsysLinkedService from ._models import ResponsysObjectDataset @@ -1118,7 +1116,9 @@ from ._models import Transformation from ._models import Trigger from ._models import TriggerDependencyReference + from ._models import TriggerFilterParameters from ._models import TriggerPipelineReference + from ._models import TriggerQueryResponse from ._models import TriggerReference from ._models import TriggerResource from ._models import TriggerRun @@ -1161,7 +1161,6 @@ from ._paged_models import LinkedServiceResourcePaged from ._paged_models import OperationPaged from ._paged_models import PipelineResourcePaged -from ._paged_models import RerunTriggerResourcePaged from ._paged_models import TriggerResourcePaged from ._data_factory_management_client_enums import ( IntegrationRuntimeState, @@ -1177,11 +1176,6 @@ RunQueryOrder, TriggerRunStatus, DataFlowDebugCommandType, - TumblingWindowFrequency, - BlobEventTypes, - DayOfWeek, - DaysOfWeek, - RecurrenceFrequency, GoogleAdWordsAuthenticationType, SparkServerType, SparkThriftTransportProtocol, @@ -1210,6 +1204,11 @@ DynamicsAuthenticationType, OrcCompressionCodec, AvroCompressionCodec, + TumblingWindowFrequency, + BlobEventTypes, + DayOfWeek, + DaysOfWeek, + RecurrenceFrequency, DataFlowComputeType, AzureFunctionActivityMethod, WebActivityMethod, @@ -1659,9 +1658,7 @@ 'RedshiftUnloadSettings', 'RelationalSource', 'RelationalTableDataset', - 'RerunTriggerResource', 'RerunTumblingWindowTrigger', - 'RerunTumblingWindowTriggerActionParameters', 'Resource', 'ResponsysLinkedService', 'ResponsysObjectDataset', @@ -1779,7 +1776,9 @@ 'Transformation', 'Trigger', 'TriggerDependencyReference', + 'TriggerFilterParameters', 'TriggerPipelineReference', + 'TriggerQueryResponse', 'TriggerReference', 'TriggerResource', 'TriggerRun', @@ -1821,7 +1820,6 @@ 'DatasetResourcePaged', 'PipelineResourcePaged', 'TriggerResourcePaged', - 'RerunTriggerResourcePaged', 'DataFlowResourcePaged', 'DataFlowDebugSessionInfoPaged', 'IntegrationRuntimeState', @@ -1837,11 +1835,6 @@ 'RunQueryOrder', 'TriggerRunStatus', 'DataFlowDebugCommandType', - 'TumblingWindowFrequency', - 'BlobEventTypes', - 'DayOfWeek', - 'DaysOfWeek', - 'RecurrenceFrequency', 'GoogleAdWordsAuthenticationType', 'SparkServerType', 'SparkThriftTransportProtocol', @@ -1870,6 +1863,11 @@ 'DynamicsAuthenticationType', 'OrcCompressionCodec', 'AvroCompressionCodec', + 'TumblingWindowFrequency', + 'BlobEventTypes', + 'DayOfWeek', + 'DaysOfWeek', + 'RecurrenceFrequency', 'DataFlowComputeType', 'AzureFunctionActivityMethod', 'WebActivityMethod', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py index 95741bbf0fa1..e30a4c509021 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py @@ -131,51 +131,6 @@ class DataFlowDebugCommandType(str, Enum): execute_expression_query = "executeExpressionQuery" -class TumblingWindowFrequency(str, Enum): - - minute = "Minute" - hour = "Hour" - - -class BlobEventTypes(str, Enum): - - microsoft_storage_blob_created = "Microsoft.Storage.BlobCreated" - microsoft_storage_blob_deleted = "Microsoft.Storage.BlobDeleted" - - -class DayOfWeek(str, Enum): - - sunday = "Sunday" - monday = "Monday" - tuesday = "Tuesday" - wednesday = "Wednesday" - thursday = "Thursday" - friday = "Friday" - saturday = "Saturday" - - -class DaysOfWeek(str, Enum): - - sunday = "Sunday" - monday = "Monday" - tuesday = "Tuesday" - wednesday = "Wednesday" - thursday = "Thursday" - friday = "Friday" - saturday = "Saturday" - - -class RecurrenceFrequency(str, Enum): - - not_specified = "NotSpecified" - minute = "Minute" - hour = "Hour" - day = "Day" - week = "Week" - month = "Month" - year = "Year" - - class GoogleAdWordsAuthenticationType(str, Enum): service_authentication = "ServiceAuthentication" @@ -366,6 +321,51 @@ class AvroCompressionCodec(str, Enum): bzip2 = "bzip2" +class TumblingWindowFrequency(str, Enum): + + minute = "Minute" + hour = "Hour" + + +class BlobEventTypes(str, Enum): + + microsoft_storage_blob_created = "Microsoft.Storage.BlobCreated" + microsoft_storage_blob_deleted = "Microsoft.Storage.BlobDeleted" + + +class DayOfWeek(str, Enum): + + sunday = "Sunday" + monday = "Monday" + tuesday = "Tuesday" + wednesday = "Wednesday" + thursday = "Thursday" + friday = "Friday" + saturday = "Saturday" + + +class DaysOfWeek(str, Enum): + + sunday = "Sunday" + monday = "Monday" + tuesday = "Tuesday" + wednesday = "Wednesday" + thursday = "Thursday" + friday = "Friday" + saturday = "Saturday" + + +class RecurrenceFrequency(str, Enum): + + not_specified = "NotSpecified" + minute = "Minute" + hour = "Hour" + day = "Day" + week = "Week" + month = "Month" + year = "Year" + + class DataFlowComputeType(str, Enum): general = "General" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py index 853ec8158be8..595498114404 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py @@ -6458,7 +6458,7 @@ class Trigger(Model): pipeline run. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: RerunTumblingWindowTrigger, ChainingTrigger, + sub-classes are: ChainingTrigger, RerunTumblingWindowTrigger, TumblingWindowTrigger, MultiplePipelineTrigger Variables are only populated by the server, and will be ignored when @@ -6497,7 +6497,7 @@ class Trigger(Model): } _subtype_map = { - 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'ChainingTrigger': 'ChainingTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} + 'type': {'ChainingTrigger': 'ChainingTrigger', 'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} } def __init__(self, **kwargs): @@ -23584,48 +23584,6 @@ def __init__(self, **kwargs): self.type = 'RelationalTable' -class RerunTriggerResource(SubResource): - """RerunTrigger resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of the rerun trigger. - :type properties: - ~azure.mgmt.datafactory.models.RerunTumblingWindowTrigger - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'RerunTumblingWindowTrigger'}, - } - - def __init__(self, **kwargs): - super(RerunTriggerResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - class RerunTumblingWindowTrigger(Trigger): """Trigger that schedules pipeline reruns for all fixed time interval windows from a requested start time to requested end time. @@ -23650,7 +23608,7 @@ class RerunTumblingWindowTrigger(Trigger): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str - :param parent_trigger: The parent trigger reference. + :param parent_trigger: Required. The parent trigger reference. :type parent_trigger: object :param requested_start_time: Required. The start time for the time period for which restatement is initiated. Only UTC time is currently supported. @@ -23658,17 +23616,18 @@ class RerunTumblingWindowTrigger(Trigger): :param requested_end_time: Required. The end time for the time period for which restatement is initiated. Only UTC time is currently supported. :type requested_end_time: datetime - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a rerun is triggered. - :type max_concurrency: int + :param rerun_concurrency: Required. The max number of parallel time + windows (ready for execution) for which a rerun is triggered. 
+ :type rerun_concurrency: int """ _validation = { 'runtime_state': {'readonly': True}, 'type': {'required': True}, + 'parent_trigger': {'required': True}, 'requested_start_time': {'required': True}, 'requested_end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + 'rerun_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, } _attribute_map = { @@ -23680,7 +23639,7 @@ class RerunTumblingWindowTrigger(Trigger): 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'rerun_concurrency': {'key': 'typeProperties.rerunConcurrency', 'type': 'int'}, } def __init__(self, **kwargs): @@ -23688,45 +23647,10 @@ def __init__(self, **kwargs): self.parent_trigger = kwargs.get('parent_trigger', None) self.requested_start_time = kwargs.get('requested_start_time', None) self.requested_end_time = kwargs.get('requested_end_time', None) - self.max_concurrency = kwargs.get('max_concurrency', None) + self.rerun_concurrency = kwargs.get('rerun_concurrency', None) self.type = 'RerunTumblingWindowTrigger' -class RerunTumblingWindowTriggerActionParameters(Model): - """Rerun tumbling window trigger Parameters. - - All required parameters must be populated in order to send to Azure. - - :param start_time: Required. The start time for the time period for which - restatement is initiated. Only UTC time is currently supported. - :type start_time: datetime - :param end_time: Required. The end time for the time period for which - restatement is initiated. Only UTC time is currently supported. - :type end_time: datetime - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a rerun is triggered. - :type max_concurrency: int - """ - - _validation = { - 'start_time': {'required': True}, - 'end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, - } - - _attribute_map = { - 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, - } - - def __init__(self, **kwargs): - super(RerunTumblingWindowTriggerActionParameters, self).__init__(**kwargs) - self.start_time = kwargs.get('start_time', None) - self.end_time = kwargs.get('end_time', None) - self.max_concurrency = kwargs.get('max_concurrency', None) - - class ResponsysLinkedService(LinkedService): """Responsys linked service. @@ -30053,6 +29977,28 @@ def __init__(self, **kwargs): self.type = 'TriggerDependencyReference' +class TriggerFilterParameters(Model): + """Query parameters for triggers. + + :param continuation_token: The continuation token for getting the next + page of results. Null for first page. 
+ :type continuation_token: str + :param parent_trigger_name: The name of the parent TumblingWindowTrigger + to get the child rerun triggers + :type parent_trigger_name: str + """ + + _attribute_map = { + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + 'parent_trigger_name': {'key': 'parentTriggerName', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(TriggerFilterParameters, self).__init__(**kwargs) + self.continuation_token = kwargs.get('continuation_token', None) + self.parent_trigger_name = kwargs.get('parent_trigger_name', None) + + class TriggerPipelineReference(Model): """Pipeline that needs to be triggered with the given parameters. @@ -30073,6 +30019,33 @@ def __init__(self, **kwargs): self.parameters = kwargs.get('parameters', None) +class TriggerQueryResponse(Model): + """A query of triggers. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of triggers. + :type value: list[~azure.mgmt.datafactory.models.TriggerResource] + :param continuation_token: The continuation token for getting the next + page of results, if any remaining results exist, null otherwise. + :type continuation_token: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[TriggerResource]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(TriggerQueryResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.continuation_token = kwargs.get('continuation_token', None) + + class TriggerReference(Model): """Trigger reference type. diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py index 8c7c24c27e52..36bac536a805 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py @@ -6458,7 +6458,7 @@ class Trigger(Model): pipeline run. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: RerunTumblingWindowTrigger, ChainingTrigger, + sub-classes are: ChainingTrigger, RerunTumblingWindowTrigger, TumblingWindowTrigger, MultiplePipelineTrigger Variables are only populated by the server, and will be ignored when @@ -6497,7 +6497,7 @@ class Trigger(Model): } _subtype_map = { - 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'ChainingTrigger': 'ChainingTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} + 'type': {'ChainingTrigger': 'ChainingTrigger', 'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} } def __init__(self, *, additional_properties=None, description: str=None, annotations=None, **kwargs) -> None: @@ -23584,48 +23584,6 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'RelationalTable' -class RerunTriggerResource(SubResource): - """RerunTrigger resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. 
- :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of the rerun trigger. - :type properties: - ~azure.mgmt.datafactory.models.RerunTumblingWindowTrigger - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'RerunTumblingWindowTrigger'}, - } - - def __init__(self, *, properties, **kwargs) -> None: - super(RerunTriggerResource, self).__init__(**kwargs) - self.properties = properties - - class RerunTumblingWindowTrigger(Trigger): """Trigger that schedules pipeline reruns for all fixed time interval windows from a requested start time to requested end time. @@ -23650,7 +23608,7 @@ class RerunTumblingWindowTrigger(Trigger): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str - :param parent_trigger: The parent trigger reference. + :param parent_trigger: Required. The parent trigger reference. :type parent_trigger: object :param requested_start_time: Required. The start time for the time period for which restatement is initiated. Only UTC time is currently supported. @@ -23658,17 +23616,18 @@ class RerunTumblingWindowTrigger(Trigger): :param requested_end_time: Required. The end time for the time period for which restatement is initiated. Only UTC time is currently supported. :type requested_end_time: datetime - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a rerun is triggered. - :type max_concurrency: int + :param rerun_concurrency: Required. The max number of parallel time + windows (ready for execution) for which a rerun is triggered. 
+ :type rerun_concurrency: int """ _validation = { 'runtime_state': {'readonly': True}, 'type': {'required': True}, + 'parent_trigger': {'required': True}, 'requested_start_time': {'required': True}, 'requested_end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + 'rerun_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, } _attribute_map = { @@ -23680,53 +23639,18 @@ class RerunTumblingWindowTrigger(Trigger): 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'rerun_concurrency': {'key': 'typeProperties.rerunConcurrency', 'type': 'int'}, } - def __init__(self, *, requested_start_time, requested_end_time, max_concurrency: int, additional_properties=None, description: str=None, annotations=None, parent_trigger=None, **kwargs) -> None: + def __init__(self, *, parent_trigger, requested_start_time, requested_end_time, rerun_concurrency: int, additional_properties=None, description: str=None, annotations=None, **kwargs) -> None: super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) self.parent_trigger = parent_trigger self.requested_start_time = requested_start_time self.requested_end_time = requested_end_time - self.max_concurrency = max_concurrency + self.rerun_concurrency = rerun_concurrency self.type = 'RerunTumblingWindowTrigger' -class RerunTumblingWindowTriggerActionParameters(Model): - """Rerun tumbling window trigger Parameters. - - All required parameters must be populated in order to send to Azure. - - :param start_time: Required. The start time for the time period for which - restatement is initiated. Only UTC time is currently supported. - :type start_time: datetime - :param end_time: Required. The end time for the time period for which - restatement is initiated. Only UTC time is currently supported. - :type end_time: datetime - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a rerun is triggered. - :type max_concurrency: int - """ - - _validation = { - 'start_time': {'required': True}, - 'end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, - } - - _attribute_map = { - 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, - } - - def __init__(self, *, start_time, end_time, max_concurrency: int, **kwargs) -> None: - super(RerunTumblingWindowTriggerActionParameters, self).__init__(**kwargs) - self.start_time = start_time - self.end_time = end_time - self.max_concurrency = max_concurrency - - class ResponsysLinkedService(LinkedService): """Responsys linked service. @@ -30053,6 +29977,28 @@ def __init__(self, *, reference_trigger, **kwargs) -> None: self.type = 'TriggerDependencyReference' +class TriggerFilterParameters(Model): + """Query parameters for triggers. + + :param continuation_token: The continuation token for getting the next + page of results. Null for first page. 
+ :type continuation_token: str + :param parent_trigger_name: The name of the parent TumblingWindowTrigger + to get the child rerun triggers + :type parent_trigger_name: str + """ + + _attribute_map = { + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + 'parent_trigger_name': {'key': 'parentTriggerName', 'type': 'str'}, + } + + def __init__(self, *, continuation_token: str=None, parent_trigger_name: str=None, **kwargs) -> None: + super(TriggerFilterParameters, self).__init__(**kwargs) + self.continuation_token = continuation_token + self.parent_trigger_name = parent_trigger_name + + class TriggerPipelineReference(Model): """Pipeline that needs to be triggered with the given parameters. @@ -30073,6 +30019,33 @@ def __init__(self, *, pipeline_reference=None, parameters=None, **kwargs) -> Non self.parameters = parameters +class TriggerQueryResponse(Model): + """A query of triggers. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of triggers. + :type value: list[~azure.mgmt.datafactory.models.TriggerResource] + :param continuation_token: The continuation token for getting the next + page of results, if any remaining results exist, null otherwise. + :type continuation_token: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[TriggerResource]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + } + + def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None: + super(TriggerQueryResponse, self).__init__(**kwargs) + self.value = value + self.continuation_token = continuation_token + + class TriggerReference(Model): """Trigger reference type. diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py index f78455cfdb9a..9a46a2afb4ca 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py @@ -103,19 +103,6 @@ class TriggerResourcePaged(Paged): def __init__(self, *args, **kwargs): super(TriggerResourcePaged, self).__init__(*args, **kwargs) -class RerunTriggerResourcePaged(Paged): - """ - A paging container for iterating over a list of :class:`RerunTriggerResource ` object - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'current_page': {'key': 'value', 'type': '[RerunTriggerResource]'} - } - - def __init__(self, *args, **kwargs): - - super(RerunTriggerResourcePaged, self).__init__(*args, **kwargs) class DataFlowResourcePaged(Paged): """ A paging container for iterating over a list of :class:`DataFlowResource ` object diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py index 619150f2d6a8..59e9feaff462 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py @@ -22,7 +22,6 @@ from ._activity_runs_operations import ActivityRunsOperations from ._triggers_operations import TriggersOperations from ._trigger_runs_operations import TriggerRunsOperations -from ._rerun_triggers_operations import RerunTriggersOperations from ._data_flows_operations import 
DataFlowsOperations from ._data_flow_debug_session_operations import DataFlowDebugSessionOperations @@ -40,7 +39,6 @@ 'ActivityRunsOperations', 'TriggersOperations', 'TriggerRunsOperations', - 'RerunTriggersOperations', 'DataFlowsOperations', 'DataFlowDebugSessionOperations', ] diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py index 00201749beee..1f24fba6ac9c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py @@ -314,7 +314,7 @@ def delete( delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} def create_run( - self, resource_group_name, factory_name, pipeline_name, reference_pipeline_run_id=None, is_recovery=None, start_activity_name=None, parameters=None, custom_headers=None, raw=False, **operation_config): + self, resource_group_name, factory_name, pipeline_name, reference_pipeline_run_id=None, is_recovery=None, start_activity_name=None, start_from_failure=None, parameters=None, custom_headers=None, raw=False, **operation_config): """Creates a run of a pipeline. :param resource_group_name: The resource group name. @@ -334,6 +334,10 @@ def create_run( :param start_activity_name: In recovery mode, the rerun will start from this activity. If not specified, all activities will run. :type start_activity_name: str + :param start_from_failure: In recovery mode, if set to true, the rerun + will start from failed activities. The property will be used only if + startActivityName is not specified. + :type start_from_failure: bool :param parameters: Parameters of the pipeline run. These parameters will be used only if the runId is not specified. :type parameters: dict[str, object] @@ -366,6 +370,8 @@ def create_run( query_parameters['isRecovery'] = self._serialize.query("is_recovery", is_recovery, 'bool') if start_activity_name is not None: query_parameters['startActivityName'] = self._serialize.query("start_activity_name", start_activity_name, 'str') + if start_from_failure is not None: + query_parameters['startFromFailure'] = self._serialize.query("start_from_failure", start_from_failure, 'bool') # Construct headers header_parameters = {} diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_rerun_triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_rerun_triggers_operations.py deleted file mode 100644 index 6d5f8e9831de..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_rerun_triggers_operations.py +++ /dev/null @@ -1,453 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -import uuid -from msrest.pipeline import ClientRawResponse -from msrestazure.azure_exceptions import CloudError -from msrest.polling import LROPoller, NoPolling -from msrestazure.polling.arm_polling import ARMPolling - -from .. import models - - -class RerunTriggersOperations(object): - """RerunTriggersOperations operations. - - You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. - - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - :ivar api_version: The API version. Constant value: "2018-06-01". - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self.api_version = "2018-06-01" - - self.config = config - - def create( - self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, rerun_tumbling_window_trigger_action_parameters, custom_headers=None, raw=False, **operation_config): - """Creates a rerun trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param rerun_trigger_name: The rerun trigger name. - :type rerun_trigger_name: str - :param rerun_tumbling_window_trigger_action_parameters: Rerun tumbling - window trigger action parameters. - :type rerun_tumbling_window_trigger_action_parameters: - ~azure.mgmt.datafactory.models.RerunTumblingWindowTriggerActionParameters - :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :param operation_config: :ref:`Operation configuration - overrides`. 
- :return: TriggerResource or ClientRawResponse if raw=true - :rtype: ~azure.mgmt.datafactory.models.TriggerResource or - ~msrest.pipeline.ClientRawResponse - :raises: :class:`CloudError` - """ - # Construct URL - url = self.create.metadata['url'] - path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - 'rerunTriggerName': self._serialize.url("rerun_trigger_name", rerun_trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - - # Construct headers - header_parameters = {} - header_parameters['Accept'] = 'application/json' - header_parameters['Content-Type'] = 'application/json; charset=utf-8' - if self.config.generate_client_request_id: - header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) - if custom_headers: - header_parameters.update(custom_headers) - if self.config.accept_language is not None: - header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - - # Construct body - body_content = self._serialize.body(rerun_tumbling_window_trigger_action_parameters, 'RerunTumblingWindowTriggerActionParameters') - - # Construct and send request - request = self._client.put(url, query_parameters, header_parameters, body_content) - response = self._client.send(request, stream=False, **operation_config) - - if response.status_code not in [200]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - raise exp - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('TriggerResource', response) - - if raw: - client_raw_response = ClientRawResponse(deserialized, response) - return client_raw_response - - return deserialized - create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers/{rerunTriggerName}'} - - - def _start_initial( - self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, **operation_config): - # Construct URL - url = self.start.metadata['url'] - path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - 'rerunTriggerName': self._serialize.url("rerun_trigger_name", 
rerun_trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - - # Construct headers - header_parameters = {} - if self.config.generate_client_request_id: - header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) - if custom_headers: - header_parameters.update(custom_headers) - if self.config.accept_language is not None: - header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - - # Construct and send request - request = self._client.post(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) - - if response.status_code not in [200]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - raise exp - - if raw: - client_raw_response = ClientRawResponse(None, response) - return client_raw_response - - def start( - self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, polling=True, **operation_config): - """Starts a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param rerun_trigger_name: The rerun trigger name. - :type rerun_trigger_name: str - :param dict custom_headers: headers that will be added to the request - :param bool raw: The poller return type is ClientRawResponse, the - direct response alongside the deserialized response - :param polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :return: An instance of LROPoller that returns None or - ClientRawResponse if raw==True - :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or - ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] - :raises: :class:`CloudError` - """ - raw_result = self._start_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - rerun_trigger_name=rerun_trigger_name, - custom_headers=custom_headers, - raw=True, - **operation_config - ) - - def get_long_running_output(response): - if raw: - client_raw_response = ClientRawResponse(None, response) - return client_raw_response - - lro_delay = operation_config.get( - 'long_running_operation_timeout', - self.config.long_running_operation_timeout) - if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers/{rerunTriggerName}/start'} - - - def _stop_initial( - self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, **operation_config): - # Construct URL - url = self.stop.metadata['url'] - path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), - 
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - 'rerunTriggerName': self._serialize.url("rerun_trigger_name", rerun_trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - - # Construct headers - header_parameters = {} - if self.config.generate_client_request_id: - header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) - if custom_headers: - header_parameters.update(custom_headers) - if self.config.accept_language is not None: - header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - - # Construct and send request - request = self._client.post(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) - - if response.status_code not in [200]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - raise exp - - if raw: - client_raw_response = ClientRawResponse(None, response) - return client_raw_response - - def stop( - self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, polling=True, **operation_config): - """Stops a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param rerun_trigger_name: The rerun trigger name. 
- :type rerun_trigger_name: str - :param dict custom_headers: headers that will be added to the request - :param bool raw: The poller return type is ClientRawResponse, the - direct response alongside the deserialized response - :param polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :return: An instance of LROPoller that returns None or - ClientRawResponse if raw==True - :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or - ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] - :raises: :class:`CloudError` - """ - raw_result = self._stop_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - rerun_trigger_name=rerun_trigger_name, - custom_headers=custom_headers, - raw=True, - **operation_config - ) - - def get_long_running_output(response): - if raw: - client_raw_response = ClientRawResponse(None, response) - return client_raw_response - - lro_delay = operation_config.get( - 'long_running_operation_timeout', - self.config.long_running_operation_timeout) - if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers/{rerunTriggerName}/stop'} - - - def _cancel_initial( - self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, **operation_config): - # Construct URL - url = self.cancel.metadata['url'] - path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - 'rerunTriggerName': self._serialize.url("rerun_trigger_name", rerun_trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - - # Construct headers - header_parameters = {} - if self.config.generate_client_request_id: - header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) - if custom_headers: - header_parameters.update(custom_headers) - if self.config.accept_language is not None: - header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - - # Construct and send request - request = self._client.post(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) - - if response.status_code not in [200]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - raise exp - - if raw: - client_raw_response = 
ClientRawResponse(None, response) - return client_raw_response - - def cancel( - self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, polling=True, **operation_config): - """Cancels a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param rerun_trigger_name: The rerun trigger name. - :type rerun_trigger_name: str - :param dict custom_headers: headers that will be added to the request - :param bool raw: The poller return type is ClientRawResponse, the - direct response alongside the deserialized response - :param polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :return: An instance of LROPoller that returns None or - ClientRawResponse if raw==True - :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or - ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] - :raises: :class:`CloudError` - """ - raw_result = self._cancel_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - rerun_trigger_name=rerun_trigger_name, - custom_headers=custom_headers, - raw=True, - **operation_config - ) - - def get_long_running_output(response): - if raw: - client_raw_response = ClientRawResponse(None, response) - return client_raw_response - - lro_delay = operation_config.get( - 'long_running_operation_timeout', - self.config.long_running_operation_timeout) - if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers/{rerunTriggerName}/cancel'} - - def list_by_trigger( - self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, **operation_config): - """Lists rerun triggers by an original trigger name. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :param operation_config: :ref:`Operation configuration - overrides`. 
- :return: An iterator like instance of RerunTriggerResource - :rtype: - ~azure.mgmt.datafactory.models.RerunTriggerResourcePaged[~azure.mgmt.datafactory.models.RerunTriggerResource] - :raises: :class:`CloudError` - """ - def prepare_request(next_link=None): - if not next_link: - # Construct URL - url = self.list_by_trigger.metadata['url'] - path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - - else: - url = next_link - query_parameters = {} - - # Construct headers - header_parameters = {} - header_parameters['Accept'] = 'application/json' - if self.config.generate_client_request_id: - header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) - if custom_headers: - header_parameters.update(custom_headers) - if self.config.accept_language is not None: - header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - - # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) - return request - - def internal_paging(next_link=None): - request = prepare_request(next_link) - - response = self._client.send(request, stream=False, **operation_config) - - if response.status_code not in [200]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - raise exp - - return response - - # Deserialize response - header_dict = None - if raw: - header_dict = {} - deserialized = models.RerunTriggerResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) - - return deserialized - list_by_trigger.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers'} diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py index 57e31b1bd8c9..4554f5f7f71b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py @@ -113,6 +113,79 @@ def internal_paging(next_link=None): return deserialized list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers'} + def query_by_factory( + self, resource_group_name, factory_name, continuation_token=None, parent_trigger_name=None, custom_headers=None, raw=False, **operation_config): + """Query triggers. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :param continuation_token: The continuation token for getting the next + page of results. Null for first page. + :type continuation_token: str + :param parent_trigger_name: The name of the parent + TumblingWindowTrigger to get the child rerun triggers + :type parent_trigger_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: TriggerQueryResponse or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.TriggerQueryResponse or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + filter_parameters = models.TriggerFilterParameters(continuation_token=continuation_token, parent_trigger_name=parent_trigger_name) + + # Construct URL + url = self.query_by_factory.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(filter_parameters, 'TriggerFilterParameters') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('TriggerQueryResponse', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers'} + def create_or_update( self, resource_group_name, factory_name, trigger_name, properties, if_match=None, custom_headers=None, raw=False, **operation_config): """Creates or updates a trigger.
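
Reviewer note (not part of the generated patch): a minimal usage sketch of the client surface after this change. It assumes an existing `credentials` object; the subscription, resource group, factory, pipeline, run, and trigger names are placeholders, and the `parent_trigger` payload shape is an assumption, since the generated model types that property as a plain object.

    from datetime import datetime

    from azure.mgmt.datafactory import DataFactoryManagementClient
    from azure.mgmt.datafactory import models

    # `credentials` is assumed to already exist; all names below are placeholders.
    client = DataFactoryManagementClient(credentials, "<subscription-id>")

    # Rerun triggers are now managed through the regular triggers operations:
    # the renamed `rerun_concurrency` property and the now-required
    # `parent_trigger` replace the removed RerunTriggersOperations.create call.
    rerun_trigger = models.RerunTumblingWindowTrigger(
        # parent_trigger is typed as `object` in the generated model; a
        # trigger-reference-shaped dict is assumed here.
        parent_trigger={"referenceName": "DailyWindowTrigger", "type": "TriggerReference"},
        requested_start_time=datetime(2020, 2, 1),
        requested_end_time=datetime(2020, 2, 10),
        rerun_concurrency=4,
    )
    client.triggers.create_or_update(
        "my-rg", "my-factory", "DailyWindowTrigger-rerun", rerun_trigger)

    # Child rerun triggers are listed via the new query_by_factory operation
    # (POST .../querytriggers) instead of the deleted rerun_triggers.list_by_trigger.
    response = client.triggers.query_by_factory(
        "my-rg", "my-factory", parent_trigger_name="DailyWindowTrigger")
    for trigger in response.value:  # response is a TriggerQueryResponse
        print(trigger.name)

    # Pipeline recovery runs can now resume from failed activities via the new
    # start_from_failure flag on create_run.
    run = client.pipelines.create_run(
        "my-rg", "my-factory", "my-pipeline",
        reference_pipeline_run_id="<previous-run-id>",
        is_recovery=True,
        start_from_failure=True,
    )
    print(run.run_id)

The sketch only exercises the operations whose signatures changed in this patch; the rest of the client surface is untouched.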