Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[AutoPR datafactory/resource-manager] [Datafactory] Add Azure Function to Swagger #4146

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
from .version import VERSION
from .operations.operations import Operations
from .operations.factories_operations import FactoriesOperations
from .operations.exposure_control_operations import ExposureControlOperations
from .operations.integration_runtimes_operations import IntegrationRuntimesOperations
from .operations.integration_runtime_object_metadata_operations import IntegrationRuntimeObjectMetadataOperations
from .operations.integration_runtime_nodes_operations import IntegrationRuntimeNodesOperations
Expand Down Expand Up @@ -71,6 +72,8 @@ class DataFactoryManagementClient(SDKClient):
:vartype operations: azure.mgmt.datafactory.operations.Operations
:ivar factories: Factories operations
:vartype factories: azure.mgmt.datafactory.operations.FactoriesOperations
:ivar exposure_control: ExposureControl operations
:vartype exposure_control: azure.mgmt.datafactory.operations.ExposureControlOperations
:ivar integration_runtimes: IntegrationRuntimes operations
:vartype integration_runtimes: azure.mgmt.datafactory.operations.IntegrationRuntimesOperations
:ivar integration_runtime_object_metadata: IntegrationRuntimeObjectMetadata operations
Expand Down Expand Up @@ -117,6 +120,8 @@ def __init__(
self._client, self.config, self._serialize, self._deserialize)
self.factories = FactoriesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.exposure_control = ExposureControlOperations(
self._client, self.config, self._serialize, self._deserialize)
self.integration_runtimes = IntegrationRuntimesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.integration_runtime_object_metadata = IntegrationRuntimeObjectMetadataOperations(
Expand Down
17 changes: 17 additions & 0 deletions azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,8 @@
from .operation_py3 import Operation
from .get_ssis_object_metadata_request_py3 import GetSsisObjectMetadataRequest
from .ssis_object_metadata_status_response_py3 import SsisObjectMetadataStatusResponse
from .exposure_control_request_py3 import ExposureControlRequest
from .exposure_control_response_py3 import ExposureControlResponse
from .self_dependency_tumbling_window_trigger_reference_py3 import SelfDependencyTumblingWindowTriggerReference
from .trigger_reference_py3 import TriggerReference
from .tumbling_window_trigger_dependency_reference_py3 import TumblingWindowTriggerDependencyReference
Expand All @@ -92,6 +94,7 @@
from .schedule_trigger_recurrence_py3 import ScheduleTriggerRecurrence
from .schedule_trigger_py3 import ScheduleTrigger
from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger
from .azure_function_linked_service_py3 import AzureFunctionLinkedService
from .responsys_linked_service_py3 import ResponsysLinkedService
from .azure_databricks_linked_service_py3 import AzureDatabricksLinkedService
from .azure_data_lake_analytics_linked_service_py3 import AzureDataLakeAnalyticsLinkedService
Expand Down Expand Up @@ -234,6 +237,7 @@
from .azure_blob_dataset_py3 import AzureBlobDataset
from .amazon_s3_dataset_py3 import AmazonS3Dataset
from .activity_policy_py3 import ActivityPolicy
from .azure_function_activity_py3 import AzureFunctionActivity
from .databricks_spark_python_activity_py3 import DatabricksSparkPythonActivity
from .databricks_spark_jar_activity_py3 import DatabricksSparkJarActivity
from .databricks_notebook_activity_py3 import DatabricksNotebookActivity
Expand Down Expand Up @@ -306,6 +310,7 @@
from .custom_activity_py3 import CustomActivity
from .ssis_property_override_py3 import SSISPropertyOverride
from .ssis_execution_parameter_py3 import SSISExecutionParameter
from .ssis_execution_credential_py3 import SSISExecutionCredential
from .ssis_package_location_py3 import SSISPackageLocation
from .execute_ssis_package_activity_py3 import ExecuteSSISPackageActivity
from .hd_insight_spark_activity_py3 import HDInsightSparkActivity
Expand Down Expand Up @@ -438,6 +443,8 @@
from .operation import Operation
from .get_ssis_object_metadata_request import GetSsisObjectMetadataRequest
from .ssis_object_metadata_status_response import SsisObjectMetadataStatusResponse
from .exposure_control_request import ExposureControlRequest
from .exposure_control_response import ExposureControlResponse
from .self_dependency_tumbling_window_trigger_reference import SelfDependencyTumblingWindowTriggerReference
from .trigger_reference import TriggerReference
from .tumbling_window_trigger_dependency_reference import TumblingWindowTriggerDependencyReference
Expand All @@ -452,6 +459,7 @@
from .schedule_trigger_recurrence import ScheduleTriggerRecurrence
from .schedule_trigger import ScheduleTrigger
from .multiple_pipeline_trigger import MultiplePipelineTrigger
from .azure_function_linked_service import AzureFunctionLinkedService
from .responsys_linked_service import ResponsysLinkedService
from .azure_databricks_linked_service import AzureDatabricksLinkedService
from .azure_data_lake_analytics_linked_service import AzureDataLakeAnalyticsLinkedService
Expand Down Expand Up @@ -594,6 +602,7 @@
from .azure_blob_dataset import AzureBlobDataset
from .amazon_s3_dataset import AmazonS3Dataset
from .activity_policy import ActivityPolicy
from .azure_function_activity import AzureFunctionActivity
from .databricks_spark_python_activity import DatabricksSparkPythonActivity
from .databricks_spark_jar_activity import DatabricksSparkJarActivity
from .databricks_notebook_activity import DatabricksNotebookActivity
Expand Down Expand Up @@ -666,6 +675,7 @@
from .custom_activity import CustomActivity
from .ssis_property_override import SSISPropertyOverride
from .ssis_execution_parameter import SSISExecutionParameter
from .ssis_execution_credential import SSISExecutionCredential
from .ssis_package_location import SSISPackageLocation
from .execute_ssis_package_activity import ExecuteSSISPackageActivity
from .hd_insight_spark_activity import HDInsightSparkActivity
Expand Down Expand Up @@ -777,6 +787,7 @@
SybaseAuthenticationType,
DatasetCompressionLevel,
JsonFormatFilePattern,
AzureFunctionActivityMethod,
WebActivityMethod,
CassandraSourceReadConsistencyLevels,
StoredProcedureParameterType,
Expand Down Expand Up @@ -868,6 +879,8 @@
'Operation',
'GetSsisObjectMetadataRequest',
'SsisObjectMetadataStatusResponse',
'ExposureControlRequest',
'ExposureControlResponse',
'SelfDependencyTumblingWindowTriggerReference',
'TriggerReference',
'TumblingWindowTriggerDependencyReference',
Expand All @@ -882,6 +895,7 @@
'ScheduleTriggerRecurrence',
'ScheduleTrigger',
'MultiplePipelineTrigger',
'AzureFunctionLinkedService',
'ResponsysLinkedService',
'AzureDatabricksLinkedService',
'AzureDataLakeAnalyticsLinkedService',
Expand Down Expand Up @@ -1024,6 +1038,7 @@
'AzureBlobDataset',
'AmazonS3Dataset',
'ActivityPolicy',
'AzureFunctionActivity',
'DatabricksSparkPythonActivity',
'DatabricksSparkJarActivity',
'DatabricksNotebookActivity',
Expand Down Expand Up @@ -1096,6 +1111,7 @@
'CustomActivity',
'SSISPropertyOverride',
'SSISExecutionParameter',
'SSISExecutionCredential',
'SSISPackageLocation',
'ExecuteSSISPackageActivity',
'HDInsightSparkActivity',
Expand Down Expand Up @@ -1206,6 +1222,7 @@
'SybaseAuthenticationType',
'DatasetCompressionLevel',
'JsonFormatFilePattern',
'AzureFunctionActivityMethod',
'WebActivityMethod',
'CassandraSourceReadConsistencyLevels',
'StoredProcedureParameterType',
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,10 @@ class AmazonMWSObjectDataset(Dataset):
:param structure: Columns that define the structure of the dataset. Type:
array (or Expression with resultType array), itemType: DatasetDataElement.
:type structure: object
:param schema: Columns that define the physical type schema of the
dataset. Type: array (or Expression with resultType array), itemType:
DatasetSchemaDataElement.
:type schema: object
:param linked_service_name: Required. Linked service reference.
:type linked_service_name:
~azure.mgmt.datafactory.models.LinkedServiceReference
Expand Down Expand Up @@ -53,6 +57,7 @@ class AmazonMWSObjectDataset(Dataset):
'additional_properties': {'key': '', 'type': '{object}'},
'description': {'key': 'description', 'type': 'str'},
'structure': {'key': 'structure', 'type': 'object'},
'schema': {'key': 'schema', 'type': 'object'},
'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,10 @@ class AmazonMWSObjectDataset(Dataset):
:param structure: Columns that define the structure of the dataset. Type:
array (or Expression with resultType array), itemType: DatasetDataElement.
:type structure: object
:param schema: Columns that define the physical type schema of the
dataset. Type: array (or Expression with resultType array), itemType:
DatasetSchemaDataElement.
:type schema: object
:param linked_service_name: Required. Linked service reference.
:type linked_service_name:
~azure.mgmt.datafactory.models.LinkedServiceReference
Expand Down Expand Up @@ -53,6 +57,7 @@ class AmazonMWSObjectDataset(Dataset):
'additional_properties': {'key': '', 'type': '{object}'},
'description': {'key': 'description', 'type': 'str'},
'structure': {'key': 'structure', 'type': 'object'},
'schema': {'key': 'schema', 'type': 'object'},
'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
Expand All @@ -61,7 +66,7 @@ class AmazonMWSObjectDataset(Dataset):
'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
}

def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
super(AmazonMWSObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
    """Create an AmazonMWSObjectDataset.

    All common dataset settings (description, structure, schema,
    linked service, parameters, annotations, folder) are forwarded to
    the Dataset base initializer; only the Amazon MWS table name is
    stored locally, and the polymorphic discriminator is fixed to
    'AmazonMWSObject'.
    """
    super(AmazonMWSObjectDataset, self).__init__(
        additional_properties=additional_properties,
        description=description,
        structure=structure,
        schema=schema,
        linked_service_name=linked_service_name,
        parameters=parameters,
        annotations=annotations,
        folder=folder,
        **kwargs,
    )
    # Table name. Type: string (or Expression with resultType string).
    self.table_name = table_name
    # Polymorphic type discriminator used by the serializer.
    self.type = 'AmazonMWSObject'
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,10 @@ class AmazonS3Dataset(Dataset):
:param structure: Columns that define the structure of the dataset. Type:
array (or Expression with resultType array), itemType: DatasetDataElement.
:type structure: object
:param schema: Columns that define the physical type schema of the
dataset. Type: array (or Expression with resultType array), itemType:
DatasetSchemaDataElement.
:type schema: object
:param linked_service_name: Required. Linked service reference.
:type linked_service_name:
~azure.mgmt.datafactory.models.LinkedServiceReference
Expand Down Expand Up @@ -68,6 +72,7 @@ class AmazonS3Dataset(Dataset):
'additional_properties': {'key': '', 'type': '{object}'},
'description': {'key': 'description', 'type': 'str'},
'structure': {'key': 'structure', 'type': 'object'},
'schema': {'key': 'schema', 'type': 'object'},
'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,10 @@ class AmazonS3Dataset(Dataset):
:param structure: Columns that define the structure of the dataset. Type:
array (or Expression with resultType array), itemType: DatasetDataElement.
:type structure: object
:param schema: Columns that define the physical type schema of the
dataset. Type: array (or Expression with resultType array), itemType:
DatasetSchemaDataElement.
:type schema: object
:param linked_service_name: Required. Linked service reference.
:type linked_service_name:
~azure.mgmt.datafactory.models.LinkedServiceReference
Expand Down Expand Up @@ -68,6 +72,7 @@ class AmazonS3Dataset(Dataset):
'additional_properties': {'key': '', 'type': '{object}'},
'description': {'key': 'description', 'type': 'str'},
'structure': {'key': 'structure', 'type': 'object'},
'schema': {'key': 'schema', 'type': 'object'},
'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
Expand All @@ -81,8 +86,8 @@ class AmazonS3Dataset(Dataset):
'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'},
}

def __init__(self, *, linked_service_name, bucket_name, additional_properties=None, description: str=None, structure=None, parameters=None, annotations=None, folder=None, key=None, prefix=None, version=None, format=None, compression=None, **kwargs) -> None:
super(AmazonS3Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
def __init__(self, *, linked_service_name, bucket_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, key=None, prefix=None, version=None, format=None, compression=None, **kwargs) -> None:
super(AmazonS3Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
self.bucket_name = bucket_name
self.key = key
self.prefix = prefix
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,10 @@ class AzureBlobDataset(Dataset):
:param structure: Columns that define the structure of the dataset. Type:
array (or Expression with resultType array), itemType: DatasetDataElement.
:type structure: object
:param schema: Columns that define the physical type schema of the
dataset. Type: array (or Expression with resultType array), itemType:
DatasetSchemaDataElement.
:type schema: object
:param linked_service_name: Required. Linked service reference.
:type linked_service_name:
~azure.mgmt.datafactory.models.LinkedServiceReference
Expand Down Expand Up @@ -63,6 +67,7 @@ class AzureBlobDataset(Dataset):
'additional_properties': {'key': '', 'type': '{object}'},
'description': {'key': 'description', 'type': 'str'},
'structure': {'key': 'structure', 'type': 'object'},
'schema': {'key': 'schema', 'type': 'object'},
'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,10 @@ class AzureBlobDataset(Dataset):
:param structure: Columns that define the structure of the dataset. Type:
array (or Expression with resultType array), itemType: DatasetDataElement.
:type structure: object
:param schema: Columns that define the physical type schema of the
dataset. Type: array (or Expression with resultType array), itemType:
DatasetSchemaDataElement.
:type schema: object
:param linked_service_name: Required. Linked service reference.
:type linked_service_name:
~azure.mgmt.datafactory.models.LinkedServiceReference
Expand Down Expand Up @@ -63,6 +67,7 @@ class AzureBlobDataset(Dataset):
'additional_properties': {'key': '', 'type': '{object}'},
'description': {'key': 'description', 'type': 'str'},
'structure': {'key': 'structure', 'type': 'object'},
'schema': {'key': 'schema', 'type': 'object'},
'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
Expand All @@ -75,8 +80,8 @@ class AzureBlobDataset(Dataset):
'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'},
}

def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, parameters=None, annotations=None, folder=None, folder_path=None, table_root_location=None, file_name=None, format=None, compression=None, **kwargs) -> None:
super(AzureBlobDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, table_root_location=None, file_name=None, format=None, compression=None, **kwargs) -> None:
super(AzureBlobDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
self.folder_path = folder_path
self.table_root_location = table_root_location
self.file_name = file_name
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,10 @@ class AzureDataLakeStoreDataset(Dataset):
:param structure: Columns that define the structure of the dataset. Type:
array (or Expression with resultType array), itemType: DatasetDataElement.
:type structure: object
:param schema: Columns that define the physical type schema of the
dataset. Type: array (or Expression with resultType array), itemType:
DatasetSchemaDataElement.
:type schema: object
:param linked_service_name: Required. Linked service reference.
:type linked_service_name:
~azure.mgmt.datafactory.models.LinkedServiceReference
Expand Down Expand Up @@ -62,6 +66,7 @@ class AzureDataLakeStoreDataset(Dataset):
'additional_properties': {'key': '', 'type': '{object}'},
'description': {'key': 'description', 'type': 'str'},
'structure': {'key': 'structure', 'type': 'object'},
'schema': {'key': 'schema', 'type': 'object'},
'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
Expand Down
Loading