[AutoPR datafactory/resource-manager] Add Dataset and CopySource for SAP HANA (#5835)

* Generated from 5f85e81e98e9fea4da62b1d4eed0a9bfc4b2bf5e Update Pipeline.json
* Generated from 5f85e81e98e9fea4da62b1d4eed0a9bfc4b2bf5e Update Pipeline.json
1 parent cf20f44, commit afb9ffe
Showing 162 changed files with 6,277 additions and 471 deletions.
168 changes: 146 additions & 22 deletions
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
Large diffs are not rendered by default.
55 changes: 55 additions & 0 deletions
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py
@@ -0,0 +1,55 @@
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from .dataset_location import DatasetLocation


class AmazonS3Location(DatasetLocation):
    """The location of amazon S3 dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized this collection
    :type additional_properties: dict[str, object]
    :param type: Required. Type of dataset storage location.
    :type type: str
    :param folder_path: Specify the folder path of dataset. Type: string (or
     Expression with resultType string)
    :type folder_path: object
    :param file_name: Specify the file name of dataset. Type: string (or
     Expression with resultType string).
    :type file_name: object
    :param bucket_name: Specify the bucketName of amazon S3. Type: string (or
     Expression with resultType string)
    :type bucket_name: object
    :param version: Specify the version of amazon S3. Type: string (or
     Expression with resultType string).
    :type version: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'type': {'key': 'type', 'type': 'str'},
        'folder_path': {'key': 'folderPath', 'type': 'object'},
        'file_name': {'key': 'fileName', 'type': 'object'},
        'bucket_name': {'key': 'bucketName', 'type': 'object'},
        'version': {'key': 'version', 'type': 'object'},
    }

    def __init__(self, **kwargs):
        super(AmazonS3Location, self).__init__(**kwargs)
        self.bucket_name = kwargs.get('bucket_name', None)
        self.version = kwargs.get('version', None)
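For orientation, a minimal usage sketch of the kwargs-based constructor above (not part of this commit). It assumes the updated models/__init__.py re-exports AmazonS3Location and that 'AmazonS3Location' is the service-side type discriminator; the bucket and path values are purely illustrative.

from azure.mgmt.datafactory.models import AmazonS3Location

# Build the location from a plain dict of illustrative values; each snake_case
# attribute maps to the camelCase key listed in _attribute_map above.
props = {
    'type': 'AmazonS3Location',        # assumed discriminator value
    'bucket_name': 'sample-bucket',    # serialized as 'bucketName'
    'folder_path': 'raw/2019/05',      # serialized as 'folderPath'
    'file_name': 'events.csv',         # serialized as 'fileName'
    'version': None,                   # optional S3 object version
}
location = AmazonS3Location(**props)
print(location.bucket_name)  # 'sample-bucket'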
55 changes: 55 additions & 0 deletions
...atafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py
@@ -0,0 +1,55 @@
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from .dataset_location_py3 import DatasetLocation


class AmazonS3Location(DatasetLocation):
    """The location of amazon S3 dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized this collection
    :type additional_properties: dict[str, object]
    :param type: Required. Type of dataset storage location.
    :type type: str
    :param folder_path: Specify the folder path of dataset. Type: string (or
     Expression with resultType string)
    :type folder_path: object
    :param file_name: Specify the file name of dataset. Type: string (or
     Expression with resultType string).
    :type file_name: object
    :param bucket_name: Specify the bucketName of amazon S3. Type: string (or
     Expression with resultType string)
    :type bucket_name: object
    :param version: Specify the version of amazon S3. Type: string (or
     Expression with resultType string).
    :type version: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'type': {'key': 'type', 'type': 'str'},
        'folder_path': {'key': 'folderPath', 'type': 'object'},
        'file_name': {'key': 'fileName', 'type': 'object'},
        'bucket_name': {'key': 'bucketName', 'type': 'object'},
        'version': {'key': 'version', 'type': 'object'},
    }

    def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, bucket_name=None, version=None, **kwargs) -> None:
        super(AmazonS3Location, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs)
        self.bucket_name = bucket_name
        self.version = version
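The keyword-only variant above is the one the generated models/__init__.py typically imports on Python 3. A hedged sketch of the same construction with explicit keyword arguments (not part of this commit; the discriminator value and example paths are assumptions):

from azure.mgmt.datafactory.models import AmazonS3Location

# On Python 3 all constructor parameters are keyword-only and the required
# 'type' string must be supplied explicitly.
location = AmazonS3Location(
    type='AmazonS3Location',      # assumed discriminator value
    bucket_name='sample-bucket',
    folder_path='raw/2019/05',
    file_name='events.csv',
)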
78 changes: 78 additions & 0 deletions
...atafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting.py
@@ -0,0 +1,78 @@
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from .connector_read_setting import ConnectorReadSetting


class AmazonS3ReadSetting(ConnectorReadSetting):
    """Azure data lake store read settings.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized this collection
    :type additional_properties: dict[str, object]
    :param type: Required. The read setting type.
    :type type: str
    :param max_concurrent_connections: The maximum concurrent connection count
     for the source data store. Type: integer (or Expression with resultType
     integer).
    :type max_concurrent_connections: object
    :param recursive: If true, files under the folder path will be read
     recursively. Default is true. Type: boolean (or Expression with resultType
     boolean).
    :type recursive: object
    :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or
     Expression with resultType string).
    :type wildcard_folder_path: object
    :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or
     Expression with resultType string).
    :type wildcard_file_name: object
    :param prefix: The prefix filter for the S3 object name. Type: string (or
     Expression with resultType string).
    :type prefix: object
    :param enable_partition_discovery: Indicates whether to enable partition
     discovery.
    :type enable_partition_discovery: bool
    :param modified_datetime_start: The start of file's modified datetime.
     Type: string (or Expression with resultType string).
    :type modified_datetime_start: object
    :param modified_datetime_end: The end of file's modified datetime. Type:
     string (or Expression with resultType string).
    :type modified_datetime_end: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'type': {'key': 'type', 'type': 'str'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'recursive': {'key': 'recursive', 'type': 'object'},
        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
        'prefix': {'key': 'prefix', 'type': 'object'},
        'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
        'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
        'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
    }

    def __init__(self, **kwargs):
        super(AmazonS3ReadSetting, self).__init__(**kwargs)
        self.recursive = kwargs.get('recursive', None)
        self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None)
        self.wildcard_file_name = kwargs.get('wildcard_file_name', None)
        self.prefix = kwargs.get('prefix', None)
        self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None)
        self.modified_datetime_start = kwargs.get('modified_datetime_start', None)
        self.modified_datetime_end = kwargs.get('modified_datetime_end', None)
78 changes: 78 additions & 0 deletions
...actory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting_py3.py
@@ -0,0 +1,78 @@
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from .connector_read_setting_py3 import ConnectorReadSetting


class AmazonS3ReadSetting(ConnectorReadSetting):
    """Azure data lake store read settings.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized this collection
    :type additional_properties: dict[str, object]
    :param type: Required. The read setting type.
    :type type: str
    :param max_concurrent_connections: The maximum concurrent connection count
     for the source data store. Type: integer (or Expression with resultType
     integer).
    :type max_concurrent_connections: object
    :param recursive: If true, files under the folder path will be read
     recursively. Default is true. Type: boolean (or Expression with resultType
     boolean).
    :type recursive: object
    :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or
     Expression with resultType string).
    :type wildcard_folder_path: object
    :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or
     Expression with resultType string).
    :type wildcard_file_name: object
    :param prefix: The prefix filter for the S3 object name. Type: string (or
     Expression with resultType string).
    :type prefix: object
    :param enable_partition_discovery: Indicates whether to enable partition
     discovery.
    :type enable_partition_discovery: bool
    :param modified_datetime_start: The start of file's modified datetime.
     Type: string (or Expression with resultType string).
    :type modified_datetime_start: object
    :param modified_datetime_end: The end of file's modified datetime. Type:
     string (or Expression with resultType string).
    :type modified_datetime_end: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'type': {'key': 'type', 'type': 'str'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'recursive': {'key': 'recursive', 'type': 'object'},
        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
        'prefix': {'key': 'prefix', 'type': 'object'},
        'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
        'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
        'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
    }

    def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None:
        super(AmazonS3ReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.recursive = recursive
        self.wildcard_folder_path = wildcard_folder_path
        self.wildcard_file_name = wildcard_file_name
        self.prefix = prefix
        self.enable_partition_discovery = enable_partition_discovery
        self.modified_datetime_start = modified_datetime_start
        self.modified_datetime_end = modified_datetime_end
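A hedged usage sketch for the read settings above (not part of this commit), showing the wildcard and modified-datetime filters; the discriminator value, datetime strings, and paths are assumptions:

from azure.mgmt.datafactory.models import AmazonS3ReadSetting

read_settings = AmazonS3ReadSetting(
    type='AmazonS3ReadSetting',                 # assumed discriminator value
    recursive=True,                             # walk sub-folders under the folder path
    wildcard_folder_path='raw/2019/*',          # serialized as 'wildcardFolderPath'
    wildcard_file_name='*.csv',                 # serialized as 'wildcardFileName'
    modified_datetime_start='2019-05-01T00:00:00Z',
    modified_datetime_end='2019-06-01T00:00:00Z',
    max_concurrent_connections=4,
)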
50 changes: 50 additions & 0 deletions
...atafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py
@@ -0,0 +1,50 @@
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from .dataset_location import DatasetLocation


class AzureBlobFSLocation(DatasetLocation):
    """The location of azure blobFS dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized this collection
    :type additional_properties: dict[str, object]
    :param type: Required. Type of dataset storage location.
    :type type: str
    :param folder_path: Specify the folder path of dataset. Type: string (or
     Expression with resultType string)
    :type folder_path: object
    :param file_name: Specify the file name of dataset. Type: string (or
     Expression with resultType string).
    :type file_name: object
    :param file_system: Specify the fileSystem of azure blobFS. Type: string
     (or Expression with resultType string).
    :type file_system: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'type': {'key': 'type', 'type': 'str'},
        'folder_path': {'key': 'folderPath', 'type': 'object'},
        'file_name': {'key': 'fileName', 'type': 'object'},
        'file_system': {'key': 'fileSystem', 'type': 'object'},
    }

    def __init__(self, **kwargs):
        super(AzureBlobFSLocation, self).__init__(**kwargs)
        self.file_system = kwargs.get('file_system', None)
50 changes: 50 additions & 0 deletions
...actory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py
@@ -0,0 +1,50 @@
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from .dataset_location_py3 import DatasetLocation


class AzureBlobFSLocation(DatasetLocation):
    """The location of azure blobFS dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized this collection
    :type additional_properties: dict[str, object]
    :param type: Required. Type of dataset storage location.
    :type type: str
    :param folder_path: Specify the folder path of dataset. Type: string (or
     Expression with resultType string)
    :type folder_path: object
    :param file_name: Specify the file name of dataset. Type: string (or
     Expression with resultType string).
    :type file_name: object
    :param file_system: Specify the fileSystem of azure blobFS. Type: string
     (or Expression with resultType string).
    :type file_system: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'type': {'key': 'type', 'type': 'str'},
        'folder_path': {'key': 'folderPath', 'type': 'object'},
        'file_name': {'key': 'fileName', 'type': 'object'},
        'file_system': {'key': 'fileSystem', 'type': 'object'},
    }

    def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, file_system=None, **kwargs) -> None:
        super(AzureBlobFSLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs)
        self.file_system = file_system
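Likewise, a minimal sketch for the Azure Data Lake Storage Gen2 (blobFS) location above (not part of this commit; the file-system name, paths, and discriminator value are illustrative assumptions):

from azure.mgmt.datafactory.models import AzureBlobFSLocation

adls_location = AzureBlobFSLocation(
    type='AzureBlobFSLocation',    # assumed discriminator value
    file_system='sample-fs',       # ADLS Gen2 file system, serialized as 'fileSystem'
    folder_path='curated/sales',
    file_name='daily.parquet',
)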