[AutoPR datafactory/resource-manager] Enable Avro Dataset in public swagger (#6567)

* Generated from ec112148bf30430557ff3fac0c74f0706b1042de

Enable Avro Dataset in public swagger

* Generated from e41431428e45beaa5bbb12344d3332479c095e31

UPDATE
AutorestCI authored Jul 31, 2019
1 parent 85d9892 · commit c48179d
Showing 16 changed files with 551 additions and 20 deletions.
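For orientation before the diffs, here is a minimal usage sketch of the new model. It assumes the track-1 client (where datasets.create_or_update accepts the Dataset properties directly) and AzureBlobStorageLocation as one of the available DatasetLocation subtypes; all resource names are placeholders, not taken from this commit.

# Hedged usage sketch; resource names are placeholders.
from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.datafactory import DataFactoryManagementClient
from azure.mgmt.datafactory.models import (
    AvroDataset, AzureBlobStorageLocation, LinkedServiceReference)

credentials = ServicePrincipalCredentials(
    client_id='<client-id>', secret='<secret>', tenant='<tenant-id>')
client = DataFactoryManagementClient(credentials, '<subscription-id>')

# An AvroDataset needs a linked service and a storage location; the codec
# and level are the new optional typeProperties enabled by this change.
dataset = AvroDataset(
    linked_service_name=LinkedServiceReference(reference_name='BlobLinkedService'),
    location=AzureBlobStorageLocation(container='data', folder_path='avro'),
    avro_compression_codec='deflate',
    avro_compression_level=5)

client.datasets.create_or_update(
    '<resource-group>', '<factory-name>', 'MyAvroDataset', dataset)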
azure/mgmt/datafactory/models/__init__.py
@@ -289,6 +289,7 @@
from .binary_dataset_py3 import BinaryDataset
from .delimited_text_dataset_py3 import DelimitedTextDataset
from .parquet_dataset_py3 import ParquetDataset
from .avro_dataset_py3 import AvroDataset
from .amazon_s3_dataset_py3 import AmazonS3Dataset
from .activity_policy_py3 import ActivityPolicy
from .azure_function_activity_py3 import AzureFunctionActivity
@@ -403,6 +404,7 @@
from .delimited_text_read_settings_py3 import DelimitedTextReadSettings
from .delimited_text_source_py3 import DelimitedTextSource
from .parquet_source_py3 import ParquetSource
from .avro_source_py3 import AvroSource
from .copy_source_py3 import CopySource
from .lookup_activity_py3 import LookupActivity
from .azure_data_explorer_command_activity_py3 import AzureDataExplorerCommandActivity
@@ -455,12 +457,14 @@
from .azure_blob_storage_write_settings_py3 import AzureBlobStorageWriteSettings
from .store_write_settings_py3 import StoreWriteSettings
from .parquet_sink_py3 import ParquetSink
from .delimited_text_write_settings_py3 import DelimitedTextWriteSettings
from .format_write_settings_py3 import FormatWriteSettings
from .avro_write_settings_py3 import AvroWriteSettings
from .avro_sink_py3 import AvroSink
from .azure_table_sink_py3 import AzureTableSink
from .azure_queue_sink_py3 import AzureQueueSink
from .sap_cloud_for_customer_sink_py3 import SapCloudForCustomerSink
from .azure_postgre_sql_sink_py3 import AzurePostgreSqlSink
from .format_write_settings_py3 import FormatWriteSettings
from .delimited_text_write_settings_py3 import DelimitedTextWriteSettings
from .delimited_text_sink_py3 import DelimitedTextSink
from .copy_sink_py3 import CopySink
from .copy_activity_py3 import CopyActivity
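DelimitedTextWriteSettings and FormatWriteSettings appear twice in this hunk because their imports were moved above the new Avro entries. The likely reason (an assumption, since avro_write_settings_py3 itself is not shown in this excerpt) is that AvroWriteSettings subclasses FormatWriteSettings, so the base class must be importable first. A minimal sketch of that assumed hierarchy:

# Assumed hierarchy only; the real classes live in format_write_settings_py3
# and avro_write_settings_py3, which this excerpt does not show.
class FormatWriteSettings(object):
    """Polymorphic base for copy-sink format settings."""

class AvroWriteSettings(FormatWriteSettings):
    """Avro-specific format settings used by AvroSink.format_settings."""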
@@ -790,6 +794,7 @@
from .binary_dataset import BinaryDataset
from .delimited_text_dataset import DelimitedTextDataset
from .parquet_dataset import ParquetDataset
from .avro_dataset import AvroDataset
from .amazon_s3_dataset import AmazonS3Dataset
from .activity_policy import ActivityPolicy
from .azure_function_activity import AzureFunctionActivity
@@ -904,6 +909,7 @@
from .delimited_text_read_settings import DelimitedTextReadSettings
from .delimited_text_source import DelimitedTextSource
from .parquet_source import ParquetSource
from .avro_source import AvroSource
from .copy_source import CopySource
from .lookup_activity import LookupActivity
from .azure_data_explorer_command_activity import AzureDataExplorerCommandActivity
@@ -956,12 +962,14 @@
from .azure_blob_storage_write_settings import AzureBlobStorageWriteSettings
from .store_write_settings import StoreWriteSettings
from .parquet_sink import ParquetSink
from .delimited_text_write_settings import DelimitedTextWriteSettings
from .format_write_settings import FormatWriteSettings
from .avro_write_settings import AvroWriteSettings
from .avro_sink import AvroSink
from .azure_table_sink import AzureTableSink
from .azure_queue_sink import AzureQueueSink
from .sap_cloud_for_customer_sink import SapCloudForCustomerSink
from .azure_postgre_sql_sink import AzurePostgreSqlSink
from .format_write_settings import FormatWriteSettings
from .delimited_text_write_settings import DelimitedTextWriteSettings
from .delimited_text_sink import DelimitedTextSink
from .copy_sink import CopySink
from .copy_activity import CopyActivity
@@ -1062,6 +1070,7 @@
SybaseAuthenticationType,
DynamicsDeploymentType,
DynamicsAuthenticationType,
AvroCompressionCodec,
AzureFunctionActivityMethod,
WebActivityMethod,
NetezzaPartitionOption,
@@ -1371,6 +1380,7 @@
'BinaryDataset',
'DelimitedTextDataset',
'ParquetDataset',
'AvroDataset',
'AmazonS3Dataset',
'ActivityPolicy',
'AzureFunctionActivity',
@@ -1485,6 +1495,7 @@
'DelimitedTextReadSettings',
'DelimitedTextSource',
'ParquetSource',
'AvroSource',
'CopySource',
'LookupActivity',
'AzureDataExplorerCommandActivity',
@@ -1537,12 +1548,14 @@
'AzureBlobStorageWriteSettings',
'StoreWriteSettings',
'ParquetSink',
'DelimitedTextWriteSettings',
'FormatWriteSettings',
'AvroWriteSettings',
'AvroSink',
'AzureTableSink',
'AzureQueueSink',
'SapCloudForCustomerSink',
'AzurePostgreSqlSink',
'FormatWriteSettings',
'DelimitedTextWriteSettings',
'DelimitedTextSink',
'CopySink',
'CopyActivity',
@@ -1642,6 +1655,7 @@
'SybaseAuthenticationType',
'DynamicsDeploymentType',
'DynamicsAuthenticationType',
'AvroCompressionCodec',
'AzureFunctionActivityMethod',
'WebActivityMethod',
'NetezzaPartitionOption',
azure/mgmt/datafactory/models/avro_dataset.py
@@ -0,0 +1,83 @@
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from .dataset import Dataset


class AvroDataset(Dataset):
    """Avro dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset. Type:
     array (or Expression with resultType array), itemType: DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param location: Required. The location of the Avro storage.
    :type location: ~azure.mgmt.datafactory.models.DatasetLocation
    :param avro_compression_codec: Possible values include: 'none', 'deflate',
     'snappy', 'xz', 'bzip2'
    :type avro_compression_codec: str or
     ~azure.mgmt.datafactory.models.AvroCompressionCodec
    :param avro_compression_level: The compression level to apply, from 1 to
     9.
    :type avro_compression_level: int
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
        'location': {'required': True},
        'avro_compression_level': {'maximum': 9, 'minimum': 1},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
        'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'},
        'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'},
    }

    def __init__(self, **kwargs):
        super(AvroDataset, self).__init__(**kwargs)
        self.location = kwargs.get('location', None)
        self.avro_compression_codec = kwargs.get('avro_compression_codec', None)
        self.avro_compression_level = kwargs.get('avro_compression_level', None)
        self.type = 'Avro'
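The dotted keys in _attribute_map ('typeProperties.location', and so on) tell msrest to nest those values under typeProperties on the wire. A hedged sketch of the resulting payload, assuming msrest's Model.serialize() and placeholder names:

# Hedged sketch; assumes msrest Model.serialize().
from azure.mgmt.datafactory.models import (
    AvroDataset, AzureBlobStorageLocation, LinkedServiceReference)

ds = AvroDataset(
    linked_service_name=LinkedServiceReference(reference_name='BlobLinkedService'),
    location=AzureBlobStorageLocation(container='data'),
    avro_compression_codec='deflate',
    avro_compression_level=3)

print(ds.serialize())
# Expected shape (abridged):
# {"type": "Avro",
#  "linkedServiceName": {"referenceName": "BlobLinkedService",
#                        "type": "LinkedServiceReference"},
#  "typeProperties": {"location": {"type": "AzureBlobStorageLocation",
#                                  "container": "data"},
#                     "avroCompressionCodec": "deflate",
#                     "avroCompressionLevel": 3}}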
azure/mgmt/datafactory/models/avro_dataset_py3.py
@@ -0,0 +1,83 @@
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from .dataset_py3 import Dataset


class AvroDataset(Dataset):
    """Avro dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset. Type:
     array (or Expression with resultType array), itemType: DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param location: Required. The location of the Avro storage.
    :type location: ~azure.mgmt.datafactory.models.DatasetLocation
    :param avro_compression_codec: Possible values include: 'none', 'deflate',
     'snappy', 'xz', 'bzip2'
    :type avro_compression_codec: str or
     ~azure.mgmt.datafactory.models.AvroCompressionCodec
    :param avro_compression_level: The compression level to apply, from 1 to
     9.
    :type avro_compression_level: int
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
        'location': {'required': True},
        'avro_compression_level': {'maximum': 9, 'minimum': 1},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
        'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'},
        'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'},
    }

    def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, avro_compression_codec=None, avro_compression_level: int=None, **kwargs) -> None:
        super(AvroDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.location = location
        self.avro_compression_codec = avro_compression_codec
        self.avro_compression_level = avro_compression_level
        self.type = 'Avro'
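The py3 variant takes keyword-only arguments and enforces the same _validation table, which bounds avro_compression_level to 1-9. A small sketch of surfacing an out-of-range value, assuming msrest's Model.validate():

# Hedged sketch; assumes msrest Model.validate() returns validation errors.
from azure.mgmt.datafactory.models import (
    AvroDataset, AzureBlobStorageLocation, LinkedServiceReference)

bad = AvroDataset(
    linked_service_name=LinkedServiceReference(reference_name='BlobLinkedService'),
    location=AzureBlobStorageLocation(container='data'),
    avro_compression_level=12)  # outside the declared 1-9 range

errors = bad.validate()  # expected: an error for avro_compression_level
print([str(e) for e in errors])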
azure/mgmt/datafactory/models/avro_sink.py
@@ -0,0 +1,69 @@
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from .copy_sink import CopySink


class AvroSink(CopySink):
    """A copy activity Avro sink.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param write_batch_size: Write batch size. Type: integer (or Expression
     with resultType integer), minimum: 0.
    :type write_batch_size: object
    :param write_batch_timeout: Write batch timeout. Type: string (or
     Expression with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type write_batch_timeout: object
    :param sink_retry_count: Sink retry count. Type: integer (or Expression
     with resultType integer).
    :type sink_retry_count: object
    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type sink_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection count
     for the sink data store. Type: integer (or Expression with resultType
     integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param store_settings: Avro store settings.
    :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings
    :param format_settings: Avro format settings.
    :type format_settings: ~azure.mgmt.datafactory.models.AvroWriteSettings
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
        'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'},
    }

    def __init__(self, **kwargs):
        super(AvroSink, self).__init__(**kwargs)
        self.store_settings = kwargs.get('store_settings', None)
        self.format_settings = kwargs.get('format_settings', None)
        self.type = 'AvroSink'
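AvroSink plugs into a copy activity alongside the other models this commit touches (AvroSource, AvroWriteSettings, AzureBlobStorageWriteSettings). A hedged wiring sketch with placeholder dataset and activity names:

# Hedged sketch; dataset/activity names are placeholders.
from azure.mgmt.datafactory.models import (
    AvroSink, AvroSource, AvroWriteSettings, AzureBlobStorageWriteSettings,
    CopyActivity, DatasetReference)

copy = CopyActivity(
    name='CopyToAvro',
    inputs=[DatasetReference(reference_name='SourceAvroDataset')],
    outputs=[DatasetReference(reference_name='MyAvroDataset')],
    source=AvroSource(),
    sink=AvroSink(
        store_settings=AzureBlobStorageWriteSettings(),
        format_settings=AvroWriteSettings()))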
(Diffs for the remaining 12 changed files are not shown.)
