
Commit

Generated from 03f9a26a934a6471a8c9dd7400b5efa0dbd36be3
Merge branch 'master' into codegen-datafactory-onboard-cli
SDK Automation committed Sep 10, 2020
1 parent e770034 commit 3eddb5b
Showing 57 changed files with 6,951 additions and 3,158 deletions.
2 changes: 1 addition & 1 deletion src/datafactory/azext_datafactory/azext_metadata.json
@@ -1,4 +1,4 @@
{
    "azext.isExperimental": true,
-   "azext.minCliCoreVersion": "2.3.1"
+   "azext.minCliCoreVersion": "2.11.0"
}
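
For illustration only (not part of this commit): the only change in this file raises the minimum Azure CLI core version the extension declares. A rough sketch of the version gate this implies, using packaging for the comparison; the actual enforcement lives inside azure-cli and is only assumed here:

from packaging.version import Version

def meets_min_core(installed_core: str, min_required: str = "2.11.0") -> bool:
    # Load the extension only when the installed core is at least the declared minimum.
    return Version(installed_core) >= Version(min_required)

assert meets_min_core("2.11.0")
assert not meets_min_core("2.3.1")  # a core at the old 2.3.1 floor no longer qualifies
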
481 changes: 266 additions & 215 deletions src/datafactory/azext_datafactory/generated/_help.py

Large diffs are not rendered by default.

486 changes: 288 additions & 198 deletions src/datafactory/azext_datafactory/generated/_params.py

Large diffs are not rendered by default.

24 changes: 23 additions & 1 deletion src/datafactory/azext_datafactory/generated/action.py
@@ -10,8 +10,8 @@
# pylint: disable=protected-access

import argparse
- from knack.util import CLIError
from collections import defaultdict
+ from knack.util import CLIError


class AddFactoryVstsConfiguration(argparse.Action):
@@ -82,6 +82,28 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use
return d


class AddFolder(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
namespace.folder = action

def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
properties = defaultdict(list)
for (k, v) in (x.split('=', 1) for x in values):
properties[k].append(v)
properties = dict(properties)
except ValueError:
raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
d = {}
for k in properties:
kl = k.lower()
v = properties[k]
if kl == 'name':
d['name'] = v[0]
return d


class AddFilters(argparse._AppendAction):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
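
For illustration only (not part of this commit): the new AddFolder action above turns "--folder name=<value>" style arguments into the dict payload the SDK expects. A minimal sketch of its behavior, assuming the generated _params.py registers the argument with nargs='+' (that wiring is not shown in this diff):

import argparse
from azext_datafactory.generated.action import AddFolder

parser = argparse.ArgumentParser()
parser.add_argument('--folder', nargs='+', action=AddFolder)
ns = parser.parse_args(['--folder', 'name=myfolder'])
assert ns.folder == {'name': 'myfolder'}  # keys other than 'name' are ignored by get_action
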
21 changes: 14 additions & 7 deletions src/datafactory/azext_datafactory/generated/commands.py
@@ -26,7 +26,7 @@ def load_command_table(self, _):
g.custom_show_command('show', 'datafactory_factory_show')
g.custom_command('create', 'datafactory_factory_create')
g.custom_command('update', 'datafactory_factory_update')
- g.custom_command('delete', 'datafactory_factory_delete')
+ g.custom_command('delete', 'datafactory_factory_delete', confirmation=True)
g.custom_command('configure-factory-repo', 'datafactory_factory_configure_factory_repo')
g.custom_command('get-data-plane-access', 'datafactory_factory_get_data_plane_access')
g.custom_command('get-git-hub-access-token', 'datafactory_factory_get_git_hub_access_token')
@@ -45,7 +45,7 @@ def load_command_table(self, _):
g.custom_command('managed create', 'datafactory_integration_runtime_managed_create')
g.custom_command('self-hosted create', 'datafactory_integration_runtime_self_hosted_create')
g.custom_command('update', 'datafactory_integration_runtime_update')
- g.custom_command('delete', 'datafactory_integration_runtime_delete')
+ g.custom_command('delete', 'datafactory_integration_runtime_delete', confirmation=True)
g.custom_command('get-connection-info', 'datafactory_integration_runtime_get_connection_info')
g.custom_command('get-monitoring-data', 'datafactory_integration_runtime_get_monitoring_data')
g.custom_command('get-status', 'datafactory_integration_runtime_get_status')
@@ -67,7 +67,7 @@ def load_command_table(self, _):
client_factory=cf_integration_runtime_node, is_experimental=True) as g:
g.custom_show_command('show', 'datafactory_integration_runtime_node_show')
g.custom_command('update', 'datafactory_integration_runtime_node_update')
- g.custom_command('delete', 'datafactory_integration_runtime_node_delete')
+ g.custom_command('delete', 'datafactory_integration_runtime_node_delete', confirmation=True)
g.custom_command('get-ip-address', 'datafactory_integration_runtime_node_get_ip_address')

from azext_datafactory.generated._client_factory import cf_linked_service
@@ -80,7 +80,9 @@ def load_command_table(self, _):
g.custom_command('list', 'datafactory_linked_service_list')
g.custom_show_command('show', 'datafactory_linked_service_show')
g.custom_command('create', 'datafactory_linked_service_create')
- g.custom_command('delete', 'datafactory_linked_service_delete')
+ g.generic_update_command('update', setter_arg_name='properties', custom_func_name=''
+                          'datafactory_linked_service_update')
+ g.custom_command('delete', 'datafactory_linked_service_delete', confirmation=True)

from azext_datafactory.generated._client_factory import cf_dataset
datafactory_dataset = CliCommandType(
@@ -92,7 +94,9 @@ def load_command_table(self, _):
g.custom_command('list', 'datafactory_dataset_list')
g.custom_show_command('show', 'datafactory_dataset_show')
g.custom_command('create', 'datafactory_dataset_create')
- g.custom_command('delete', 'datafactory_dataset_delete')
+ g.generic_update_command('update', setter_arg_name='properties',
+                          custom_func_name='datafactory_dataset_update')
+ g.custom_command('delete', 'datafactory_dataset_delete', confirmation=True)

from azext_datafactory.generated._client_factory import cf_pipeline
datafactory_pipeline = CliCommandType(
@@ -105,7 +109,7 @@ def load_command_table(self, _):
g.custom_show_command('show', 'datafactory_pipeline_show')
g.custom_command('create', 'datafactory_pipeline_create')
g.generic_update_command('update', setter_arg_name='pipeline', custom_func_name='datafactory_pipeline_update')
- g.custom_command('delete', 'datafactory_pipeline_delete')
+ g.custom_command('delete', 'datafactory_pipeline_delete', confirmation=True)
g.custom_command('create-run', 'datafactory_pipeline_create_run')

from azext_datafactory.generated._client_factory import cf_pipeline_run
@@ -138,7 +142,9 @@ def load_command_table(self, _):
g.custom_command('list', 'datafactory_trigger_list')
g.custom_show_command('show', 'datafactory_trigger_show')
g.custom_command('create', 'datafactory_trigger_create')
- g.custom_command('delete', 'datafactory_trigger_delete')
+ g.generic_update_command('update', setter_arg_name='properties',
+                          custom_func_name='datafactory_trigger_update')
+ g.custom_command('delete', 'datafactory_trigger_delete', confirmation=True)
g.custom_command('get-event-subscription-status', 'datafactory_trigger_get_event_subscription_status')
g.custom_command('query-by-factory', 'datafactory_trigger_query_by_factory')
g.custom_command('start', 'datafactory_trigger_start', supports_no_wait=True)
@@ -155,5 +161,6 @@ def load_command_table(self, _):
client_factory=cf_trigger_run)
with self.command_group('datafactory trigger-run', datafactory_trigger_run, client_factory=cf_trigger_run,
is_experimental=True) as g:
+ g.custom_command('cancel', 'datafactory_trigger_run_cancel')
g.custom_command('query-by-factory', 'datafactory_trigger_run_query_by_factory')
g.custom_command('rerun', 'datafactory_trigger_run_rerun')
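
For illustration only (not part of this commit): the generic_update_command registrations above drive, roughly, a get-patch-put round trip. The getter fetches the resource, the custom function named by custom_func_name patches it in place, and the result is sent back to the setter under the keyword given by setter_arg_name ('properties' here). A self-contained sketch of just the patch step, using the datafactory_dataset_update function added in custom.py below; the SimpleNamespace stand-in and the resource names are made up for the example:

from types import SimpleNamespace
from azext_datafactory.generated.custom import datafactory_dataset_update

# Stand-in for the fetched dataset's properties object (simplified).
dataset = SimpleNamespace(description=None, structure=None, schema=None,
                          linked_service_name={'reference_name': 'ls1'},
                          parameters=None, annotations=None, folder=None)

patched = datafactory_dataset_update(dataset, 'my-rg', 'my-factory', 'my-dataset',
                                     linked_service_name=None,  # leave the reference untouched
                                     description='updated via az datafactory dataset update')
assert patched is dataset and patched.description == 'updated via az datafactory dataset update'
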
103 changes: 81 additions & 22 deletions src/datafactory/azext_datafactory/generated/custom.py
@@ -10,6 +10,7 @@
# pylint: disable=too-many-lines
# pylint: disable=unused-argument

+ import json
from knack.util import CLIError
from azure.cli.core.util import sdk_no_wait

@@ -53,7 +54,7 @@ def datafactory_factory_create(client,
if_match=if_match,
location=location,
tags=tags,
- identity={"type": "SystemAssigned"},
+ identity=json.loads("{\"type\": \"SystemAssigned\"}"),
repo_configuration=repo_configuration)


Expand All @@ -64,7 +65,7 @@ def datafactory_factory_update(client,
return client.update(resource_group_name=resource_group_name,
factory_name=factory_name,
tags=tags,
- identity={"type": "SystemAssigned"})
+ identity=json.loads("{\"type\": \"SystemAssigned\"}"))


def datafactory_factory_delete(client,
@@ -395,6 +396,26 @@ def datafactory_linked_service_create(client,
properties=properties)


def datafactory_linked_service_update(instance,
resource_group_name,
factory_name,
linked_service_name,
if_match=None,
connect_via=None,
description=None,
parameters=None,
annotations=None):
if connect_via is not None:
instance.connect_via = connect_via
if description is not None:
instance.description = description
if parameters is not None:
instance.parameters = parameters
if annotations is not None:
instance.annotations = annotations
return instance


def datafactory_linked_service_delete(client,
resource_group_name,
factory_name,
@@ -435,6 +456,35 @@ def datafactory_dataset_create(client,
properties=properties)


def datafactory_dataset_update(instance,
resource_group_name,
factory_name,
dataset_name,
linked_service_name,
if_match=None,
description=None,
structure=None,
schema=None,
parameters=None,
annotations=None,
folder=None):
if description is not None:
instance.description = description
if structure is not None:
instance.structure = structure
if schema is not None:
instance.schema = schema
if linked_service_name is not None:
instance.linked_service_name = linked_service_name
if parameters is not None:
instance.parameters = parameters
if annotations is not None:
instance.annotations = annotations
if folder is not None:
instance.folder = folder
return instance


def datafactory_dataset_delete(client,
resource_group_name,
factory_name,
@@ -488,22 +538,6 @@ def datafactory_pipeline_update(instance,
annotations=None,
run_dimensions=None,
folder_name=None):
- if description is not None:
-     instance.description = description
- if activities is not None:
-     instance.activities = activities
- if parameters is not None:
-     instance.parameters = parameters
- if variables is not None:
-     instance.variables = variables
- if concurrency is not None:
-     instance.concurrency = concurrency
- if annotations is not None:
-     instance.annotations = annotations
- if run_dimensions is not None:
-     instance.run_dimensions = run_dimensions
- if folder_name is not None:
-     instance.name_properties_folder_name = folder_name
return instance


@@ -565,7 +599,7 @@ def datafactory_pipeline_run_query_by_factory(client,
order_by=None):
return client.query_by_factory(resource_group_name=resource_group_name,
factory_name=factory_name,
- continuation_token=continuation_token,
+ continuation_token_parameter=continuation_token,
last_updated_after=last_updated_after,
last_updated_before=last_updated_before,
filters=filters,
@@ -584,7 +618,7 @@ def datafactory_activity_run_query_by_pipeline_run(client,
return client.query_by_pipeline_run(resource_group_name=resource_group_name,
factory_name=factory_name,
run_id=run_id,
- continuation_token=continuation_token,
+ continuation_token_parameter=continuation_token,
last_updated_after=last_updated_after,
last_updated_before=last_updated_before,
filters=filters,
@@ -622,6 +656,20 @@ def datafactory_trigger_create(client,
properties=properties)


def datafactory_trigger_update(instance,
resource_group_name,
factory_name,
trigger_name,
if_match=None,
description=None,
annotations=None):
if description is not None:
instance.description = description
if annotations is not None:
instance.annotations = annotations
return instance


def datafactory_trigger_delete(client,
resource_group_name,
factory_name,
Expand All @@ -647,7 +695,7 @@ def datafactory_trigger_query_by_factory(client,
parent_trigger_name=None):
return client.query_by_factory(resource_group_name=resource_group_name,
factory_name=factory_name,
- continuation_token=continuation_token,
+ continuation_token_parameter=continuation_token,
parent_trigger_name=parent_trigger_name)


@@ -699,6 +747,17 @@ def datafactory_trigger_unsubscribe_from_event(client,
trigger_name=trigger_name)


def datafactory_trigger_run_cancel(client,
resource_group_name,
factory_name,
trigger_name,
run_id):
return client.cancel(resource_group_name=resource_group_name,
factory_name=factory_name,
trigger_name=trigger_name,
run_id=run_id)


def datafactory_trigger_run_query_by_factory(client,
resource_group_name,
factory_name,
Expand All @@ -709,7 +768,7 @@ def datafactory_trigger_run_query_by_factory(client,
order_by=None):
return client.query_by_factory(resource_group_name=resource_group_name,
factory_name=factory_name,
- continuation_token=continuation_token,
+ continuation_token_parameter=continuation_token,
last_updated_after=last_updated_after,
last_updated_before=last_updated_before,
filters=filters,
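
For illustration only (not part of this commit): the identity payloads in datafactory_factory_create and datafactory_factory_update are now built from a JSON string instead of a dict literal. The two forms produce the same payload, as a quick check shows:

import json
assert json.loads("{\"type\": \"SystemAssigned\"}") == {"type": "SystemAssigned"}
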