From f0dad649b944c77a7402751445ecc71512443f3a Mon Sep 17 00:00:00 2001 From: njucz <740360112@qq.com> Date: Wed, 14 Jul 2021 15:55:06 +0800 Subject: [PATCH] new resource "azurerm_data_factory_data_flow" --- .../services/datafactory/client/client.go | 5 + .../datafactory/data_factory_data_flow.go | 251 ++++++++++++++ .../data_factory_data_flow_resource.go | 274 +++++++++++++++ .../data_factory_data_flow_resource_test.go | 318 ++++++++++++++++++ .../services/datafactory/parse/data_flow.go | 75 +++++ .../datafactory/parse/data_flow_test.go | 128 +++++++ .../services/datafactory/registration.go | 1 + .../services/datafactory/resourceids.go | 1 + .../datafactory/validate/data_flow_id.go | 23 ++ .../datafactory/validate/data_flow_id_test.go | 88 +++++ .../r/data_factory_data_flow.html.markdown | 215 ++++++++++++ 11 files changed, 1379 insertions(+) create mode 100644 azurerm/internal/services/datafactory/data_factory_data_flow.go create mode 100644 azurerm/internal/services/datafactory/data_factory_data_flow_resource.go create mode 100644 azurerm/internal/services/datafactory/data_factory_data_flow_resource_test.go create mode 100644 azurerm/internal/services/datafactory/parse/data_flow.go create mode 100644 azurerm/internal/services/datafactory/parse/data_flow_test.go create mode 100644 azurerm/internal/services/datafactory/validate/data_flow_id.go create mode 100644 azurerm/internal/services/datafactory/validate/data_flow_id_test.go create mode 100644 website/docs/r/data_factory_data_flow.html.markdown diff --git a/azurerm/internal/services/datafactory/client/client.go b/azurerm/internal/services/datafactory/client/client.go index 22121beb5f18..493dd650ed41 100644 --- a/azurerm/internal/services/datafactory/client/client.go +++ b/azurerm/internal/services/datafactory/client/client.go @@ -6,6 +6,7 @@ import ( ) type Client struct { + DataFlowClient *datafactory.DataFlowsClient DatasetClient *datafactory.DatasetsClient FactoriesClient *datafactory.FactoriesClient IntegrationRuntimesClient *datafactory.IntegrationRuntimesClient @@ -17,6 +18,9 @@ type Client struct { } func NewClient(o *common.ClientOptions) *Client { + dataFlowClient := datafactory.NewDataFlowsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + o.ConfigureClient(&dataFlowClient.Client, o.ResourceManagerAuthorizer) + DatasetClient := datafactory.NewDatasetsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) o.ConfigureClient(&DatasetClient.Client, o.ResourceManagerAuthorizer) @@ -42,6 +46,7 @@ func NewClient(o *common.ClientOptions) *Client { o.ConfigureClient(&TriggersClient.Client, o.ResourceManagerAuthorizer) return &Client{ + DataFlowClient: &dataFlowClient, DatasetClient: &DatasetClient, FactoriesClient: &FactoriesClient, IntegrationRuntimesClient: &IntegrationRuntimesClient, diff --git a/azurerm/internal/services/datafactory/data_factory_data_flow.go b/azurerm/internal/services/datafactory/data_factory_data_flow.go new file mode 100644 index 000000000000..ad04dfa3fb3b --- /dev/null +++ b/azurerm/internal/services/datafactory/data_factory_data_flow.go @@ -0,0 +1,251 @@ +package datafactory + +import ( + "github.com/Azure/azure-sdk-for-go/services/datafactory/mgmt/2018-06-01/datafactory" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/pluginsdk" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func 
SchemaForDataFlowSourceAndSink() *pluginsdk.Schema { + return &pluginsdk.Schema{ + Type: pluginsdk.TypeList, + Required: true, + Elem: &pluginsdk.Resource{ + Schema: map[string]*pluginsdk.Schema{ + "name": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "description": { + Type: pluginsdk.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "dataset": { + Type: pluginsdk.TypeList, + Optional: true, + MaxItems: 1, + Elem: &pluginsdk.Resource{ + Schema: map[string]*pluginsdk.Schema{ + "name": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "parameters": { + Type: pluginsdk.TypeMap, + Optional: true, + Elem: &pluginsdk.Schema{ + Type: pluginsdk.TypeString, + }, + }, + }, + }, + }, + + "linked_service": { + Type: pluginsdk.TypeList, + Optional: true, + MaxItems: 1, + Elem: &pluginsdk.Resource{ + Schema: map[string]*pluginsdk.Schema{ + "name": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "parameters": { + Type: pluginsdk.TypeMap, + Optional: true, + Elem: &pluginsdk.Schema{ + Type: pluginsdk.TypeString, + }, + }, + }, + }, + }, + + "schema_linked_service": { + Type: pluginsdk.TypeList, + Optional: true, + MaxItems: 1, + Elem: &pluginsdk.Resource{ + Schema: map[string]*pluginsdk.Schema{ + "name": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "parameters": { + Type: pluginsdk.TypeMap, + Optional: true, + Elem: &pluginsdk.Schema{ + Type: pluginsdk.TypeString, + }, + }, + }, + }, + }, + }, + }, + } +} + +func expandDataFactoryDataFlowSource(input []interface{}) *[]datafactory.DataFlowSource { + if len(input) == 0 || input[0] == nil { + return nil + } + + result := make([]datafactory.DataFlowSource, 0) + for _, v := range input { + raw := v.(map[string]interface{}) + result = append(result, datafactory.DataFlowSource{ + Description: utils.String(raw["description"].(string)), + Name: utils.String(raw["name"].(string)), + Dataset: expandDataFactoryDatasetReference(raw["dataset"].([]interface{})), + LinkedService: expandDataFactoryLinkedServiceReference(raw["linked_service"].([]interface{})), + SchemaLinkedService: expandDataFactoryLinkedServiceReference(raw["schema_linked_service"].([]interface{})), + }) + } + return &result +} + +func expandDataFactoryDataFlowSink(input []interface{}) *[]datafactory.DataFlowSink { + if len(input) == 0 || input[0] == nil { + return nil + } + + result := make([]datafactory.DataFlowSink, 0) + for _, v := range input { + raw := v.(map[string]interface{}) + result = append(result, datafactory.DataFlowSink{ + Description: utils.String(raw["description"].(string)), + Name: utils.String(raw["name"].(string)), + Dataset: expandDataFactoryDatasetReference(raw["dataset"].([]interface{})), + LinkedService: expandDataFactoryLinkedServiceReference(raw["linked_service"].([]interface{})), + SchemaLinkedService: expandDataFactoryLinkedServiceReference(raw["schema_linked_service"].([]interface{})), + }) + } + return &result +} + +func expandDataFactoryDatasetReference(input []interface{}) *datafactory.DatasetReference { + if len(input) == 0 || input[0] == nil { + return nil + } + + raw := input[0].(map[string]interface{}) + return &datafactory.DatasetReference{ + Type: utils.String("DatasetReference"), + ReferenceName: utils.String(raw["name"].(string)), + Parameters: raw["parameters"].(map[string]interface{}), + } +} + +func 
expandDataFactoryLinkedServiceReference(input []interface{}) *datafactory.LinkedServiceReference { + if len(input) == 0 || input[0] == nil { + return nil + } + + raw := input[0].(map[string]interface{}) + return &datafactory.LinkedServiceReference{ + Type: utils.String("LinkedServiceReference"), + ReferenceName: utils.String(raw["name"].(string)), + Parameters: raw["parameters"].(map[string]interface{}), + } +} + +func flattenDataFactoryDataFlowSource(input *[]datafactory.DataFlowSource) []interface{} { + if input == nil { + return []interface{}{} + } + + result := make([]interface{}, 0) + for _, v := range *input { + name := "" + description := "" + if v.Name != nil { + name = *v.Name + } + if v.Description != nil { + description = *v.Description + } + result = append(result, map[string]interface{}{ + "name": name, + "description": description, + "dataset": flattenDataFactoryDatasetReference(v.Dataset), + "linked_service": flattenDataFactoryLinkedServiceReference(v.LinkedService), + "schema_linked_service": flattenDataFactoryLinkedServiceReference(v.SchemaLinkedService), + }) + } + return result +} + +func flattenDataFactoryDataFlowSink(input *[]datafactory.DataFlowSink) []interface{} { + if input == nil { + return []interface{}{} + } + + result := make([]interface{}, 0) + for _, v := range *input { + name := "" + description := "" + if v.Name != nil { + name = *v.Name + } + if v.Description != nil { + description = *v.Description + } + result = append(result, map[string]interface{}{ + "name": name, + "description": description, + "dataset": flattenDataFactoryDatasetReference(v.Dataset), + "linked_service": flattenDataFactoryLinkedServiceReference(v.LinkedService), + "schema_linked_service": flattenDataFactoryLinkedServiceReference(v.SchemaLinkedService), + }) + } + return result +} + +func flattenDataFactoryDatasetReference(input *datafactory.DatasetReference) []interface{} { + if input == nil { + return []interface{}{} + } + + name := "" + if input.ReferenceName != nil { + name = *input.ReferenceName + } + + return []interface{}{ + map[string]interface{}{ + "name": name, + "parameters": input.Parameters, + }, + } +} + +func flattenDataFactoryLinkedServiceReference(input *datafactory.LinkedServiceReference) []interface{} { + if input == nil { + return []interface{}{} + } + + name := "" + if input.ReferenceName != nil { + name = *input.ReferenceName + } + + return []interface{}{ + map[string]interface{}{ + "name": name, + "parameters": input.Parameters, + }, + } +} diff --git a/azurerm/internal/services/datafactory/data_factory_data_flow_resource.go b/azurerm/internal/services/datafactory/data_factory_data_flow_resource.go new file mode 100644 index 000000000000..6ee0dc41eb27 --- /dev/null +++ b/azurerm/internal/services/datafactory/data_factory_data_flow_resource.go @@ -0,0 +1,274 @@ +package datafactory + +import ( + "fmt" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datafactory/mgmt/2018-06-01/datafactory" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datafactory/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datafactory/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/pluginsdk" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceDataFactoryDataFlow() *pluginsdk.Resource { + return &pluginsdk.Resource{ + Create: resourceDataFactoryDataFlowCreateUpdate, + Read: resourceDataFactoryDataFlowRead, + Update: resourceDataFactoryDataFlowCreateUpdate, + Delete: resourceDataFactoryDataFlowDelete, + + Importer: pluginsdk.ImporterValidatingResourceId(func(id string) error { + _, err := parse.DataFlowID(id) + return err + }), + + Timeouts: &pluginsdk.ResourceTimeout{ + Create: pluginsdk.DefaultTimeout(30 * time.Minute), + Read: pluginsdk.DefaultTimeout(5 * time.Minute), + Update: pluginsdk.DefaultTimeout(30 * time.Minute), + Delete: pluginsdk.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*pluginsdk.Schema{ + "name": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + }, + + "data_factory_id": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DataFactoryID, + }, + + "script": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "source": SchemaForDataFlowSourceAndSink(), + + "sink": SchemaForDataFlowSourceAndSink(), + + "transformation": { + Type: pluginsdk.TypeList, + Optional: true, + Elem: &pluginsdk.Resource{ + Schema: map[string]*pluginsdk.Schema{ + "name": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "description": { + Type: pluginsdk.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + }, + }, + + "annotations": { + Type: pluginsdk.TypeList, + Optional: true, + Elem: &pluginsdk.Schema{ + Type: pluginsdk.TypeString, + }, + }, + + "description": { + Type: pluginsdk.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "folder": { + Type: pluginsdk.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + } +} + +func resourceDataFactoryDataFlowCreateUpdate(d *pluginsdk.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataFactory.DataFlowClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + dataFactoryId, err := parse.DataFactoryID(d.Get("data_factory_id").(string)) + if err != nil { + return err + } + + id := parse.NewDataFlowID(subscriptionId, dataFactoryId.ResourceGroup, dataFactoryId.FactoryName, d.Get("name").(string)) + if d.IsNewResource() { + existing, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for presence of existing %s: %+v", id, err) + } + } + + if !utils.ResponseWasNotFound(existing.Response) { + return tf.ImportAsExistsError("azurerm_data_factory_data_flow", id.ID()) + } + } + + mappingDataFlow := datafactory.MappingDataFlow{ + MappingDataFlowTypeProperties: &datafactory.MappingDataFlowTypeProperties{ + Script: utils.String(d.Get("script").(string)), + Sinks: expandDataFactoryDataFlowSink(d.Get("sink").([]interface{})), + Sources: expandDataFactoryDataFlowSource(d.Get("source").([]interface{})), + Transformations: 
expandDataFactoryDataFlowTransformation(d.Get("transformation").([]interface{})), + }, + Description: utils.String(d.Get("description").(string)), + Type: datafactory.TypeBasicDataFlowTypeMappingDataFlow, + } + + if v, ok := d.GetOk("annotations"); ok { + annotations := v.([]interface{}) + mappingDataFlow.Annotations = &annotations + } + + if v, ok := d.GetOk("folder"); ok { + mappingDataFlow.Folder = &datafactory.DataFlowFolder{ + Name: utils.String(v.(string)), + } + } + + dataFlow := datafactory.DataFlowResource{ + Properties: &mappingDataFlow, + } + + if _, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.FactoryName, id.Name, dataFlow, ""); err != nil { + return fmt.Errorf(" creating/updating %s: %+v", id, err) + } + + d.SetId(id.ID()) + + return resourceDataFactoryDataFlowRead(d, meta) +} + +func resourceDataFactoryDataFlowRead(d *pluginsdk.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataFactory.DataFlowClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataFlowID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + return nil + } + + return fmt.Errorf("retrieving %s: %+v", id, err) + } + + mappingDataFlow, ok := resp.Properties.AsMappingDataFlow() + if !ok { + return fmt.Errorf("classifying type of %s: Expected: %q", id, datafactory.TypeBasicDataFlowTypeMappingDataFlow) + } + + d.Set("name", id.Name) + d.Set("data_factory_id", parse.NewDataFactoryID(id.SubscriptionId, id.ResourceGroup, id.FactoryName).ID()) + d.Set("description", mappingDataFlow.Description) + + if err := d.Set("annotations", flattenDataFactoryAnnotations(mappingDataFlow.Annotations)); err != nil { + return fmt.Errorf("setting `annotations`: %+v", err) + } + + folder := "" + if mappingDataFlow.Folder != nil && mappingDataFlow.Folder.Name != nil { + folder = *mappingDataFlow.Folder.Name + } + d.Set("folder", folder) + + if prop := mappingDataFlow.MappingDataFlowTypeProperties; prop != nil { + d.Set("script", prop.Script) + + if err := d.Set("source", flattenDataFactoryDataFlowSource(prop.Sources)); err != nil { + return fmt.Errorf("setting `source`: %+v", err) + } + if err := d.Set("sink", flattenDataFactoryDataFlowSink(prop.Sinks)); err != nil { + return fmt.Errorf("setting `sink`: %+v", err) + } + if err := d.Set("transformation", flattenDataFactoryDataFlowTransformation(prop.Transformations)); err != nil { + return fmt.Errorf("setting `transformation`: %+v", err) + } + } + + return nil +} + +func resourceDataFactoryDataFlowDelete(d *pluginsdk.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataFactory.DataFlowClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataFlowID(d.Id()) + if err != nil { + return err + } + + if _, err := client.Delete(ctx, id.ResourceGroup, id.FactoryName, id.Name); err != nil { + return fmt.Errorf("deleting %s: %+v", id, err) + } + + return nil +} + +func expandDataFactoryDataFlowTransformation(input []interface{}) *[]datafactory.Transformation { + if len(input) == 0 || input[0] == nil { + return nil + } + + result := make([]datafactory.Transformation, 0) + for _, v := range input { + raw := v.(map[string]interface{}) + result = append(result, datafactory.Transformation{ + Description: utils.String(raw["description"].(string)), + Name: 
utils.String(raw["name"].(string)), + }) + } + return &result +} + +func flattenDataFactoryDataFlowTransformation(input *[]datafactory.Transformation) []interface{} { + if input == nil { + return []interface{}{} + } + + result := make([]interface{}, 0) + for _, v := range *input { + name := "" + description := "" + if v.Name != nil { + name = *v.Name + } + if v.Description != nil { + description = *v.Description + } + result = append(result, map[string]interface{}{ + "name": name, + "description": description, + }) + } + return result +} diff --git a/azurerm/internal/services/datafactory/data_factory_data_flow_resource_test.go b/azurerm/internal/services/datafactory/data_factory_data_flow_resource_test.go new file mode 100644 index 000000000000..65d4de61a131 --- /dev/null +++ b/azurerm/internal/services/datafactory/data_factory_data_flow_resource_test.go @@ -0,0 +1,318 @@ +package datafactory_test + +import ( + "context" + "fmt" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datafactory/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/pluginsdk" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DataFlowResource struct { +} + +func TestAccDataFactoryDataFlow_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_data_flow", "test") + r := DataFlowResource{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.basic(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataFactoryDataFlow_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_data_flow", "test") + r := DataFlowResource{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.basic(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccDataFactoryDataFlow_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_data_flow", "test") + r := DataFlowResource{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.complete(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataFactoryDataFlow_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_data_flow", "test") + r := DataFlowResource{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.basic(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.complete(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t DataFlowResource) Exists(ctx context.Context, clients *clients.Client, state *pluginsdk.InstanceState) 
(*bool, error) { + id, err := parse.DataFlowID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.DataFactory.DataFlowClient.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") + if err != nil { + return nil, fmt.Errorf("reading %s: %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r DataFlowResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_factory_data_flow" "test" { + name = "acctestdf%d" + data_factory_id = azurerm_data_factory.test.id + + source { + name = "source1" + + linked_service { + name = azurerm_data_factory_linked_custom_service.test.name + } + } + + sink { + name = "sink1" + + linked_service { + name = azurerm_data_factory_linked_custom_service.test.name + } + } + + script = < source1 +source1 sink( + allowSchemaDrift: true, + validateSchema: false, + skipDuplicateMapInputs: true, + skipDuplicateMapOutputs: true) ~> sink1 +EOT +} +`, r.template(data), data.RandomInteger) +} + +func (r DataFlowResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_factory_data_flow" "import" { + name = azurerm_data_factory_data_flow.test.name + data_factory_id = azurerm_data_factory_data_flow.test.data_factory_id + script = azurerm_data_factory_data_flow.test.script + source { + name = azurerm_data_factory_data_flow.test.source.0.name + linked_service { + name = azurerm_data_factory_data_flow.test.source.0.linked_service.0.name + } + } + + sink { + name = azurerm_data_factory_data_flow.test.sink.0.name + linked_service { + name = azurerm_data_factory_data_flow.test.sink.0.linked_service.0.name + } + } +} +`, r.basic(data)) +} + +func (r DataFlowResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_factory_data_flow" "test" { + name = "acctestdf%d" + data_factory_id = azurerm_data_factory.test.id + description = "description for data flow" + annotations = ["anno1", "anno2"] + folder = "folder1" + + source { + name = "source1" + description = "description for source1" + + linked_service { + name = azurerm_data_factory_linked_custom_service.test.name + parameters = { + "Key1" = "value1" + } + } + + schema_linked_service { + name = azurerm_data_factory_linked_custom_service.test.name + parameters = { + "Key1" = "value1" + } + } + } + + sink { + name = "sink1" + description = "description for sink1" + + linked_service { + name = azurerm_data_factory_linked_custom_service.test.name + parameters = { + "Key1" = "value1" + } + } + + schema_linked_service { + name = azurerm_data_factory_linked_custom_service.test.name + parameters = { + "Key1" = "value1" + } + } + } + + transformation { + name = "filter1" + description = "description for filter1" + } + + script = < source1 +source1 filter(toInteger(year) >= 1910 && toInteger(year) <= 2000) ~> Filter1 +Filter1 sink(allowSchemaDrift: true, + validateSchema: false, + skipDuplicateMapInputs: true, + skipDuplicateMapOutputs: true, + saveOrder: 0, + partitionBy('roundRobin', 3)) ~> sink1 +EOT +} +`, r.template(data), data.RandomInteger) +} + +func (DataFlowResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-df-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestsa%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + 
account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_data_factory_linked_custom_service" "test" { + name = "acctestls%d" + data_factory_id = azurerm_data_factory.test.id + type = "AzureBlobStorage" + type_properties_json = < source1 +source1 sink( + allowSchemaDrift: true, + validateSchema: false, + skipDuplicateMapInputs: true, + skipDuplicateMapOutputs: true) ~> sink1 +EOT +} +``` + +## Argument Reference + +The following arguments are supported: + +* `name` - (Required) Specifies the name of the Data Factory Data Flow. Changing this forces a new resource to be created. + +* `data_factory_id` - (Required) The ID of Data Factory in which to associate the Data Flow with. Changing this forces a new resource. + +* `script` - (Required) The script for the Data Factory Data Flow. + +* `source` - (Required) One or more `source` blocks as defined below. + +* `sink` - (Required) One or more `sink` blocks as defined below. + +* `annotations` - (Optional) List of tags that can be used for describing the Data Factory Data Flow. + +* `description` - (Optional) The description for the Data Factory Data Flow. + +* `folder` - (Optional) The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level. + +* `transformation` - (Optional) One or more `transformation` blocks as defined below. + +--- + +A `source` block supports the following: + +* `name` - (Required) The name for the Data Flow Source. + +* `description` - (Optional) The description for the Data Flow Source. + +* `dataset` - (Optional) A `dataset` block as defined below. + +* `linked_service` - (Optional) A `linked_service` block as defined below. + +* `schema_linked_service` - (Optional) A `schema_linked_service` block as defined below. + +--- + +A `sink` block supports the following: + +* `name` - (Required) The name for the Data Flow Source. + +* `description` - (Optional) The description for the Data Flow Source. + +* `dataset` - (Optional) A `dataset` block as defined below. + +* `linked_service` - (Optional) A `linked_service` block as defined below. + +* `schema_linked_service` - (Optional) A `schema_linked_service` block as defined below. + +--- + +A `dataset` block supports the following: + +* `name` - (Required) The name for the Data Factory Dataset. + +* `parameters` - (Optional) A map of parameters to associate with the Data Factory dataset. + +--- + +A `linked_service` block supports the following: + +* `name` - (Required) The name for the Data Factory Linked Service. + +* `parameters` - (Optional) A map of parameters to associate with the Data Factory Linked Service. + +--- + +A `schema_linked_service` block supports the following: + +* `name` - (Required) The name for the Data Factory Linked Service with schema. + +* `parameters` - (Optional) A map of parameters to associate with the Data Factory Linked Service. + +--- + +A `transformation` block supports the following: + +* `name` - (Required) The name for the Data Flow transformation. + +* `description` - (Optional) The description for the Data Flow transformation. + +## Attributes Reference + +The following attributes are exported: + +* `id` - The ID of the Data Factory Data Flow. 
+
+## Timeouts
+
+The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/docs/configuration/resources.html#timeouts) for certain actions:
+
+* `create` - (Defaults to 30 minutes) Used when creating the Data Factory Data Flow.
+* `update` - (Defaults to 30 minutes) Used when updating the Data Factory Data Flow.
+* `read` - (Defaults to 5 minutes) Used when retrieving the Data Factory Data Flow.
+* `delete` - (Defaults to 30 minutes) Used when deleting the Data Factory Data Flow.
+
+## Import
+
+Data Factory Data Flow can be imported using the `resource id`, e.g.
+
+```shell
+terraform import azurerm_data_factory_data_flow.example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/dataflows/example
+```
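As a supplementary sketch (not part of the documentation or tests above): per the schema added in this patch, a `source` or `sink` block may reference a `dataset` block instead of a `linked_service` block. The dataset resource name and the `JobId` parameter below are illustrative placeholders and assume a dataset with that parameter already exists in the same Data Factory.

```hcl
source {
  name        = "source1"
  description = "reads from an existing dataset"

  # "azurerm_data_factory_dataset_json.example" is an assumed, pre-existing dataset resource
  dataset {
    name = azurerm_data_factory_dataset_json.example.name

    # values for parameters assumed to be declared on that dataset
    parameters = {
      "JobId" = "42"
    }
  }
}
```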