new resource "azurerm_data_factory_data_flow"
njuCZ committed Jul 14, 2021
1 parent 0e528be commit f7c5d8b
Showing 11 changed files with 1,473 additions and 0 deletions.
5 changes: 5 additions & 0 deletions azurerm/internal/services/datafactory/client/client.go
@@ -6,6 +6,7 @@ import (
)

type Client struct {
    DataFlowClient            *datafactory.DataFlowsClient
    DatasetClient             *datafactory.DatasetsClient
    FactoriesClient           *datafactory.FactoriesClient
    IntegrationRuntimesClient *datafactory.IntegrationRuntimesClient
@@ -15,6 +16,9 @@ type Client struct {
}

func NewClient(o *common.ClientOptions) *Client {
    dataFlowClient := datafactory.NewDataFlowsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId)
    o.ConfigureClient(&dataFlowClient.Client, o.ResourceManagerAuthorizer)

    DatasetClient := datafactory.NewDatasetsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId)
    o.ConfigureClient(&DatasetClient.Client, o.ResourceManagerAuthorizer)

@@ -34,6 +38,7 @@ func NewClient(o *common.ClientOptions) *Client {
    o.ConfigureClient(&TriggersClient.Client, o.ResourceManagerAuthorizer)

    return &Client{
        DataFlowClient:            &dataFlowClient,
        DatasetClient:             &DatasetClient,
        FactoriesClient:           &FactoriesClient,
        IntegrationRuntimesClient: &IntegrationRuntimesClient,
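For orientation, resources in this provider retrieve the registered client from the meta object in their CRUD functions. Below is a minimal sketch of that wiring, assuming the provider's usual `clients` and `timeouts` helpers; the function name `exampleDataFlowRead`, the `DataFactory.DataFlowClient` field path, and the commented `Get` call are illustrative and not part of the files shown in this commit.

```go
package datafactory

import (
    "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
    "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/pluginsdk"
    "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts"
)

// exampleDataFlowRead is a hypothetical sketch of how a resource function would
// obtain the DataFlowsClient registered above; the real resource file is one of
// the other changed files in this commit and is not reproduced here.
func exampleDataFlowRead(d *pluginsdk.ResourceData, meta interface{}) error {
    client := meta.(*clients.Client).DataFactory.DataFlowClient
    ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d)
    defer cancel()

    // e.g. resp, err := client.Get(ctx, resourceGroup, factoryName, name, "")
    _ = client
    _ = ctx
    return nil
}
```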
261 changes: 261 additions & 0 deletions azurerm/internal/services/datafactory/data_factory_data_flow.go
@@ -0,0 +1,261 @@
package datafactory

import (
    "fmt"

    "github.com/Azure/azure-sdk-for-go/services/datafactory/mgmt/2018-06-01/datafactory"
    "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/pluginsdk"
    "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/validation"
    "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
)

func SchemaForDataFlowSourceAndSink() *pluginsdk.Schema {
    return &pluginsdk.Schema{
        Type:     pluginsdk.TypeList,
        Required: true,
        Elem: &pluginsdk.Resource{
            Schema: map[string]*pluginsdk.Schema{
                "name": {
                    Type:         pluginsdk.TypeString,
                    Required:     true,
                    ValidateFunc: validation.StringIsNotEmpty,
                },

                "description": {
                    Type:         pluginsdk.TypeString,
                    Optional:     true,
                    ValidateFunc: validation.StringIsNotEmpty,
                },

                "dataset": {
                    Type:     pluginsdk.TypeList,
                    Optional: true,
                    MaxItems: 1,
                    Elem: &pluginsdk.Resource{
                        Schema: map[string]*pluginsdk.Schema{
                            "name": {
                                Type:         pluginsdk.TypeString,
                                Required:     true,
                                ValidateFunc: validation.StringIsNotEmpty,
                            },

                            "parameters": {
                                Type:     pluginsdk.TypeMap,
                                Optional: true,
                                Elem: &pluginsdk.Schema{
                                    Type: pluginsdk.TypeString,
                                },
                            },
                        },
                    },
                },

                "linked_service": {
                    Type:     pluginsdk.TypeList,
                    Optional: true,
                    MaxItems: 1,
                    Elem: &pluginsdk.Resource{
                        Schema: map[string]*pluginsdk.Schema{
                            "name": {
                                Type:         pluginsdk.TypeString,
                                Required:     true,
                                ValidateFunc: validation.StringIsNotEmpty,
                            },

                            "parameters": {
                                Type:     pluginsdk.TypeMap,
                                Optional: true,
                                Elem: &pluginsdk.Schema{
                                    Type: pluginsdk.TypeString,
                                },
                            },
                        },
                    },
                },

                "schema_linked_service": {
                    Type:     pluginsdk.TypeList,
                    Optional: true,
                    MaxItems: 1,
                    Elem: &pluginsdk.Resource{
                        Schema: map[string]*pluginsdk.Schema{
                            "name": {
                                Type:         pluginsdk.TypeString,
                                Required:     true,
                                ValidateFunc: validation.StringIsNotEmpty,
                            },

                            "parameters": {
                                Type:     pluginsdk.TypeMap,
                                Optional: true,
                                Elem: &pluginsdk.Schema{
                                    Type: pluginsdk.TypeString,
                                },
                            },
                        },
                    },
                },
            },
        },
    }
}

func expandDataFactoryDataFlowSource(input []interface{}) (*[]datafactory.DataFlowSource, error) {
    if len(input) == 0 || input[0] == nil {
        return nil, nil
    }

    result := make([]datafactory.DataFlowSource, 0)
    for _, v := range input {
        raw := v.(map[string]interface{})
        item := datafactory.DataFlowSource{
            Description:         utils.String(raw["description"].(string)),
            Name:                utils.String(raw["name"].(string)),
            Dataset:             expandDataFactoryDatasetReference(raw["dataset"].([]interface{})),
            LinkedService:       expandDataFactoryLinkedServiceReference(raw["linked_service"].([]interface{})),
            SchemaLinkedService: expandDataFactoryLinkedServiceReference(raw["schema_linked_service"].([]interface{})),
        }
        if item.Dataset == nil && item.LinkedService == nil && item.SchemaLinkedService == nil {
            return nil, fmt.Errorf("at least one of `dataset`, `linked_service`, `schema_linked_service` for a `source` must be set")
        }
        result = append(result, item)
    }
    return &result, nil
}

func expandDataFactoryDataFlowSink(input []interface{}) (*[]datafactory.DataFlowSink, error) {
    if len(input) == 0 || input[0] == nil {
        return nil, nil
    }

    result := make([]datafactory.DataFlowSink, 0)
    for _, v := range input {
        raw := v.(map[string]interface{})
        item := datafactory.DataFlowSink{
            Description:         utils.String(raw["description"].(string)),
            Name:                utils.String(raw["name"].(string)),
            Dataset:             expandDataFactoryDatasetReference(raw["dataset"].([]interface{})),
            LinkedService:       expandDataFactoryLinkedServiceReference(raw["linked_service"].([]interface{})),
            SchemaLinkedService: expandDataFactoryLinkedServiceReference(raw["schema_linked_service"].([]interface{})),
        }
        if item.Dataset == nil && item.LinkedService == nil && item.SchemaLinkedService == nil {
            return nil, fmt.Errorf("at least one of `dataset`, `linked_service`, `schema_linked_service` for a `sink` must be set")
        }
        result = append(result, item)
    }
    return &result, nil
}

func expandDataFactoryDatasetReference(input []interface{}) *datafactory.DatasetReference {
    if len(input) == 0 || input[0] == nil {
        return nil
    }

    raw := input[0].(map[string]interface{})
    return &datafactory.DatasetReference{
        Type:          utils.String("DatasetReference"),
        ReferenceName: utils.String(raw["name"].(string)),
        Parameters:    raw["parameters"].(map[string]interface{}),
    }
}

func expandDataFactoryLinkedServiceReference(input []interface{}) *datafactory.LinkedServiceReference {
    if len(input) == 0 || input[0] == nil {
        return nil
    }

    raw := input[0].(map[string]interface{})
    return &datafactory.LinkedServiceReference{
        Type:          utils.String("LinkedServiceReference"),
        ReferenceName: utils.String(raw["name"].(string)),
        Parameters:    raw["parameters"].(map[string]interface{}),
    }
}

func flattenDataFactoryDataFlowSource(input *[]datafactory.DataFlowSource) []interface{} {
    if input == nil {
        return []interface{}{}
    }

    result := make([]interface{}, 0)
    for _, v := range *input {
        name := ""
        description := ""
        if v.Name != nil {
            name = *v.Name
        }
        if v.Description != nil {
            description = *v.Description
        }
        result = append(result, map[string]interface{}{
            "name":                  name,
            "description":           description,
            "dataset":               flattenDataFactoryDatasetReference(v.Dataset),
            "linked_service":        flattenDataFactoryLinkedServiceReference(v.LinkedService),
            "schema_linked_service": flattenDataFactoryLinkedServiceReference(v.SchemaLinkedService),
        })
    }
    return result
}

func flattenDataFactoryDataFlowSink(input *[]datafactory.DataFlowSink) []interface{} {
    if input == nil {
        return []interface{}{}
    }

    result := make([]interface{}, 0)
    for _, v := range *input {
        name := ""
        description := ""
        if v.Name != nil {
            name = *v.Name
        }
        if v.Description != nil {
            description = *v.Description
        }
        result = append(result, map[string]interface{}{
            "name":                  name,
            "description":           description,
            "dataset":               flattenDataFactoryDatasetReference(v.Dataset),
            "linked_service":        flattenDataFactoryLinkedServiceReference(v.LinkedService),
            "schema_linked_service": flattenDataFactoryLinkedServiceReference(v.SchemaLinkedService),
        })
    }
    return result
}

func flattenDataFactoryDatasetReference(input *datafactory.DatasetReference) []interface{} {
    if input == nil {
        return []interface{}{}
    }

    name := ""
    if input.ReferenceName != nil {
        name = *input.ReferenceName
    }

    return []interface{}{
        map[string]interface{}{
            "name":       name,
            "parameters": input.Parameters,
        },
    }
}

func flattenDataFactoryLinkedServiceReference(input *datafactory.LinkedServiceReference) []interface{} {
    if input == nil {
        return []interface{}{}
    }

    name := ""
    if input.ReferenceName != nil {
        name = *input.ReferenceName
    }

    return []interface{}{
        map[string]interface{}{
            "name":       name,
            "parameters": input.Parameters,
        },
    }
}
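For orientation, here is a hypothetical, test-style sketch (not part of this commit) of the raw value that `expandDataFactoryDataFlowSource` above consumes; the literal names are placeholders. Every key must be present in the map, because the expanders use unchecked type assertions.

```go
package datafactory

// exampleExpandDataFlowSource is a hypothetical sketch showing the shape of the
// raw schema value consumed by expandDataFactoryDataFlowSource and how the
// "at least one reference" error surfaces.
func exampleExpandDataFlowSource() error {
    raw := []interface{}{
        map[string]interface{}{
            // every key must be present: the expanders use unchecked type assertions
            "name":        "exampleSource",
            "description": "an illustrative source",
            "dataset": []interface{}{
                map[string]interface{}{
                    "name":       "exampleDataset",
                    "parameters": map[string]interface{}{"foo": "bar"},
                },
            },
            "linked_service":        []interface{}{},
            "schema_linked_service": []interface{}{},
        },
    }

    sources, err := expandDataFactoryDataFlowSource(raw)
    if err != nil {
        // returned when dataset, linked_service and schema_linked_service are all empty
        return err
    }
    _ = sources // in the resource's Create/Update these become the data flow's sources
    return nil
}
```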