diff --git a/azurerm/internal/services/datalake/parse/data_lake.go b/azurerm/internal/services/datalake/parse/data_lake.go new file mode 100644 index 000000000000..c29413d21353 --- /dev/null +++ b/azurerm/internal/services/datalake/parse/data_lake.go @@ -0,0 +1,33 @@ +package parse + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type DataLakeStoreId struct { + Subscription string + ResourceGroup string + Name string +} + +func DataLakeStoreID(input string) (*DataLakeStoreId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, fmt.Errorf("parsing Data Lake Store ID %q: %+v", input, err) + } + + dataLakeStore := DataLakeStoreId{ + ResourceGroup: id.ResourceGroup, + Subscription: id.SubscriptionID, + } + if dataLakeStore.Name, err = id.PopSegment("accounts"); err != nil { + return nil, err + } + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &dataLakeStore, nil +} diff --git a/azurerm/internal/services/datalake/parse/data_lake_test.go b/azurerm/internal/services/datalake/parse/data_lake_test.go new file mode 100644 index 000000000000..17ddd99e26dd --- /dev/null +++ b/azurerm/internal/services/datalake/parse/data_lake_test.go @@ -0,0 +1,75 @@ +package parse + +import "testing" + +func TestDataLakeStoreID(t *testing.T) { + testData := []struct { + Name string + Input string + Expected *DataLakeStoreId + }{ + { + Name: "Empty", + Input: "", + Expected: nil, + }, + { + Name: "No Resource Groups Segment", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + Expected: nil, + }, + { + Name: "No Resource Groups Value", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + Expected: nil, + }, + { + Name: "Resource Group ID", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + Expected: nil, + }, + { + Name: "Missing Account Value", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataLakeStore/accounts/", + Expected: nil, + }, + { + Name: "Data lake account ID", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataLakeStore/accounts/account1", + Expected: &DataLakeStoreId{ + Name: "account1", + ResourceGroup: "resGroup1", + Subscription: "00000000-0000-0000-0000-000000000000", + }, + }, + { + Name: "Wrong Casing", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataLakeStore/Accounts/account1", + Expected: nil, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q..", v.Name) + + actual, err := DataLakeStoreID(v.Input) + if err != nil { + if v.Expected == nil { + continue + } + t.Fatalf("Expected a value but got an error: %s", err) + } + + if actual.Subscription != v.Expected.Subscription { + t.Fatalf("Expected %q but got %q for Subscription", v.Expected.Subscription, actual.Subscription) + } + + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/datashare/registration.go b/azurerm/internal/services/datashare/registration.go index 758d5664a94a..23a8546de59d 100644 --- 
a/azurerm/internal/services/datashare/registration.go +++ b/azurerm/internal/services/datashare/registration.go @@ -28,8 +28,9 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_data_share_account": resourceArmDataShareAccount(), - "azurerm_data_share": resourceArmDataShare(), - "azurerm_data_share_dataset_blob_storage": resourceArmDataShareDataSetBlobStorage(), + "azurerm_data_share_account": resourceArmDataShareAccount(), + "azurerm_data_share": resourceArmDataShare(), + "azurerm_data_share_dataset_blob_storage": resourceArmDataShareDataSetBlobStorage(), + "azurerm_data_share_dataset_data_lake_gen1": resourceArmDataShareDataSetDataLakeGen1(), } } diff --git a/azurerm/internal/services/datashare/resource_arm_data_share_dataset_data_lake_gen1.go b/azurerm/internal/services/datashare/resource_arm_data_share_dataset_data_lake_gen1.go new file mode 100644 index 000000000000..77df25952780 --- /dev/null +++ b/azurerm/internal/services/datashare/resource_arm_data_share_dataset_data_lake_gen1.go @@ -0,0 +1,223 @@ +package datashare + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + dataLakeParse "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datalake/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmDataShareDataSetDataLakeGen1() *schema.Resource { + return &schema.Resource{ + Create: resourceArmDataShareDataSetDataLakeGen1Create, + Read: resourceArmDataShareDataSetDataLakeGen1Read, + Delete: resourceArmDataShareDataSetDataLakeGen1Delete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.DataShareDataSetID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DatashareDataSetName(), + }, + + "data_share_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DataShareID, + }, + + "data_lake_store_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DatalakeStoreID, + }, + + "folder_path": { + Type: schema.TypeString, 
+ Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "file_name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "display_name": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} +func resourceArmDataShareDataSetDataLakeGen1Create(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + shareId, err := parse.DataShareID(d.Get("data_share_id").(string)) + if err != nil { + return err + } + + existing, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for presence of existing DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + } + existingId := helper.GetAzurermDataShareDataSetId(existing.Value) + if existingId != nil && *existingId != "" { + return tf.ImportAsExistsError("azurerm_data_share_dataset_data_lake_gen1", *existingId) + } + + dataLakeStoreId, err := dataLakeParse.DataLakeStoreID(d.Get("data_lake_store_id").(string)) + if err != nil { + return err + } + + var dataSet datashare.BasicDataSet + + if fileName, ok := d.GetOk("file_name"); ok { + dataSet = datashare.ADLSGen1FileDataSet{ + Kind: datashare.KindAdlsGen1File, + ADLSGen1FileProperties: &datashare.ADLSGen1FileProperties{ + AccountName: utils.String(dataLakeStoreId.Name), + ResourceGroup: utils.String(dataLakeStoreId.ResourceGroup), + SubscriptionID: utils.String(dataLakeStoreId.Subscription), + FolderPath: utils.String(d.Get("folder_path").(string)), + FileName: utils.String(fileName.(string)), + }, + } + } else { + dataSet = datashare.ADLSGen1FolderDataSet{ + Kind: datashare.KindAdlsGen1Folder, + ADLSGen1FolderProperties: &datashare.ADLSGen1FolderProperties{ + AccountName: utils.String(dataLakeStoreId.Name), + ResourceGroup: utils.String(dataLakeStoreId.ResourceGroup), + SubscriptionID: utils.String(dataLakeStoreId.Subscription), + FolderPath: utils.String(d.Get("folder_path").(string)), + }, + } + } + + if _, err := client.Create(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name, dataSet); err != nil { + return fmt.Errorf("creating DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + resp, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) + if err != nil { + return fmt.Errorf("retrieving DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + respId := helper.GetAzurermDataShareDataSetId(resp.Value) + if respId == nil || *respId == "" { + return fmt.Errorf("empty or nil ID returned for DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + + d.SetId(*respId) + return resourceArmDataShareDataSetDataLakeGen1Read(d, meta) +} + +func resourceArmDataShareDataSetDataLakeGen1Read(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + shareClient :=
meta.(*clients.Client).DataShare.SharesClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataShareDataSetID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] DataShare DataSet %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + return fmt.Errorf("retrieving DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) + } + d.Set("name", id.Name) + shareResp, err := shareClient.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName) + if err != nil { + return fmt.Errorf("retrieving DataShare %q (Resource Group %q / accountName %q): %+v", id.ShareName, id.ResourceGroup, id.AccountName, err) + } + if shareResp.ID == nil || *shareResp.ID == "" { + return fmt.Errorf("reading ID of DataShare %q (Resource Group %q / accountName %q): ID is empty", id.ShareName, id.ResourceGroup, id.AccountName) + } + d.Set("data_share_id", shareResp.ID) + + switch resp := resp.Value.(type) { + case datashare.ADLSGen1FileDataSet: + if props := resp.ADLSGen1FileProperties; props != nil { + if props.SubscriptionID != nil && props.ResourceGroup != nil && props.AccountName != nil { + d.Set("data_lake_store_id", fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DataLakeStore/accounts/%s", *props.SubscriptionID, *props.ResourceGroup, *props.AccountName)) + } + d.Set("folder_path", props.FolderPath) + d.Set("file_name", props.FileName) + d.Set("display_name", props.DataSetID) + } + + case datashare.ADLSGen1FolderDataSet: + if props := resp.ADLSGen1FolderProperties; props != nil { + if props.SubscriptionID != nil && props.ResourceGroup != nil && props.AccountName != nil { + d.Set("data_lake_store_id", fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DataLakeStore/accounts/%s", *props.SubscriptionID, *props.ResourceGroup, *props.AccountName)) + } + d.Set("folder_path", props.FolderPath) + d.Set("display_name", props.DataSetID) + } + + default: + return fmt.Errorf("data share dataset %q (Resource Group %q / accountName %q / shareName %q) is not a Data Lake Store Gen1 dataset", id.Name, id.ResourceGroup, id.AccountName, id.ShareName) + } + + return nil +} + +func resourceArmDataShareDataSetDataLakeGen1Delete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataShareDataSetID(d.Id()) + if err != nil { + return err + } + + if _, err := client.Delete(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name); err != nil { + return fmt.Errorf("deleting DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) + } + return nil +} diff --git a/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_data_lake_gen1_test.go b/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_data_lake_gen1_test.go new file mode 100644 index 000000000000..6ef6e981f4f4 --- /dev/null +++ b/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_data_lake_gen1_test.go @@ -0,0 +1,174 @@ +package tests + +import ( + "fmt" + "testing" + +
"github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" +) + +func TestAccAzureRMDataShareDataSetDataLakeGen1File_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_data_lake_gen1", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_data_lake_gen1"), + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataShareDataSetDataLakeGen1File_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareDataSetExists(data.ResourceName), + resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMDataShareDataSetDataLakeGen1Folder_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_data_lake_gen1", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_data_lake_gen1"), + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataShareDataSetDataLakeGen1Folder_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareDataSetExists(data.ResourceName), + resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMDataShareDataSetDataLakeGen1_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_data_lake_gen1", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_data_lake_gen1"), + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataShareDataSetDataLakeGen1File_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareDataSetExists(data.ResourceName), + ), + }, + data.RequiresImportErrorStep(testAccAzureRMDataShareDataSetDataLakeGen1_requiresImport), + }, + }) +} + +func testAccAzureRMDataShareDataSetDataLakeGen1_template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +provider "azuread" { +} + +resource "azurerm_resource_group" "test" { + name = "acctest-datashare-%[1]d" + location = "%[2]s" +} + +resource "azurerm_data_share_account" "test" { + name = "acctest-DSA-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + identity { + type = "SystemAssigned" + } + + tags = { + env = "Test" + } +} + +resource "azurerm_data_share" "test" { + name = "acctest_DS_%[1]d" + account_id = azurerm_data_share_account.test.id + kind = "CopyBased" +} + +resource "azurerm_data_lake_store" "test" { + name = "acctestdls%[3]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + firewall_state = "Disabled" +} + +resource "azurerm_data_lake_store_file" "test" { + account_name = azurerm_data_lake_store.test.name + local_file_path = "./testdata/application_gateway_test.cer" + remote_file_path = "/test/application_gateway_test.cer" +} + +data "azuread_service_principal" "test" { + 
display_name = azurerm_data_share_account.test.name +} + +resource "azurerm_role_assignment" "test" { + scope = azurerm_data_lake_store.test.id + role_definition_name = "Owner" + principal_id = data.azuread_service_principal.test.object_id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomIntOfLength(12)) +} + +func testAccAzureRMDataShareDataSetDataLakeGen1File_basic(data acceptance.TestData) string { + config := testAccAzureRMDataShareDataSetDataLakeGen1_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_dataset_data_lake_gen1" "test" { + name = "acctest-DSDL1-%d" + data_share_id = azurerm_data_share.test.id + data_lake_store_id = azurerm_data_lake_store.test.id + file_name = "application_gateway_test.cer" + folder_path = "test" + depends_on = [ + azurerm_role_assignment.test, + ] +} +`, config, data.RandomInteger) +} + +func testAccAzureRMDataShareDataSetDataLakeGen1Folder_basic(data acceptance.TestData) string { + config := testAccAzureRMDataShareDataSetDataLakeGen1_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_dataset_data_lake_gen1" "test" { + name = "acctest-DSDL1-%d" + data_share_id = azurerm_data_share.test.id + data_lake_store_id = azurerm_data_lake_store.test.id + folder_path = "test" + depends_on = [ + azurerm_role_assignment.test, + ] +} +`, config, data.RandomInteger) +} + +func testAccAzureRMDataShareDataSetDataLakeGen1_requiresImport(data acceptance.TestData) string { + config := testAccAzureRMDataShareDataSetDataLakeGen1File_basic(data) + return fmt.Sprintf(` +%s +resource "azurerm_data_share_dataset_data_lake_gen1" "import" { + name = azurerm_data_share_dataset_data_lake_gen1.test.name + data_share_id = azurerm_data_share.test.id + data_lake_store_id = azurerm_data_share_dataset_data_lake_gen1.test.data_lake_store_id + folder_path = azurerm_data_share_dataset_data_lake_gen1.test.folder_path +} +`, config) +} diff --git a/azurerm/internal/services/datashare/tests/testdata/application_gateway_test.cer b/azurerm/internal/services/datashare/tests/testdata/application_gateway_test.cer new file mode 100644 index 000000000000..577947b220d7 --- /dev/null +++ b/azurerm/internal/services/datashare/tests/testdata/application_gateway_test.cer @@ -0,0 +1,21 @@ +-----BEGIN CERTIFICATE----- +MIIDbzCCAlegAwIBAgIJAIzjRD36sIbbMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNV +BAYTAlVTMRMwEQYDVQQIDApTb21lLVN0YXRlMRIwEAYDVQQKDAl0ZXJyYWZvcm0x +FTATBgNVBAMMDHRlcnJhZm9ybS5pbzAgFw0xNzA0MjEyMDA1MjdaGA8yMTE3MDMy +ODIwMDUyN1owTTELMAkGA1UEBhMCVVMxEzARBgNVBAgMClNvbWUtU3RhdGUxEjAQ +BgNVBAoMCXRlcnJhZm9ybTEVMBMGA1UEAwwMdGVycmFmb3JtLmlvMIIBIjANBgkq +hkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3L9L5szT4+FLykTFNyyPjy/k3BQTYAfR +QzP2dhnsuUKm3cdPC0NyZ+wEXIUGhoDO2YG6EYChOl8fsDqDOjloSUGKqYw++nlp +HIuUgJx8IxxG2XkALCjFU7EmF+w7kn76d0ezpEIYxnLP+KG2DVornoEt1aLhv1ML +mpgEZZPhDbMSLhSYWeTVRMayXLwqtfgnDumQSB+8d/1JuJqrSI4pD12JozVThzb6 +hsjfb6RMX4epPmrGn0PbTPEEA6awmsxBCXB0s13nNQt/O0hLM2agwvAyozilQV+s +616Ckgk6DJoUkqZhDy7vPYMIRSr98fBws6zkrV6tTLjmD8xAvobePQIDAQABo1Aw +TjAdBgNVHQ4EFgQUXIqO421zMMmbcRRX9wctZFCQuPIwHwYDVR0jBBgwFoAUXIqO +421zMMmbcRRX9wctZFCQuPIwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC +AQEAr82NeT3BYJOKLlUL6Om5LjUF66ewcJjG9ltdvyQwVneMcq7t5UAPxgChzqNR +Vk4da8PzkXpjBJyWezHupdJNX3XqeUk2kSxqQ6/gmhqvfI3y7djrwoO6jvMEY26W +qtkTNORWDP3THJJVimC3zV+KMU5UBVrEzhOVhHSU709lBP75o0BBn3xGsPqSq9k8 +IotIFfyAc6a+XP3+ZMpvh7wqAUml7vWa5wlcXExCx39h1balfDSLGNC4swWPCp9A +MnQR0p+vMay9hNP1Eh+9QYUai14d5KS3cFV+KxE1cJR5HD/iLltnnOEbpMsB0eVO +ZWkFvE7Y5lW0oVSAfin5TwTJMQ== +-----END CERTIFICATE----- \ No newline 
at end of file diff --git a/azurerm/internal/services/datashare/validate/data_share.go b/azurerm/internal/services/datashare/validate/data_share.go index 0176f55ce38b..c043d6f4cc14 100644 --- a/azurerm/internal/services/datashare/validate/data_share.go +++ b/azurerm/internal/services/datashare/validate/data_share.go @@ -6,6 +6,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + dataLakeParse "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datalake/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" ) @@ -60,3 +61,17 @@ func DatashareDataSetName() schema.SchemaValidateFunc { regexp.MustCompile(`^[\w-]{2,90}$`), `Dataset name can only contain number, letters, - and _, and must be between 2 and 90 characters long.`, ) } + +func DatalakeStoreID(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) + return warnings, errors + } + + if _, err := dataLakeParse.DataLakeStoreID(v); err != nil { + errors = append(errors, fmt.Errorf("cannot parse %q as a Data Lake Store ID: %v", k, err)) + } + + return warnings, errors +} diff --git a/website/azurerm.erb b/website/azurerm.erb index 6d17486ebce1..65c989030578 100644 --- a/website/azurerm.erb +++ b/website/azurerm.erb @@ -1500,6 +1500,9 @@
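
Example usage of the new resource: a minimal sketch assembled from the schema and acceptance-test configuration above. The referenced data share, data lake store, and role assignment resources, and all names and paths, are placeholders rather than part of this diff.

resource "azurerm_data_share_dataset_data_lake_gen1" "example" {
  # name, data_share_id, data_lake_store_id and folder_path are required (all ForceNew);
  # file_name is optional - omit it to share the whole folder instead of a single file
  name               = "example-dsdlg1"
  data_share_id      = azurerm_data_share.example.id
  data_lake_store_id = azurerm_data_lake_store.example.id
  folder_path        = "exampledir"
  file_name          = "example.txt"

  # the data share account's managed identity needs access to the store
  # (granted via an azurerm_role_assignment in the acceptance tests) before creation succeeds
  depends_on = [azurerm_role_assignment.example]
}

display_name is exported as a Computed attribute once the dataset exists.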