Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add dynamic container arg to datafactory dataset #16350

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ ENHANCEMENTS:

BUG FIXES:

* `azurerm_data_factory_dataset_*` - Fix crash around `azure_blob_storage_location.0.dynamic_container_enabled` [GH-16514]
* `azurerm_resource_policy_remediation` - will no longer try to cancel a completed remediation task during deletion [GH-16478]

## 3.3.0 (April 21, 2022)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,21 @@ func TestAccDataFactoryDatasetBinary_blob(t *testing.T) {
})
}

// TestAccDataFactoryDatasetBinary_blob_dynamics checks that a Binary dataset
// whose blob location uses dynamic (expression-based) container, path and
// filename can be created in Azure and round-trips cleanly through import.
func TestAccDataFactoryDatasetBinary_blob_dynamics(t *testing.T) {
	data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_binary", "test")
	r := DatasetBinaryResource{}

	steps := []acceptance.TestStep{
		{
			Config: r.blob_dynamics(data),
			Check: acceptance.ComposeTestCheckFunc(
				check.That(data.ResourceName).ExistsInAzure(r),
			),
		},
		data.ImportStep(),
	}
	data.ResourceTest(t, r, steps)
}

func TestAccDataFactoryDatasetBinary_blob_with_filepath(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_binary", "test")
r := DatasetBinaryResource{}
Expand Down Expand Up @@ -183,6 +198,61 @@ resource "azurerm_data_factory_dataset_binary" "test" {
`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger)
}

// blob_dynamics returns a Terraform configuration for an
// azurerm_data_factory_dataset_binary whose azure_blob_storage_location sets
// Data Factory expressions for container, path and filename, with the
// matching dynamic_*_enabled flags all set to true.
//
// NOTE(review): `container` is a plain quoted HCL string, so the
// `azurerm_storage_container.test.name` text inside it is NOT interpolated by
// Terraform — Data Factory receives that literal expression text. That is
// sufficient for an existence-only acceptance test, but confirm it is
// intentional.
func (DatasetBinaryResource) blob_dynamics(data acceptance.TestData) string {
	return fmt.Sprintf(`
provider "azurerm" {
  features {}
}

resource "azurerm_resource_group" "test" {
  name     = "acctestRG-df-%d"
  location = "%s"
}

resource "azurerm_storage_account" "test" {
  name                     = "acctestdf%s"
  location                 = azurerm_resource_group.test.location
  resource_group_name      = azurerm_resource_group.test.name
  account_tier             = "Standard"
  account_replication_type = "GRS"
}

resource "azurerm_storage_container" "test" {
  name                  = "content"
  storage_account_name  = azurerm_storage_account.test.name
  container_access_type = "private"
}

resource "azurerm_data_factory" "test" {
  name                = "acctestdf%d"
  location            = azurerm_resource_group.test.location
  resource_group_name = azurerm_resource_group.test.name
}


resource "azurerm_data_factory_linked_service_azure_blob_storage" "test" {
  name              = "acctestlsblob%d"
  data_factory_id   = azurerm_data_factory.test.id
  connection_string = azurerm_storage_account.test.primary_connection_string
}

resource "azurerm_data_factory_dataset_binary" "test" {
  name                = "acctestds%d"
  data_factory_id     = azurerm_data_factory.test.id
  linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.test.name

  azure_blob_storage_location {
    container                 = "@concat(azurerm_storage_container.test.name, '')"
    dynamic_container_enabled = true
    path                      = "@concat('foo/bar/',formatDateTime(convertTimeZone(utcnow(),'UTC','W. Europe Standard Time'),'yyyy-MM-dd'))"
    dynamic_path_enabled      = true
    filename                  = "@concat('foo', '.txt')"
    dynamic_filename_enabled  = true
  }
}
`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger)
}

func (DatasetBinaryResource) blob_with_filepath(data acceptance.TestData) string {
return fmt.Sprintf(`
provider "azurerm" {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -115,6 +115,11 @@ func resourceDataFactoryDatasetDelimitedText() *pluginsdk.Resource {
Type: pluginsdk.TypeString,
Optional: true,
},
"dynamic_container_enabled": {
Type: pluginsdk.TypeBool,
Optional: true,
Default: false,
},
"dynamic_path_enabled": {
Type: pluginsdk.TypeBool,
Optional: true,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -104,6 +104,21 @@ func TestAccDataFactoryDatasetDelimitedText_blob(t *testing.T) {
})
}

// TestAccDataFactoryDatasetDelimitedText_blob_dynamics checks that a
// delimited-text dataset whose blob location uses dynamic (expression-based)
// container, path and filename can be created in Azure and round-trips
// cleanly through import.
func TestAccDataFactoryDatasetDelimitedText_blob_dynamics(t *testing.T) {
	data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_delimited_text", "test")
	r := DatasetDelimitedTextResource{}

	steps := []acceptance.TestStep{
		{
			Config: r.blob_dynamics(data),
			Check: acceptance.ComposeTestCheckFunc(
				check.That(data.ResourceName).ExistsInAzure(r),
			),
		},
		data.ImportStep(),
	}
	data.ResourceTest(t, r, steps)
}

func TestAccDataFactoryDatasetDelimitedText_blob_empty_path(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_delimited_text", "test")
r := DatasetDelimitedTextResource{}
Expand Down Expand Up @@ -488,6 +503,70 @@ resource "azurerm_data_factory_dataset_delimited_text" "test" {
`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger)
}

// blob_dynamics returns a Terraform configuration for an
// azurerm_data_factory_dataset_delimited_text whose azure_blob_storage_location
// sets Data Factory expressions for container, path and filename, with the
// matching dynamic_*_enabled flags all set to true, plus the required
// delimited-text formatting attributes.
//
// NOTE(review): `container` is a plain quoted HCL string, so the
// `azurerm_storage_container.test.name` text inside it is NOT interpolated by
// Terraform — Data Factory receives that literal expression text. That is
// sufficient for an existence-only acceptance test, but confirm it is
// intentional.
func (DatasetDelimitedTextResource) blob_dynamics(data acceptance.TestData) string {
	return fmt.Sprintf(`
provider "azurerm" {
  features {}
}

resource "azurerm_resource_group" "test" {
  name     = "acctestRG-df-%d"
  location = "%s"
}

resource "azurerm_storage_account" "test" {
  name                     = "acctestdf%s"
  location                 = azurerm_resource_group.test.location
  resource_group_name      = azurerm_resource_group.test.name
  account_tier             = "Standard"
  account_replication_type = "GRS"
}

resource "azurerm_storage_container" "test" {
  name                  = "content"
  storage_account_name  = azurerm_storage_account.test.name
  container_access_type = "private"
}

resource "azurerm_data_factory" "test" {
  name                = "acctestdf%d"
  location            = azurerm_resource_group.test.location
  resource_group_name = azurerm_resource_group.test.name
}


resource "azurerm_data_factory_linked_service_azure_blob_storage" "test" {
  name              = "acctestlsblob%d"
  data_factory_id   = azurerm_data_factory.test.id
  connection_string = azurerm_storage_account.test.primary_connection_string
}

resource "azurerm_data_factory_dataset_delimited_text" "test" {
  name                = "acctestds%d"
  data_factory_id     = azurerm_data_factory.test.id
  linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.test.name

  azure_blob_storage_location {
    container                 = "@concat(azurerm_storage_container.test.name, '')"
    dynamic_container_enabled = true
    path                      = "@concat('foo/bar/',formatDateTime(convertTimeZone(utcnow(),'UTC','W. Europe Standard Time'),'yyyy-MM-dd'))"
    dynamic_path_enabled      = true
    filename                  = "@concat('foo', '.txt')"
    dynamic_filename_enabled  = true
  }

  column_delimiter    = ","
  row_delimiter       = "NEW"
  encoding            = "UTF-8"
  quote_character     = "x"
  escape_character    = "f"
  first_row_as_header = true
  null_value          = "NULL"

}
`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger)
}

func (DatasetDelimitedTextResource) blob_empty_path(data acceptance.TestData) string {
return fmt.Sprintf(`
provider "azurerm" {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -118,6 +118,11 @@ func resourceDataFactoryDatasetJSON() *pluginsdk.Resource {
Required: true,
ValidateFunc: validation.StringIsNotEmpty,
},
"dynamic_container_enabled": {
Type: pluginsdk.TypeBool,
Optional: true,
Default: false,
},
"dynamic_path_enabled": {
Type: pluginsdk.TypeBool,
Optional: true,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -77,27 +77,13 @@ func TestAccDataFactoryDatasetJSON_blob(t *testing.T) {
})
}

func TestAccDataFactoryDatasetJSON_blobDynamicContainer(t *testing.T) {
func TestAccDataFactoryDatasetJSON_blob_dynamics(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_json", "test")
r := DatasetJSONResource{}

data.ResourceTest(t, r, []acceptance.TestStep{
{
Config: r.blob(data),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(),
{
Config: r.blobDynamicContainer(data),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(),
{
Config: r.blob(data),
Config: r.blob_dynamics(data),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
Expand Down Expand Up @@ -345,7 +331,7 @@ resource "azurerm_data_factory_dataset_json" "test" {
`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger)
}

func (DatasetJSONResource) blobDynamicContainer(data acceptance.TestData) string {
func (DatasetJSONResource) blob_dynamics(data acceptance.TestData) string {
return fmt.Sprintf(`
provider "azurerm" {
features {}
Expand Down Expand Up @@ -389,13 +375,16 @@ resource "azurerm_data_factory_dataset_json" "test" {
linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.test.name

azure_blob_storage_location {
container = azurerm_storage_container.test.name
container = "@concat(azurerm_storage_container.test.name, '')"
dynamic_container_enabled = true
path = "@concat('foo/bar/',formatDateTime(convertTimeZone(utcnow(),'UTC','W. Europe Standard Time'),'yyyy-MM-dd'))"
dynamic_path_enabled = true
filename = "foo.json"
dynamic_filename_enabled = false
filename = "@concat('foo', '.txt')"
dynamic_filename_enabled = true
}

encoding = "UTF-8"

}
`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger)
}
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,11 @@ func resourceDataFactoryDatasetParquet() *pluginsdk.Resource {
Required: true,
ValidateFunc: validation.StringIsNotEmpty,
},
"dynamic_container_enabled": {
Type: pluginsdk.TypeBool,
Optional: true,
Default: false,
},
"dynamic_path_enabled": {
Type: pluginsdk.TypeBool,
Optional: true,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -79,27 +79,13 @@ func TestAccDataFactoryDatasetParquet_blob(t *testing.T) {
})
}

func TestAccDataFactoryDatasetParquet_blobDynamicContainer(t *testing.T) {
func TestAccDataFactoryDatasetParquet_blob_dynamics(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_parquet", "test")
r := DatasetParquetResource{}

data.ResourceTest(t, r, []acceptance.TestStep{
{
Config: r.blob(data),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(),
{
Config: r.blobDynamicContainer(data),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(),
{
Config: r.blob(data),
Config: r.blob_dynamics(data),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
Expand Down Expand Up @@ -342,7 +328,7 @@ resource "azurerm_data_factory_dataset_parquet" "test" {
`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger)
}

func (DatasetParquetResource) blobDynamicContainer(data acceptance.TestData) string {
func (DatasetParquetResource) blob_dynamics(data acceptance.TestData) string {
return fmt.Sprintf(`
provider "azurerm" {
features {}
Expand Down Expand Up @@ -386,10 +372,12 @@ resource "azurerm_data_factory_dataset_parquet" "test" {
linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.test.name

azure_blob_storage_location {
container = azurerm_storage_container.test.name
container = "@concat(azurerm_storage_container.test.name, '')"
dynamic_container_enabled = true
path = "@concat('foo/bar/',formatDateTime(convertTimeZone(utcnow(),'UTC','W. Europe Standard Time'),'yyyy-MM-dd'))"
dynamic_path_enabled = true
filename = "@concat('foo', '.txt')"
dynamic_filename_enabled = true
}
}
`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger)
Expand Down