Skip to content

Commit

Permalink
Bugfix: azurerm_data_factory_dataset_delimited_text - Ensure optional variables are set correctly (#12921)
Browse files Browse the repository at this point in the history
  • Loading branch information
henrymcconville authored Aug 12, 2021
1 parent 02e8617 commit 081d807
Show file tree
Hide file tree
Showing 3 changed files with 115 additions and 21 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -102,12 +102,12 @@ func resourceDataFactoryDatasetDelimitedText() *pluginsdk.Resource {
},
"path": {
Type: pluginsdk.TypeString,
Required: true,
Optional: true,
ValidateFunc: validation.StringIsNotEmpty,
},
"filename": {
Type: pluginsdk.TypeString,
Required: true,
Optional: true,
ValidateFunc: validation.StringIsNotEmpty,
},
},
Expand Down Expand Up @@ -319,16 +319,43 @@ func resourceDataFactoryDatasetDelimitedTextCreateUpdate(d *pluginsdk.ResourceDa
}

delimited_textDatasetProperties := datafactory.DelimitedTextDatasetTypeProperties{
Location: location,
ColumnDelimiter: d.Get("column_delimiter").(string),
RowDelimiter: d.Get("row_delimiter").(string),
EncodingName: d.Get("encoding").(string),
QuoteChar: d.Get("quote_character").(string),
EscapeChar: d.Get("escape_character").(string),
FirstRowAsHeader: d.Get("first_row_as_header").(bool),
NullValue: d.Get("null_value").(string),
CompressionLevel: d.Get("compression_level").(string),
CompressionCodec: d.Get("compression_codec").(string),
Location: location,
}

if v, ok := d.GetOk("column_delimiter"); ok {
delimited_textDatasetProperties.ColumnDelimiter = v.(string)
}

if v, ok := d.GetOk("row_delimiter"); ok {
delimited_textDatasetProperties.RowDelimiter = v.(string)
}

if v, ok := d.GetOk("encoding"); ok {
delimited_textDatasetProperties.EncodingName = v.(string)
}

if v, ok := d.GetOk("quote_character"); ok {
delimited_textDatasetProperties.QuoteChar = v.(string)
}

if v, ok := d.GetOk("escape_character"); ok {
delimited_textDatasetProperties.EscapeChar = v.(string)
}

if v, ok := d.GetOk("first_row_as_header"); ok {
delimited_textDatasetProperties.FirstRowAsHeader = v.(bool)
}

if v, ok := d.GetOk("null_value"); ok {
delimited_textDatasetProperties.NullValue = v.(string)
}

if v, ok := d.GetOk("compression_level"); ok {
delimited_textDatasetProperties.CompressionLevel = v.(string)
}

if v, ok := d.GetOk("compression_codec"); ok {
delimited_textDatasetProperties.CompressionCodec = v.(string)
}

linkedServiceName := d.Get("linked_service_name").(string)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,21 @@ func TestAccDataFactoryDatasetDelimitedText_http_update(t *testing.T) {
})
}

// TestAccDataFactoryDatasetDelimitedText_blob_basic provisions a minimal
// Delimited Text dataset backed by Azure Blob Storage (only the required
// arguments set), checks it exists in Azure, and verifies a clean import.
func TestAccDataFactoryDatasetDelimitedText_blob_basic(t *testing.T) {
	data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_delimited_text", "test")
	r := DatasetDelimitedTextResource{}

	// Single create step followed by an import round-trip.
	steps := []acceptance.TestStep{
		{
			Config: r.blob_basic(data),
			Check: acceptance.ComposeTestCheckFunc(
				check.That(data.ResourceName).ExistsInAzure(r),
			),
		},
		data.ImportStep(),
	}
	data.ResourceTest(t, r, steps)
}

func TestAccDataFactoryDatasetDelimitedText_blob(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_delimited_text", "test")
r := DatasetDelimitedTextResource{}
Expand Down Expand Up @@ -315,6 +330,58 @@ resource "azurerm_data_factory_dataset_delimited_text" "test" {
`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger)
}

// blob_basic renders a minimal Terraform configuration for a Delimited Text
// dataset stored in an Azure Blob Storage container. All format options
// (delimiters, encoding, quoting, etc.) are intentionally omitted so the
// provider's optional-attribute handling is exercised with defaults only.
func (DatasetDelimitedTextResource) blob_basic(data acceptance.TestData) string {
	// Template placeholders, in order: RG suffix, location, storage account
	// suffix, data factory suffix, linked service suffix, dataset suffix.
	template := `
provider "azurerm" {
features {}
}
resource "azurerm_resource_group" "test" {
name = "acctestRG-df-%d"
location = "%s"
}
resource "azurerm_storage_account" "test" {
name = "acctestdf%s"
location = azurerm_resource_group.test.location
resource_group_name = azurerm_resource_group.test.name
account_tier = "Standard"
account_replication_type = "GRS"
}
resource "azurerm_storage_container" "test" {
name = "content"
storage_account_name = azurerm_storage_account.test.name
container_access_type = "private"
}
resource "azurerm_data_factory" "test" {
name = "acctestdf%d"
location = azurerm_resource_group.test.location
resource_group_name = azurerm_resource_group.test.name
}
resource "azurerm_data_factory_linked_service_azure_blob_storage" "test" {
name = "acctestlsblob%d"
resource_group_name = azurerm_resource_group.test.name
data_factory_name = azurerm_data_factory.test.name
connection_string = azurerm_storage_account.test.primary_connection_string
}
resource "azurerm_data_factory_dataset_delimited_text" "test" {
name = "acctestds%d"
resource_group_name = azurerm_resource_group.test.name
data_factory_name = azurerm_data_factory.test.name
linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.test.name
azure_blob_storage_location {
container = azurerm_storage_container.test.name
}
}
`
	return fmt.Sprintf(template, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger)
}

func (DatasetDelimitedTextResource) blob(data acceptance.TestData) string {
return fmt.Sprintf(`
provider "azurerm" {
Expand Down
18 changes: 9 additions & 9 deletions website/docs/r/data_factory_dataset_delimited_text.html.markdown
Original file line number Diff line number Diff line change
Expand Up @@ -88,19 +88,19 @@ The following supported locations for a Delimited Text Dataset (exactly one of t

The following arguments are supported specifically for a Delimited Text Dataset:

* `column_delimiter` - (Required) The column delimiter.
* `column_delimiter` - (Optional) The column delimiter. Defaults to `,`.

* `row_delimiter` - (Required) The row delimiter.
* `row_delimiter` - (Optional) The row delimiter. Defaults to any of the following values on read: `\r\n`, `\r`, `\n`, and `\n` or `\r\n` on write by mapping data flow and Copy activity respectively.

* `encoding` - (Required) The encoding format for the file.
* `encoding` - (Optional) The encoding format for the file.

* `quote_character` - (Required) The quote character.
* `quote_character` - (Optional) The quote character. Defaults to `"`.

* `escape_character` - (Required) The escape character.
* `escape_character` - (Optional) The escape character. Defaults to `\`.

* `first_row_as_header` - (Required) When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data.
* `first_row_as_header` - (Optional) When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to `false`.

* `null_value` - (Required) The null value string.
* `null_value` - (Optional) The null value string. Defaults to an empty string.

* `compression_codec` - (Optional) The compression codec used to read/write text files. Valid values are `bzip2`, `gzip`, `deflate`, `ZipDeflate`, `TarGzip`, `Tar`, `snappy`, or `lz4`. Please note these values are case sensitive.

Expand Down Expand Up @@ -132,9 +132,9 @@ An `azure_blob_storage_location` block supports the following:

* `container` - (Required) The container on the Azure Blob Storage Account hosting the file.

* `path` - (Required) The folder path to the file.
* `path` - (Optional) The folder path to the file.

* `filename` - (Required) The filename of the file.
* `filename` - (Optional) The filename of the file.

---

Expand Down

0 comments on commit 081d807

Please sign in to comment.