diff --git a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakeMetadata.java b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakeMetadata.java index 81fdd79b8c12..0c73b9eb44f3 100644 --- a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakeMetadata.java +++ b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakeMetadata.java @@ -1078,6 +1078,9 @@ private static void checkPartitionColumns(List columns, List columnMetadata) diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeConnectorTest.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeConnectorTest.java index 01d40e0b2023..7e5b37c9e1bc 100644 --- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeConnectorTest.java +++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeConnectorTest.java @@ -309,6 +309,24 @@ public void testPartitionColumnOrderIsDifferentFromTableDefinition() assertQuery("SELECT * FROM " + tableName, "VALUES (1, 'first#1', 'second#1'), (2, 'first#2', NULL), (3, NULL, 'second#3'), (4, NULL, NULL)"); } + @Test + public void testCreateTableWithAllPartitionColumns() + { + String tableName = "test_create_table_all_partition_columns_" + randomNameSuffix(); + assertQueryFails( + "CREATE TABLE " + tableName + "(part INT) WITH (partitioned_by = ARRAY['part'])", + "Using all columns for partition columns is unsupported"); + } + + @Test + public void testCreateTableAsSelectAllPartitionColumns() + { + String tableName = "test_create_table_all_partition_columns_" + randomNameSuffix(); + assertQueryFails( + "CREATE TABLE " + tableName + " WITH (partitioned_by = ARRAY['part']) AS SELECT 1 part", + "Using all columns for partition columns is unsupported"); + } + @Test public void testCreateTableWithUnsupportedPartitionType() { diff --git 
a/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeDatabricksCreateTableAsSelectCompatibility.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeDatabricksCreateTableAsSelectCompatibility.java index 4c72d6856db1..c545b166bf96 100644 --- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeDatabricksCreateTableAsSelectCompatibility.java +++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeDatabricksCreateTableAsSelectCompatibility.java @@ -281,4 +281,43 @@ public void testCreateTableWithUnsupportedPartitionType() dropDeltaTableWithRetry("default." + tableName); } } + + @Test(groups = {DELTA_LAKE_DATABRICKS, PROFILE_SPECIFIC_TESTS}) + @Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH) + public void testCreateTableAsSelectWithAllPartitionColumns() + { + String tableName = "test_dl_ctas_with_all_partition_columns_" + randomNameSuffix(); + String tableDirectory = "databricks-compatibility-test-" + tableName; + + try { + assertThatThrownBy(() -> onTrino().executeQuery("" + + "CREATE TABLE delta.default." + tableName + " " + + "WITH (partitioned_by = ARRAY['part'], location = 's3://" + bucketName + "/" + tableDirectory + "')" + + "AS SELECT 1 part")) + .hasMessageContaining("Using all columns for partition columns is unsupported"); + assertThatThrownBy(() -> onTrino().executeQuery("" + + "CREATE TABLE delta.default." + tableName + " " + + "WITH (partitioned_by = ARRAY['part', 'another_part'], location = 's3://" + bucketName + "/" + tableDirectory + "')" + + "AS SELECT 1 part, 2 another_part")) + .hasMessageContaining("Using all columns for partition columns is unsupported"); + + assertThatThrownBy(() -> onDelta().executeQuery("" + + "CREATE TABLE default." 
+ tableName + " " + + "USING DELTA " + + "PARTITIONED BY (part)" + + "LOCATION 's3://" + bucketName + "/" + tableDirectory + "'" + + "AS SELECT 1 part")) + .hasMessageContaining("Cannot use all columns for partition columns"); + assertThatThrownBy(() -> onDelta().executeQuery("" + + "CREATE TABLE default." + tableName + " " + + "USING DELTA " + + "PARTITIONED BY (part, another_part)" + + "LOCATION 's3://" + bucketName + "/" + tableDirectory + "'" + + "AS SELECT 1 part, 2 another_part")) + .hasMessageContaining("Cannot use all columns for partition columns"); + } + finally { + dropDeltaTableWithRetry("default." + tableName); + } + } } diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeDatabricksCreateTableCompatibility.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeDatabricksCreateTableCompatibility.java index 940081e693a2..b15b54aa5da8 100644 --- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeDatabricksCreateTableCompatibility.java +++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeDatabricksCreateTableCompatibility.java @@ -362,6 +362,41 @@ public void testCreateTableWithUnsupportedPartitionType() } } + @Test(groups = {DELTA_LAKE_DATABRICKS, PROFILE_SPECIFIC_TESTS}) + @Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH) + public void testCreateTableWithAllPartitionColumns() + { + String tableName = "test_dl_create_table_with_all_partition_columns_" + randomNameSuffix(); + String tableDirectory = "databricks-compatibility-test-" + tableName; + + try { + assertThatThrownBy(() -> onTrino().executeQuery("" + + "CREATE TABLE delta.default."
+ tableName + "(part int)" + + "WITH (partitioned_by = ARRAY['part'], location = 's3://" + bucketName + "/" + tableDirectory + "')")) + .hasMessageContaining("Using all columns for partition columns is unsupported"); + assertThatThrownBy(() -> onTrino().executeQuery("" + + "CREATE TABLE delta.default." + tableName + "(part int, another_part int)" + + "WITH (partitioned_by = ARRAY['part', 'another_part'], location = 's3://" + bucketName + "/" + tableDirectory + "')")) + .hasMessageContaining("Using all columns for partition columns is unsupported"); + + assertThatThrownBy(() -> onDelta().executeQuery("" + + "CREATE TABLE default." + tableName + "(part int)" + + "USING DELTA " + + "PARTITIONED BY (part)" + + "LOCATION 's3://" + bucketName + "/" + tableDirectory + "'")) + .hasMessageContaining("Cannot use all columns for partition columns"); + assertThatThrownBy(() -> onDelta().executeQuery("" + + "CREATE TABLE default." + tableName + "(part int, another_part int)" + + "USING DELTA " + + "PARTITIONED BY (part, another_part)" + + "LOCATION 's3://" + bucketName + "/" + tableDirectory + "'")) + .hasMessageContaining("Cannot use all columns for partition columns"); + } + finally { + dropDeltaTableWithRetry("default." + tableName); + } + } + private String getDatabricksDefaultTableProperties() { if (databricksRuntimeVersion.isAtLeast(DATABRICKS_113_RUNTIME_VERSION)) {