Add comment and test for adding not null column in Iceberg
ebyhr committed Sep 5, 2022
1 parent a4c9315 commit 3c35de0
Showing 2 changed files with 23 additions and 1 deletion.
@@ -1358,7 +1358,10 @@ private static Term toIcebergTerm(Schema schema, PartitionField partitionField)
    @Override
    public void addColumn(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnMetadata column)
    {
        // TODO https://github.com/trinodb/trino/issues/13587 Allow NOT NULL constraint when the table is empty
        // Spark doesn't support adding a NOT NULL column to Iceberg tables
        // Also, Spark throws an exception when reading the table if we add such a column and execute a rollback procedure,
        // because Spark keeps returning the latest table definition even after the rollback https://github.com/apache/iceberg/issues/5591
        // Even when a table is empty, this connector doesn't support adding not null columns to avoid the above Spark failure
        if (!column.isNullable()) {
            throw new TrinoException(NOT_SUPPORTED, "This connector does not support adding not null columns");
        }
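For context, the rollback failure referenced in the comment can be sketched in the style of the product test added in the second file. This is an illustrative, hypothetical test, not part of the commit: it assumes the test class's existing helpers and static imports (onSpark, sparkTableName, randomTableSuffix), tempto's QueryResult.getOnlyValue() accessor, Iceberg's rollback_to_snapshot Spark procedure, placeholder catalog/schema names, and that a required column could be added in the first place, which both engines currently reject.

    @Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS})
    public void testReadAfterRollbackWithRequiredColumn()
    {
        // Hypothetical sketch of https://github.com/apache/iceberg/issues/5591: after a required
        // (NOT NULL) column is added and the table is rolled back to an earlier snapshot,
        // Spark keeps resolving the latest table schema and subsequent reads can fail
        String baseTableName = "test_rollback_required_column_" + randomTableSuffix();
        String sparkTableName = sparkTableName(baseTableName);

        onSpark().executeQuery("CREATE TABLE " + sparkTableName + " (col INT) USING ICEBERG");
        onSpark().executeQuery("INSERT INTO " + sparkTableName + " VALUES (1)");

        // Remember the snapshot created by the INSERT so we can roll back to it
        long snapshotId = (long) onSpark().executeQuery(
                "SELECT snapshot_id FROM " + sparkTableName + ".snapshots ORDER BY committed_at DESC LIMIT 1")
                .getOnlyValue();

        // The step both engines currently reject; if it were allowed, the rollback below
        // would leave Spark reads broken per the linked Iceberg issue
        // onSpark().executeQuery("ALTER TABLE " + sparkTableName + " ADD COLUMN new_col INT NOT NULL");

        // Catalog and schema names are placeholders for whatever the test environment uses
        onSpark().executeQuery("CALL iceberg_catalog.system.rollback_to_snapshot('default." + baseTableName + "', " + snapshotId + ")");
        onSpark().executeQuery("SELECT * FROM " + sparkTableName);

        onSpark().executeQuery("DROP TABLE " + sparkTableName);
    }

Because that failure is on the Spark side and cannot be worked around from Trino, the connector rejects the ADD COLUMN even for empty tables, which the new test below verifies.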
@@ -1998,6 +1998,25 @@ public void testUpdateOnPartitionColumn()
        onSpark().executeQuery("DROP TABLE " + sparkTableName);
    }

    @Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS})
    public void testAddNotNullColumn()
    {
        String baseTableName = "test_add_not_null_column_" + randomTableSuffix();
        String trinoTableName = trinoTableName(baseTableName);
        String sparkTableName = sparkTableName(baseTableName);

        onTrino().executeQuery("CREATE TABLE " + trinoTableName + " AS SELECT 1 col");
        assertThat(onTrino().executeQuery("SELECT * FROM " + trinoTableName)).containsOnly(row(1));

        assertQueryFailure(() -> onTrino().executeQuery("ALTER TABLE " + trinoTableName + " ADD COLUMN new_col INT NOT NULL"))
                .hasMessageMatching(".*This connector does not support adding not null columns");
        assertQueryFailure(() -> onSpark().executeQuery("ALTER TABLE " + sparkTableName + " ADD COLUMN new_col INT NOT NULL"))
                .hasMessageMatching("(?s).*Unsupported table change: Incompatible change: cannot add required column.*");

        assertThat(onTrino().executeQuery("SELECT * FROM " + trinoTableName)).containsOnly(row(1));
        onTrino().executeQuery("DROP TABLE " + trinoTableName);
    }

    @Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS})
    public void testHandlingPartitionSchemaEvolutionInPartitionMetadata()
    {
