Upgrade docker-images to version 81
This upgrades the Delta Lake version to 2.4.0.

Change delta.minWriterVersion to 7 in the tests. Otherwise, Spark throws a
"delta.minWriterVersion needs to be one of 1, 2, 3, 4, 5, 7" error.
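
For reference, a minimal Spark SQL sketch of the property in question (the table name and S3 location below are illustrative placeholders, not taken from the test suite): on OSS Delta Lake 2.4.0 the value '6' is rejected at CREATE TABLE time, so the writer-version tests in this commit pin '7', which the Trino assertions still report as an unsupported writer version.

    CREATE TABLE default.example_table (col INT)
    USING DELTA
    LOCATION 's3://example-bucket/example-dir'
    -- '6' fails on Delta Lake 2.4.0 with "delta.minWriterVersion needs to be one of 1, 2, 3, 4, 5, 7";
    -- '7' is accepted here but is still rejected by the Trino Delta Lake connector in the tests below
    TBLPROPERTIES ('delta.minWriterVersion' = '7')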
ebyhr committed Jul 27, 2023
1 parent c630f2c commit bec8b7d
Showing 4 changed files with 19 additions and 17 deletions.
pom.xml — 2 changes: 1 addition & 1 deletion
@@ -180,7 +180,7 @@
<dep.jna.version>5.13.0</dep.jna.version>
<dep.okio.version>3.3.0</dep.okio.version>

- <dep.docker.images.version>80</dep.docker.images.version>
+ <dep.docker.images.version>81</dep.docker.images.version>

<!--
America/Bahia_Banderas has:
@@ -25,6 +25,7 @@
import static io.trino.testing.TestingNames.randomNameSuffix;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_104;
+ import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_113;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_73;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_91;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_OSS;
@@ -70,7 +71,7 @@ public void testAddColumnWithCommentOnTrino()
}
}

- @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_OSS, DELTA_LAKE_EXCLUDE_73, DELTA_LAKE_EXCLUDE_91, PROFILE_SPECIFIC_TESTS})
+ @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_OSS, DELTA_LAKE_EXCLUDE_73, DELTA_LAKE_EXCLUDE_91, DELTA_LAKE_EXCLUDE_104, DELTA_LAKE_EXCLUDE_113, PROFILE_SPECIFIC_TESTS})
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testAddColumnUnsupportedWriterVersion()
{
@@ -80,14 +81,14 @@ public void testAddColumnUnsupportedWriterVersion()
onDelta().executeQuery(format("" +
"CREATE TABLE default.%s (col int) " +
"USING DELTA LOCATION 's3://%s/%s'" +
"TBLPROPERTIES ('delta.minWriterVersion'='6')",
"TBLPROPERTIES ('delta.minWriterVersion'='7')",
tableName,
bucketName,
tableDirectory));

try {
assertQueryFailure(() -> onTrino().executeQuery("ALTER TABLE delta.default." + tableName + " ADD COLUMN new_col int"))
.hasMessageMatching(".* Table .* requires Delta Lake writer version 6 which is not supported");
.hasMessageMatching(".* Table .* requires Delta Lake writer version 7 which is not supported");
}
finally {
dropDeltaTableWithRetry("default." + tableName);
@@ -193,14 +194,14 @@ public void testCommentOnTableUnsupportedWriterVersion()
onDelta().executeQuery(format("" +
"CREATE TABLE default.%s (col int) " +
"USING DELTA LOCATION 's3://%s/%s'" +
"TBLPROPERTIES ('delta.minWriterVersion'='6')",
"TBLPROPERTIES ('delta.minWriterVersion'='7')",
tableName,
bucketName,
tableDirectory));

try {
assertQueryFailure(() -> onTrino().executeQuery("COMMENT ON TABLE delta.default." + tableName + " IS 'test comment'"))
.hasMessageMatching(".* Table .* requires Delta Lake writer version 6 which is not supported");
.hasMessageMatching(".* Table .* requires Delta Lake writer version 7 which is not supported");
}
finally {
onTrino().executeQuery("DROP TABLE delta.default." + tableName);
@@ -239,21 +240,21 @@ public void testCommentOnColumnUnsupportedWriterVersion()
onDelta().executeQuery(format("" +
"CREATE TABLE default.%s (col int) " +
"USING DELTA LOCATION 's3://%s/%s'" +
"TBLPROPERTIES ('delta.minWriterVersion'='6')",
"TBLPROPERTIES ('delta.minWriterVersion'='7')",
tableName,
bucketName,
tableDirectory));

try {
assertQueryFailure(() -> onTrino().executeQuery("COMMENT ON COLUMN delta.default." + tableName + ".col IS 'test column comment'"))
.hasMessageMatching(".* Table .* requires Delta Lake writer version 6 which is not supported");
.hasMessageMatching(".* Table .* requires Delta Lake writer version 7 which is not supported");
}
finally {
onTrino().executeQuery("DROP TABLE delta.default." + tableName);
}
}

- @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_OSS, DELTA_LAKE_EXCLUDE_73, DELTA_LAKE_EXCLUDE_91, DELTA_LAKE_EXCLUDE_104, PROFILE_SPECIFIC_TESTS})
+ @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_OSS, DELTA_LAKE_EXCLUDE_73, DELTA_LAKE_EXCLUDE_91, DELTA_LAKE_EXCLUDE_104, DELTA_LAKE_EXCLUDE_113, PROFILE_SPECIFIC_TESTS})
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testOptimizeUnsupportedWriterVersion()
{
@@ -263,14 +264,14 @@ public void testOptimizeUnsupportedWriterVersion()
onDelta().executeQuery(format("" +
"CREATE TABLE default.%s (col int) " +
"USING DELTA LOCATION 's3://%s/%s'" +
"TBLPROPERTIES ('delta.minWriterVersion'='6')",
"TBLPROPERTIES ('delta.minWriterVersion'='7')",
tableName,
bucketName,
tableDirectory));

try {
assertQueryFailure(() -> onTrino().executeQuery("ALTER TABLE delta.default." + tableName + " EXECUTE OPTIMIZE"))
.hasMessageMatching(".* Table .* requires Delta Lake writer version 6 which is not supported");
.hasMessageMatching(".* Table .* requires Delta Lake writer version 7 which is not supported");
}
finally {
dropDeltaTableWithRetry(tableName);
@@ -59,9 +59,8 @@ public void testDeleteOnAppendOnlyTableFails()
onTrino().executeQuery("DROP TABLE " + tableName);
}

- // Databricks 12.1 added support for deletion vectors
- // TODO: Add DELTA_LAKE_OSS group once they support creating a table with deletion vectors
- @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_EXCLUDE_73, DELTA_LAKE_EXCLUDE_91, DELTA_LAKE_EXCLUDE_104, DELTA_LAKE_EXCLUDE_113, PROFILE_SPECIFIC_TESTS})
+ // Databricks 12.1 and OSS Delta 2.4.0 added support for deletion vectors
+ @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_OSS, DELTA_LAKE_EXCLUDE_73, DELTA_LAKE_EXCLUDE_91, DELTA_LAKE_EXCLUDE_104, DELTA_LAKE_EXCLUDE_113, PROFILE_SPECIFIC_TESTS})
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testDeletionVectors()
{
@@ -33,6 +33,8 @@
import static io.trino.tempto.assertions.QueryAssert.assertQueryFailure;
import static io.trino.testing.TestingNames.randomNameSuffix;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS;
+ import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_104;
+ import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_113;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_73;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_91;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_OSS;
@@ -389,7 +391,7 @@ private void testVacuumRemoveChangeDataFeedFiles(Consumer<String> vacuumExecutor)
}
}

- @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_OSS, DELTA_LAKE_EXCLUDE_73, DELTA_LAKE_EXCLUDE_91, PROFILE_SPECIFIC_TESTS})
+ @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_OSS, DELTA_LAKE_EXCLUDE_73, DELTA_LAKE_EXCLUDE_91, DELTA_LAKE_EXCLUDE_104, DELTA_LAKE_EXCLUDE_113, PROFILE_SPECIFIC_TESTS})
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testVacuumUnsupportedWriterVersion()
{
@@ -400,10 +402,10 @@ public void testVacuumUnsupportedWriterVersion()
"(a INT)" +
"USING DELTA " +
"LOCATION '" + ("s3://" + bucketName + "/" + directoryName) + "'" +
"TBLPROPERTIES ('delta.minWriterVersion'='6')");
"TBLPROPERTIES ('delta.minWriterVersion'='7')");
try {
assertThatThrownBy(() -> onTrino().executeQuery("CALL delta.system.vacuum('default', '" + tableName + "', '7d')"))
.hasMessageContaining("Cannot execute vacuum procedure with 6 writer version");
.hasMessageContaining("Cannot execute vacuum procedure with 7 writer version");
}
finally {
dropDeltaTableWithRetry("default." + tableName);
