Remove redundant try-finally in some tests

The whole database state is ephemeral.

findepi authored and losipiuk committed Feb 24, 2022
1 parent 91f4580 commit 20dbfe6
Showing 1 changed file with 89 additions and 113 deletions.
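
Every change below is the same simplification: each test run works against a fresh catalog that is thrown away afterwards, so wrapping a test body in try/finally solely to drop its tables adds noise without adding safety. A minimal, self-contained sketch of the idea, using a plain in-memory map as a stand-in for the ephemeral test catalog (the class, method, and variable names here are illustrative only, not Trino test code):

import java.util.HashMap;
import java.util.Map;

public class EphemeralStateSketch
{
    // Stand-in for the per-run, throwaway catalog; it vanishes when the test JVM exits.
    private static final Map<String, String> ephemeralCatalog = new HashMap<>();

    // Before: defensive cleanup in finally, even though nothing outlives the run.
    static void testWithRedundantFinally()
    {
        try {
            ephemeralCatalog.put("evolve_test", "dummy BIGINT");
            verify(ephemeralCatalog.containsKey("evolve_test"));
        }
        finally {
            ephemeralCatalog.remove("evolve_test"); // redundant: the whole catalog is ephemeral
        }
    }

    // After: a straight-line body; an explicit drop at the end is tidiness, not a safety net.
    static void testWithoutFinally()
    {
        ephemeralCatalog.put("evolve_test", "dummy BIGINT");
        verify(ephemeralCatalog.containsKey("evolve_test"));
        ephemeralCatalog.remove("evolve_test");
    }

    private static void verify(boolean condition)
    {
        if (!condition) {
            throw new AssertionError("unexpected test state");
        }
    }

    public static void main(String[] args)
    {
        testWithRedundantFinally();
        testWithoutFinally();
        System.out.println("both variants leave the ephemeral catalog empty: " + ephemeralCatalog.isEmpty());
    }
}

The diff keeps an unconditional DROP TABLE (or DROP TABLE IF EXISTS) at the end of each test in place of the old finally block.
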
@@ -3071,149 +3071,125 @@ public void testAmbiguousColumnsWithDots()
     public void testSchemaEvolutionWithDereferenceProjections()
     {
         // Fields are identified uniquely based on unique IDs. If a column is dropped and recreated with the same name it should not return dropped data.
-        try {
-            assertUpdate("CREATE TABLE evolve_test (dummy BIGINT, a row(b BIGINT, c VARCHAR))");
-            assertUpdate("INSERT INTO evolve_test VALUES (1, ROW(1, 'abc'))", 1);
-            assertUpdate("ALTER TABLE evolve_test DROP COLUMN a");
-            assertUpdate("ALTER TABLE evolve_test ADD COLUMN a ROW(b VARCHAR, c BIGINT)");
-            assertQuery("SELECT a.b FROM evolve_test", "VALUES NULL");
-        }
-        finally {
-            assertUpdate("DROP TABLE IF EXISTS evolve_test");
-        }
+        assertUpdate("CREATE TABLE evolve_test (dummy BIGINT, a row(b BIGINT, c VARCHAR))");
+        assertUpdate("INSERT INTO evolve_test VALUES (1, ROW(1, 'abc'))", 1);
+        assertUpdate("ALTER TABLE evolve_test DROP COLUMN a");
+        assertUpdate("ALTER TABLE evolve_test ADD COLUMN a ROW(b VARCHAR, c BIGINT)");
+        assertQuery("SELECT a.b FROM evolve_test", "VALUES NULL");
+        assertUpdate("DROP TABLE evolve_test");

         // Verify that changing subfield ordering does not revive dropped data
-        try {
-            assertUpdate("CREATE TABLE evolve_test (dummy BIGINT, a ROW(b BIGINT, c VARCHAR), d BIGINT) with (partitioning = ARRAY['d'])");
-            assertUpdate("INSERT INTO evolve_test VALUES (1, ROW(2, 'abc'), 3)", 1);
-            assertUpdate("ALTER TABLE evolve_test DROP COLUMN a");
-            assertUpdate("ALTER TABLE evolve_test ADD COLUMN a ROW(c VARCHAR, b BIGINT)");
-            assertUpdate("INSERT INTO evolve_test VALUES (4, 5, ROW('def', 6))", 1);
-            assertQuery("SELECT a.b FROM evolve_test WHERE d = 3", "VALUES NULL");
-            assertQuery("SELECT a.b FROM evolve_test WHERE d = 5", "VALUES 6");
-        }
-        finally {
-            assertUpdate("DROP TABLE IF EXISTS evolve_test");
-        }
+        assertUpdate("CREATE TABLE evolve_test (dummy BIGINT, a ROW(b BIGINT, c VARCHAR), d BIGINT) with (partitioning = ARRAY['d'])");
+        assertUpdate("INSERT INTO evolve_test VALUES (1, ROW(2, 'abc'), 3)", 1);
+        assertUpdate("ALTER TABLE evolve_test DROP COLUMN a");
+        assertUpdate("ALTER TABLE evolve_test ADD COLUMN a ROW(c VARCHAR, b BIGINT)");
+        assertUpdate("INSERT INTO evolve_test VALUES (4, 5, ROW('def', 6))", 1);
+        assertQuery("SELECT a.b FROM evolve_test WHERE d = 3", "VALUES NULL");
+        assertQuery("SELECT a.b FROM evolve_test WHERE d = 5", "VALUES 6");
+        assertUpdate("DROP TABLE evolve_test");
     }

     @Test
     public void testHighlyNestedData()
     {
-        try {
-            assertUpdate("CREATE TABLE nested_data (id INT, row_t ROW(f1 INT, f2 INT, row_t ROW (f1 INT, f2 INT, row_t ROW(f1 INT, f2 INT))))");
-            assertUpdate("INSERT INTO nested_data VALUES (1, ROW(2, 3, ROW(4, 5, ROW(6, 7)))), (11, ROW(12, 13, ROW(14, 15, ROW(16, 17))))", 2);
-            assertUpdate("INSERT INTO nested_data VALUES (21, ROW(22, 23, ROW(24, 25, ROW(26, 27))))", 1);

-            // Test select projected columns, with and without their parent column
-            assertQuery("SELECT id, row_t.row_t.row_t.f2 FROM nested_data", "VALUES (1, 7), (11, 17), (21, 27)");
-            assertQuery("SELECT id, row_t.row_t.row_t.f2, CAST(row_t AS JSON) FROM nested_data",
-                    "VALUES (1, 7, '{\"f1\":2,\"f2\":3,\"row_t\":{\"f1\":4,\"f2\":5,\"row_t\":{\"f1\":6,\"f2\":7}}}'), " +
-                            "(11, 17, '{\"f1\":12,\"f2\":13,\"row_t\":{\"f1\":14,\"f2\":15,\"row_t\":{\"f1\":16,\"f2\":17}}}'), " +
-                            "(21, 27, '{\"f1\":22,\"f2\":23,\"row_t\":{\"f1\":24,\"f2\":25,\"row_t\":{\"f1\":26,\"f2\":27}}}')");

-            // Test predicates on immediate child column and deeper nested column
-            assertQuery("SELECT id, CAST(row_t.row_t.row_t AS JSON) FROM nested_data WHERE row_t.row_t.row_t.f2 = 27", "VALUES (21, '{\"f1\":26,\"f2\":27}')");
-            assertQuery("SELECT id, CAST(row_t.row_t.row_t AS JSON) FROM nested_data WHERE row_t.row_t.row_t.f2 > 20", "VALUES (21, '{\"f1\":26,\"f2\":27}')");
-            assertQuery("SELECT id, CAST(row_t AS JSON) FROM nested_data WHERE row_t.row_t.row_t.f2 = 27",
-                    "VALUES (21, '{\"f1\":22,\"f2\":23,\"row_t\":{\"f1\":24,\"f2\":25,\"row_t\":{\"f1\":26,\"f2\":27}}}')");
-            assertQuery("SELECT id, CAST(row_t AS JSON) FROM nested_data WHERE row_t.row_t.row_t.f2 > 20",
-                    "VALUES (21, '{\"f1\":22,\"f2\":23,\"row_t\":{\"f1\":24,\"f2\":25,\"row_t\":{\"f1\":26,\"f2\":27}}}')");

-            // Test predicates on parent columns
-            assertQuery("SELECT id, row_t.row_t.row_t.f1 FROM nested_data WHERE row_t.row_t.row_t = ROW(16, 17)", "VALUES (11, 16)");
-            assertQuery("SELECT id, row_t.row_t.row_t.f1 FROM nested_data WHERE row_t = ROW(22, 23, ROW(24, 25, ROW(26, 27)))", "VALUES (21, 26)");
-        }
-        finally {
-            assertUpdate("DROP TABLE IF EXISTS nested_data");
-        }
+        assertUpdate("CREATE TABLE nested_data (id INT, row_t ROW(f1 INT, f2 INT, row_t ROW (f1 INT, f2 INT, row_t ROW(f1 INT, f2 INT))))");
+        assertUpdate("INSERT INTO nested_data VALUES (1, ROW(2, 3, ROW(4, 5, ROW(6, 7)))), (11, ROW(12, 13, ROW(14, 15, ROW(16, 17))))", 2);
+        assertUpdate("INSERT INTO nested_data VALUES (21, ROW(22, 23, ROW(24, 25, ROW(26, 27))))", 1);

+        // Test select projected columns, with and without their parent column
+        assertQuery("SELECT id, row_t.row_t.row_t.f2 FROM nested_data", "VALUES (1, 7), (11, 17), (21, 27)");
+        assertQuery("SELECT id, row_t.row_t.row_t.f2, CAST(row_t AS JSON) FROM nested_data",
+                "VALUES (1, 7, '{\"f1\":2,\"f2\":3,\"row_t\":{\"f1\":4,\"f2\":5,\"row_t\":{\"f1\":6,\"f2\":7}}}'), " +
+                        "(11, 17, '{\"f1\":12,\"f2\":13,\"row_t\":{\"f1\":14,\"f2\":15,\"row_t\":{\"f1\":16,\"f2\":17}}}'), " +
+                        "(21, 27, '{\"f1\":22,\"f2\":23,\"row_t\":{\"f1\":24,\"f2\":25,\"row_t\":{\"f1\":26,\"f2\":27}}}')");

+        // Test predicates on immediate child column and deeper nested column
+        assertQuery("SELECT id, CAST(row_t.row_t.row_t AS JSON) FROM nested_data WHERE row_t.row_t.row_t.f2 = 27", "VALUES (21, '{\"f1\":26,\"f2\":27}')");
+        assertQuery("SELECT id, CAST(row_t.row_t.row_t AS JSON) FROM nested_data WHERE row_t.row_t.row_t.f2 > 20", "VALUES (21, '{\"f1\":26,\"f2\":27}')");
+        assertQuery("SELECT id, CAST(row_t AS JSON) FROM nested_data WHERE row_t.row_t.row_t.f2 = 27",
+                "VALUES (21, '{\"f1\":22,\"f2\":23,\"row_t\":{\"f1\":24,\"f2\":25,\"row_t\":{\"f1\":26,\"f2\":27}}}')");
+        assertQuery("SELECT id, CAST(row_t AS JSON) FROM nested_data WHERE row_t.row_t.row_t.f2 > 20",
+                "VALUES (21, '{\"f1\":22,\"f2\":23,\"row_t\":{\"f1\":24,\"f2\":25,\"row_t\":{\"f1\":26,\"f2\":27}}}')");

+        // Test predicates on parent columns
+        assertQuery("SELECT id, row_t.row_t.row_t.f1 FROM nested_data WHERE row_t.row_t.row_t = ROW(16, 17)", "VALUES (11, 16)");
+        assertQuery("SELECT id, row_t.row_t.row_t.f1 FROM nested_data WHERE row_t = ROW(22, 23, ROW(24, 25, ROW(26, 27)))", "VALUES (21, 26)");

+        assertUpdate("DROP TABLE IF EXISTS nested_data");
     }

     @Test
     public void testProjectionPushdownAfterRename()
     {
-        try {
-            assertUpdate("CREATE TABLE projection_pushdown_after_rename (id INT, a ROW(b INT, c ROW (d INT)))");
-            assertUpdate("INSERT INTO projection_pushdown_after_rename VALUES (1, ROW(2, ROW(3))), (11, ROW(12, ROW(13)))", 2);
-            assertUpdate("INSERT INTO projection_pushdown_after_rename VALUES (21, ROW(22, ROW(23)))", 1);

-            String expected = "VALUES (11, JSON '{\"b\":12,\"c\":{\"d\":13}}', 13)";
-            assertQuery("SELECT id, CAST(a AS JSON), a.c.d FROM projection_pushdown_after_rename WHERE a.b = 12", expected);
-            assertUpdate("ALTER TABLE projection_pushdown_after_rename RENAME COLUMN a TO row_t");
-            assertQuery("SELECT id, CAST(row_t AS JSON), row_t.c.d FROM projection_pushdown_after_rename WHERE row_t.b = 12", expected);
-        }
-        finally {
-            assertUpdate("DROP TABLE IF EXISTS projection_pushdown_after_rename");
-        }
+        assertUpdate("CREATE TABLE projection_pushdown_after_rename (id INT, a ROW(b INT, c ROW (d INT)))");
+        assertUpdate("INSERT INTO projection_pushdown_after_rename VALUES (1, ROW(2, ROW(3))), (11, ROW(12, ROW(13)))", 2);
+        assertUpdate("INSERT INTO projection_pushdown_after_rename VALUES (21, ROW(22, ROW(23)))", 1);

+        String expected = "VALUES (11, JSON '{\"b\":12,\"c\":{\"d\":13}}', 13)";
+        assertQuery("SELECT id, CAST(a AS JSON), a.c.d FROM projection_pushdown_after_rename WHERE a.b = 12", expected);
+        assertUpdate("ALTER TABLE projection_pushdown_after_rename RENAME COLUMN a TO row_t");
+        assertQuery("SELECT id, CAST(row_t AS JSON), row_t.c.d FROM projection_pushdown_after_rename WHERE row_t.b = 12", expected);

+        assertUpdate("DROP TABLE IF EXISTS projection_pushdown_after_rename");
     }

     @Test
     public void testProjectionWithCaseSensitiveField()
     {
-        try {
-            assertUpdate("CREATE TABLE projection_with_case_sensitive_field (id INT, a ROW(\"UPPER_CASE\" INT, \"lower_case\" INT, \"MiXeD_cAsE\" INT))");
-            assertUpdate("INSERT INTO projection_with_case_sensitive_field VALUES (1, ROW(2, 3, 4)), (5, ROW(6, 7, 8))", 2);
+        assertUpdate("CREATE TABLE projection_with_case_sensitive_field (id INT, a ROW(\"UPPER_CASE\" INT, \"lower_case\" INT, \"MiXeD_cAsE\" INT))");
+        assertUpdate("INSERT INTO projection_with_case_sensitive_field VALUES (1, ROW(2, 3, 4)), (5, ROW(6, 7, 8))", 2);

-            String expected = "VALUES (2, 3, 4), (6, 7, 8)";
-            assertQuery("SELECT a.UPPER_CASE, a.lower_case, a.MiXeD_cAsE FROM projection_with_case_sensitive_field", expected);
-            assertQuery("SELECT a.upper_case, a.lower_case, a.mixed_case FROM projection_with_case_sensitive_field", expected);
-            assertQuery("SELECT a.UPPER_CASE, a.LOWER_CASE, a.MIXED_CASE FROM projection_with_case_sensitive_field", expected);
-        }
-        finally {
-            assertUpdate("DROP TABLE IF EXISTS projection_with_case_sensitive_field");
-        }
+        String expected = "VALUES (2, 3, 4), (6, 7, 8)";
+        assertQuery("SELECT a.UPPER_CASE, a.lower_case, a.MiXeD_cAsE FROM projection_with_case_sensitive_field", expected);
+        assertQuery("SELECT a.upper_case, a.lower_case, a.mixed_case FROM projection_with_case_sensitive_field", expected);
+        assertQuery("SELECT a.UPPER_CASE, a.LOWER_CASE, a.MIXED_CASE FROM projection_with_case_sensitive_field", expected);

+        assertUpdate("DROP TABLE IF EXISTS projection_with_case_sensitive_field");
     }

     @Test
     public void testProjectionPushdownReadsLessData()
     {
         String largeVarchar = "ZZZ".repeat(1000);
-        try {
-            assertUpdate("CREATE TABLE projection_pushdown_reads_less_data (id INT, a ROW(b VARCHAR, c INT))");
-            assertUpdate(
-                    format("INSERT INTO projection_pushdown_reads_less_data VALUES (1, ROW('%s', 3)), (11, ROW('%1$s', 13)), (21, ROW('%1$s', 23)), (31, ROW('%1$s', 33))", largeVarchar),
-                    4);

-            String selectQuery = "SELECT a.c FROM projection_pushdown_reads_less_data";
-            Set<Integer> expected = ImmutableSet.of(3, 13, 23, 33);
-            Session sessionWithoutPushdown = Session.builder(getSession())
-                    .setCatalogSessionProperty(ICEBERG_CATALOG, "projection_pushdown_enabled", "false")
-                    .build();

-            assertQueryStats(
-                    getSession(),
-                    selectQuery,
-                    statsWithPushdown -> {
-                        DataSize processedDataSizeWithPushdown = statsWithPushdown.getProcessedInputDataSize();
-                        assertQueryStats(
-                                sessionWithoutPushdown,
-                                selectQuery,
-                                statsWithoutPushdown -> assertThat(statsWithoutPushdown.getProcessedInputDataSize()).isGreaterThan(processedDataSizeWithPushdown),
-                                results -> assertEquals(results.getOnlyColumnAsSet(), expected));
-                    },
-                    results -> assertEquals(results.getOnlyColumnAsSet(), expected));
-        }
-        finally {
-            assertUpdate("DROP TABLE IF EXISTS projection_pushdown_reads_less_data");
-        }
+        assertUpdate("CREATE TABLE projection_pushdown_reads_less_data (id INT, a ROW(b VARCHAR, c INT))");
+        assertUpdate(
+                format("INSERT INTO projection_pushdown_reads_less_data VALUES (1, ROW('%s', 3)), (11, ROW('%1$s', 13)), (21, ROW('%1$s', 23)), (31, ROW('%1$s', 33))", largeVarchar),
+                4);

+        String selectQuery = "SELECT a.c FROM projection_pushdown_reads_less_data";
+        Set<Integer> expected = ImmutableSet.of(3, 13, 23, 33);
+        Session sessionWithoutPushdown = Session.builder(getSession())
+                .setCatalogSessionProperty(ICEBERG_CATALOG, "projection_pushdown_enabled", "false")
+                .build();

+        assertQueryStats(
+                getSession(),
+                selectQuery,
+                statsWithPushdown -> {
+                    DataSize processedDataSizeWithPushdown = statsWithPushdown.getProcessedInputDataSize();
+                    assertQueryStats(
+                            sessionWithoutPushdown,
+                            selectQuery,
+                            statsWithoutPushdown -> assertThat(statsWithoutPushdown.getProcessedInputDataSize()).isGreaterThan(processedDataSizeWithPushdown),
+                            results -> assertEquals(results.getOnlyColumnAsSet(), expected));
+                },
+                results -> assertEquals(results.getOnlyColumnAsSet(), expected));

+        assertUpdate("DROP TABLE IF EXISTS projection_pushdown_reads_less_data");
     }

     @Test
     public void testProjectionPushdownOnPartitionedTables()
     {
-        try {
-            assertUpdate("CREATE TABLE table_with_partition_at_beginning (id BIGINT, root ROW(f1 BIGINT, f2 BIGINT)) WITH (partitioning = ARRAY['id'])");
-            assertUpdate("INSERT INTO table_with_partition_at_beginning VALUES (1, ROW(1, 2)), (1, ROW(2, 3)), (1, ROW(3, 4))", 3);
-            assertQuery("SELECT id, root.f2 FROM table_with_partition_at_beginning", "VALUES (1, 2), (1, 3), (1, 4)");
+        assertUpdate("CREATE TABLE table_with_partition_at_beginning (id BIGINT, root ROW(f1 BIGINT, f2 BIGINT)) WITH (partitioning = ARRAY['id'])");
+        assertUpdate("INSERT INTO table_with_partition_at_beginning VALUES (1, ROW(1, 2)), (1, ROW(2, 3)), (1, ROW(3, 4))", 3);
+        assertQuery("SELECT id, root.f2 FROM table_with_partition_at_beginning", "VALUES (1, 2), (1, 3), (1, 4)");
+        assertUpdate("DROP TABLE table_with_partition_at_beginning");

-            assertUpdate("CREATE TABLE table_with_partition_at_end (root ROW(f1 BIGINT, f2 BIGINT), id BIGINT) WITH (partitioning = ARRAY['id'])");
-            assertUpdate("INSERT INTO table_with_partition_at_end VALUES (ROW(1, 2), 1), (ROW(2, 3), 1), (ROW(3, 4), 1)", 3);
-            assertQuery("SELECT root.f2, id FROM table_with_partition_at_end", "VALUES (2, 1), (3, 1), (4, 1)");
-        }
-        finally {
-            assertUpdate("DROP TABLE IF EXISTS table_with_partition_at_beginning");
-            assertUpdate("DROP TABLE IF EXISTS table_with_partition_at_end");
-        }
+        assertUpdate("CREATE TABLE table_with_partition_at_end (root ROW(f1 BIGINT, f2 BIGINT), id BIGINT) WITH (partitioning = ARRAY['id'])");
+        assertUpdate("INSERT INTO table_with_partition_at_end VALUES (ROW(1, 2), 1), (ROW(2, 3), 1), (ROW(3, 4), 1)", 3);
+        assertQuery("SELECT root.f2, id FROM table_with_partition_at_end", "VALUES (2, 1), (3, 1), (4, 1)");
+        assertUpdate("DROP TABLE table_with_partition_at_end");
     }

     @Test
