Skip to content
Permalink

Comparing changes

This is a direct comparison between two commits made in this repository or its related repositories. View the default comparison for this range or learn more about diff comparisons.

Open a pull request

Create a new pull request by comparing changes across two branches. If you need to, you can also compare across forks. Learn more about diff comparisons here.
base repository: trinodb/trino
Failed to load repositories. Confirm that the selected base ref is valid, then try again.
Loading
base: 3e95d3272364270dcee6ac40dbc3d505f37697b6
Choose a base ref
..
head repository: trinodb/trino
Failed to load repositories. Confirm that the selected head ref is valid, then try again.
Loading
compare: 63bffd171f23b49558dcb470f2ae2bd4704544b3
Choose a head ref
Original file line number Diff line number Diff line change
@@ -21,7 +21,6 @@
import static io.trino.tempto.assertions.QueryAssert.assertQueryFailure;
import static io.trino.tempto.assertions.QueryAssert.assertThat;
import static io.trino.tests.product.hive.util.TableLocationUtils.getTableLocation;
import static io.trino.tests.product.utils.QueryExecutors.onHive;
import static io.trino.tests.product.utils.QueryExecutors.onTrino;
import static java.lang.String.format;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
@@ -201,7 +200,7 @@ private void prepare(String tableName)
removeHdfsDirectory(tableLocation);
makeHdfsDirectory(tableLocation);

onHive().executeQuery("CREATE TABLE " + tableName + " (payload bigint) PARTITIONED BY (col_x string, col_y string) STORED AS ORC LOCATION '" + tableLocation + "'");
createTable(tableName, tableLocation);
onTrino().executeQuery("INSERT INTO " + tableName + " VALUES (1, 'a', '1'), (2, 'b', '2')");

// remove partition col_x=b/col_y=2
@@ -223,6 +222,8 @@ private void prepare(String tableName)
assertPartitions(tableName, row("a", "1"), row("b", "2"));
}

// Creates the partitioned test table (payload bigint, partitioned by col_x/col_y)
// at the given location; each subclass issues the DDL through its own engine.
protected abstract void createTable(String tableName, String location);

// Removes the HDFS directory at the given path, if present.
protected abstract void removeHdfsDirectory(String path);

// Creates the HDFS directory at the given path.
protected abstract void makeHdfsDirectory(String path);
Original file line number Diff line number Diff line change
@@ -140,6 +140,12 @@ protected void copyOrcFileToHdfsDirectory(String tableName, String targetDirecto
onHive().executeQuery(format("dfs -cp %s %s", orcFilePath, targetDirectory));
}

@Override
protected void createTable(String tableName, String tableLocation)
{
    // Hive DDL: external-style ORC table pinned to the prepared HDFS location so the
    // test can add/remove partition directories underneath it directly.
    onHive().executeQuery("CREATE TABLE " + tableName + " (payload bigint) PARTITIONED BY (col_x string, col_y string) STORED AS ORC LOCATION '" + tableLocation + "'");
}

// Drop and create a table. Then, return single ORC file path
private String generateOrcFile()
{
Original file line number Diff line number Diff line change
@@ -27,6 +27,7 @@
import static io.trino.tests.product.TestGroups.TRINO_JDBC;
import static io.trino.tests.product.hive.HiveProductTest.ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE;
import static io.trino.tests.product.hive.HiveProductTest.ERROR_COMMITTING_WRITE_TO_HIVE_MATCH;
import static io.trino.tests.product.utils.QueryExecutors.onTrino;

public class TestHdfsSyncPartitionMetadata
extends BaseTestSyncPartitionMetadata
@@ -127,4 +128,10 @@ protected void copyOrcFileToHdfsDirectory(String tableName, String targetDirecto
HiveDataSource dataSource = createResourceDataSource(tableName, "io/trino/tests/product/hive/data/single_int_column/data.orc");
hdfsDataSourceWriter.ensureDataOnHdfs(targetDirectory, dataSource);
}

@Override
protected void createTable(String tableName, String tableLocation)
{
    // Trino DDL for the same logical schema as the Hive variant (payload + col_x/col_y partitions).
    // NOTE(review): tableLocation is ignored here — Trino creates a managed table and chooses
    // its own location. Presumably intentional for the HDFS variant of this test, but confirm
    // the sibling HDFS-path assertions in the base class still resolve against the managed
    // table's actual location.
    onTrino().executeQuery("CREATE TABLE " + tableName + " (payload bigint, col_x varchar, col_y varchar) WITH (format = 'ORC', partitioned_by = ARRAY[ 'col_x', 'col_y' ])");
}
}