Use URI path for Glue location in tests #16771

Merged · 1 commit · Mar 29, 2023
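
In short, the affected tests stop passing the warehouse location around as a raw path string and instead keep a java.nio.file.Path, deriving the Glue location from Path.toUri(). A minimal sketch of the before/after pattern (the directory name here is illustrative, not taken from the tests):

import java.nio.file.Path;
import java.nio.file.Paths;

class GlueLocationSketch
{
    public static void main(String[] args)
    {
        // Hypothetical directory standing in for a query runner's base data dir
        Path dataDirectory = Paths.get("/tmp/delta_lake_data");

        // Before: a plain filesystem path, e.g. /tmp/delta_lake_data
        String plainPath = dataDirectory.toString();

        // After: a URI path, e.g. file:///tmp/delta_lake_data
        String uriPath = dataDirectory.toUri().toString();

        System.out.println(plainPath);
        System.out.println(uriPath);
    }
}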

@@ -52,7 +52,7 @@ public abstract class BaseDeltaLakeRegisterTableProcedureTest
     protected static final String CATALOG_NAME = "delta_lake";
     protected static final String SCHEMA = "test_delta_lake_register_table_" + randomNameSuffix();

-    private String dataDirectory;
+    private Path dataDirectory;
     private HiveMetastore metastore;

     @Override
@@ -65,7 +65,7 @@ protected QueryRunner createQueryRunner()
                 .build();
         DistributedQueryRunner queryRunner = DistributedQueryRunner.builder(session).build();

-        this.dataDirectory = queryRunner.getCoordinator().getBaseDataDir().resolve("delta_lake_data").toString();
+        this.dataDirectory = queryRunner.getCoordinator().getBaseDataDir().resolve("delta_lake_data");
         this.metastore = createTestMetastore(dataDirectory);

         queryRunner.installPlugin(new TestingDeltaLakePlugin(Optional.of(new TestingDeltaLakeMetastoreModule(metastore)), EMPTY_MODULE));
@@ -81,15 +81,15 @@ protected QueryRunner createQueryRunner()
         return queryRunner;
     }

-    protected abstract HiveMetastore createTestMetastore(String dataDirectory);
+    protected abstract HiveMetastore createTestMetastore(Path dataDirectory);

     @AfterClass(alwaysRun = true)
     public void tearDown()
             throws IOException
     {
         if (metastore != null) {
             metastore.dropDatabase(SCHEMA, false);
-            deleteRecursively(Path.of(dataDirectory), ALLOW_INSECURE);
+            deleteRecursively(dataDirectory, ALLOW_INSECURE);
         }
     }

@@ -181,7 +181,7 @@ public void testRegisterTableWithDifferentTableName()
     public void testRegisterTableWithTrailingSpaceInLocation()
     {
         String tableName = "test_register_table_with_trailing_space_" + randomNameSuffix();
-        String tableLocationWithTrailingSpace = dataDirectory + "/" + tableName + " ";
+        String tableLocationWithTrailingSpace = dataDirectory.toUri() + "/" + tableName + " ";

         assertQuerySucceeds(format("CREATE TABLE %s WITH (location = '%s') AS SELECT 1 AS a, 'INDIA' AS b, true AS c", tableName, tableLocationWithTrailingSpace));
         assertQuery("SELECT * FROM " + tableName, "VALUES (1, 'INDIA', true)");

@@ -45,7 +45,7 @@ public abstract class BaseDeltaLakeSharedMetastoreViewsTest
     protected static final String HIVE_CATALOG_NAME = "hive";
     protected static final String SCHEMA = "test_shared_schema_views_" + randomNameSuffix();

-    private String dataDirectory;
+    private Path dataDirectory;
     private HiveMetastore metastore;

     @Override
@@ -58,7 +58,7 @@ protected QueryRunner createQueryRunner()
                 .build();
         DistributedQueryRunner queryRunner = DistributedQueryRunner.builder(session).build();

-        this.dataDirectory = queryRunner.getCoordinator().getBaseDataDir().resolve("delta_lake_data").toString();
+        this.dataDirectory = queryRunner.getCoordinator().getBaseDataDir().resolve("delta_lake_data");
         this.metastore = createTestMetastore(dataDirectory);

         queryRunner.installPlugin(new TestingDeltaLakePlugin(Optional.of(new TestingDeltaLakeMetastoreModule(metastore)), EMPTY_MODULE));
@@ -76,7 +76,7 @@ protected QueryRunner createQueryRunner()
         return queryRunner;
     }

-    protected abstract HiveMetastore createTestMetastore(String dataDirectory);
+    protected abstract HiveMetastore createTestMetastore(Path dataDirectory);

     @Test
     public void testViewWithLiteralColumnCreatedInDeltaLakeIsReadableInHive()
@@ -160,7 +160,7 @@ public void cleanup()
     {
         if (metastore != null) {
             metastore.dropDatabase(SCHEMA, false);
-            deleteRecursively(Path.of(dataDirectory), ALLOW_INSECURE);
+            deleteRecursively(dataDirectory, ALLOW_INSECURE);
         }
     }
 }

@@ -15,16 +15,16 @@

 import io.trino.plugin.hive.metastore.HiveMetastore;

-import java.io.File;
+import java.nio.file.Path;

 import static io.trino.plugin.hive.metastore.file.FileHiveMetastore.createTestingFileHiveMetastore;

 public class TestDeltaLakeRegisterTableProcedureWithFileMetastore
         extends BaseDeltaLakeRegisterTableProcedureTest
 {
     @Override
-    protected HiveMetastore createTestMetastore(String dataDirectory)
+    protected HiveMetastore createTestMetastore(Path dataDirectory)
     {
-        return createTestingFileHiveMetastore(new File(dataDirectory));
+        return createTestingFileHiveMetastore(dataDirectory.toFile());
     }
 }

@@ -15,16 +15,16 @@

 import io.trino.plugin.hive.metastore.HiveMetastore;

-import java.io.File;
+import java.nio.file.Path;

 import static io.trino.plugin.hive.metastore.file.FileHiveMetastore.createTestingFileHiveMetastore;

 public class TestDeltaLakeSharedFileMetastoreViews
         extends BaseDeltaLakeSharedMetastoreViewsTest
 {
     @Override
-    protected HiveMetastore createTestMetastore(String dataDirectory)
+    protected HiveMetastore createTestMetastore(Path dataDirectory)
     {
-        return createTestingFileHiveMetastore(new File(dataDirectory));
+        return createTestingFileHiveMetastore(dataDirectory.toFile());
     }
 }

@@ -15,6 +15,8 @@

 import io.trino.plugin.hive.metastore.HiveMetastore;

+import java.nio.file.Path;
+
 import static io.trino.plugin.hive.metastore.glue.GlueHiveMetastore.createTestingGlueHiveMetastore;

 /**
@@ -25,7 +27,7 @@ public class TestDeltaLakeSharedGlueMetastoreViews
         extends BaseDeltaLakeSharedMetastoreViewsTest
 {
     @Override
-    protected HiveMetastore createTestMetastore(String dataDirectory)
+    protected HiveMetastore createTestMetastore(Path dataDirectory)
     {
         return createTestingGlueHiveMetastore(dataDirectory);
     }

@@ -62,18 +62,18 @@ protected QueryRunner createQueryRunner()
                 "delta_lake",
                 ImmutableMap.<String, String>builder()
                         .put("hive.metastore", "glue")
-                        .put("hive.metastore.glue.default-warehouse-dir", dataDirectory.toString())
+                        .put("hive.metastore.glue.default-warehouse-dir", dataDirectory.toUri().toString())
                         .put("delta.hive-catalog-name", "hive_with_redirections")
                         .buildOrThrow());

-        this.glueMetastore = createTestingGlueHiveMetastore(dataDirectory.toString());
+        this.glueMetastore = createTestingGlueHiveMetastore(dataDirectory);
         queryRunner.installPlugin(new TestingHivePlugin(glueMetastore));
         queryRunner.createCatalog(
                 "hive_with_redirections",
                 "hive",
                 ImmutableMap.of("hive.delta-lake-catalog-name", "delta_with_redirections"));

-        queryRunner.execute("CREATE SCHEMA " + schema + " WITH (location = '" + dataDirectory.toString() + "')");
+        queryRunner.execute("CREATE SCHEMA " + schema + " WITH (location = '" + dataDirectory.toUri() + "')");
         queryRunner.execute("CREATE TABLE hive_with_redirections." + schema + ".hive_table (a_integer) WITH (format='PARQUET') AS VALUES 1, 2, 3");
         queryRunner.execute("CREATE TABLE delta_with_redirections." + schema + ".delta_table (a_varchar) AS VALUES 'a', 'b', 'c'");

@@ -102,7 +102,7 @@ protected String getExpectedHiveCreateSchema(String catalogName)
                 " location = '%s'\n" +
                 ")";

-        return format(expectedHiveCreateSchema, catalogName, schema, dataDirectory);
+        return format(expectedHiveCreateSchema, catalogName, schema, dataDirectory.toUri().toString().replaceFirst("/$", ""));
     }

     @Override
@@ -112,6 +112,6 @@ protected String getExpectedDeltaLakeCreateSchema(String catalogName)
                 "WITH (\n" +
                 " location = '%s'\n" +
                 ")";
-        return format(expectedDeltaLakeCreateSchema, catalogName, schema, dataDirectory, schema);
+        return format(expectedDeltaLakeCreateSchema, catalogName, schema, dataDirectory.toUri().toString().replaceFirst("/$", ""));
     }
 }
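
The expected-schema helpers above append .replaceFirst("/$", "") after converting to a URI, presumably because Path.toUri() can render an existing directory with a trailing slash while the location recorded in the metastore has none. A small sketch of that normalization (the URI value is illustrative):

import java.net.URI;

class TrailingSlashSketch
{
    public static void main(String[] args)
    {
        // Illustrative directory URI as Path.toUri() might render it
        URI location = URI.create("file:///tmp/delta_lake_data/");

        // Drop a single trailing slash so the value matches the recorded schema location
        String normalized = location.toString().replaceFirst("/$", "");

        System.out.println(normalized); // file:///tmp/delta_lake_data
    }
}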

@@ -54,12 +54,12 @@ protected QueryRunner createQueryRunner()
                 ImmutableMap.<String, String>builder()
                         .put("hive.metastore", "glue")
                         .put("hive.metastore.glue.region", "us-east-2")
-                        .put("hive.metastore.glue.default-warehouse-dir", metastoreDir.getPath())
+                        .put("hive.metastore.glue.default-warehouse-dir", metastoreDir.toURI().toString())
                         .buildOrThrow());

-        metastore = createTestingGlueHiveMetastore(metastoreDir.getPath());
+        metastore = createTestingGlueHiveMetastore(metastoreDir.toPath());

-        queryRunner.execute("CREATE SCHEMA " + SCHEMA + " WITH (location = '" + metastoreDir.getPath() + "')");
+        queryRunner.execute("CREATE SCHEMA " + SCHEMA + " WITH (location = '" + metastoreDir.toURI() + "')");
         return queryRunner;
     }


@@ -54,7 +54,7 @@ public class TestDeltaLakeConcurrentModificationGlueMetastore
 {
     private static final String CATALOG_NAME = "test_delta_lake_concurrent";
     private static final String SCHEMA = "test_delta_lake_glue_concurrent_" + randomNameSuffix();
-    private String dataDirectory;
+    private Path dataDirectory;
     private GlueHiveMetastore metastore;
     private final AtomicBoolean failNextGlueDeleteTableCall = new AtomicBoolean(false);

@@ -69,10 +69,10 @@ protected QueryRunner createQueryRunner()

         DistributedQueryRunner queryRunner = DistributedQueryRunner.builder(deltaLakeSession).build();

-        dataDirectory = queryRunner.getCoordinator().getBaseDataDir().resolve("data_delta_concurrent").toString();
+        dataDirectory = queryRunner.getCoordinator().getBaseDataDir().resolve("data_delta_concurrent");
         GlueMetastoreStats stats = new GlueMetastoreStats();
         GlueHiveMetastoreConfig glueConfig = new GlueHiveMetastoreConfig()
-                .setDefaultWarehouseDir(dataDirectory);
+                .setDefaultWarehouseDir(dataDirectory.toUri().toString());

         AWSGlueAsync glueClient = createAsyncGlueClient(glueConfig, DefaultAWSCredentialsProviderChain.getInstance(), Optional.empty(), stats.newRequestMetricsCollector());
         AWSGlueAsync proxiedGlueClient = newProxy(AWSGlueAsync.class, (proxy, method, args) -> {
@@ -123,7 +123,7 @@ public void cleanup()
     {
         if (metastore != null) {
             metastore.dropDatabase(SCHEMA, false);
-            deleteRecursively(Path.of(dataDirectory), ALLOW_INSECURE);
+            deleteRecursively(dataDirectory, ALLOW_INSECURE);
         }
     }
 }

@@ -16,13 +16,15 @@
 import io.trino.plugin.deltalake.BaseDeltaLakeRegisterTableProcedureTest;
 import io.trino.plugin.hive.metastore.HiveMetastore;

+import java.nio.file.Path;
+
 import static io.trino.plugin.hive.metastore.glue.GlueHiveMetastore.createTestingGlueHiveMetastore;

 public class TestDeltaLakeRegisterTableProcedureWithGlue
         extends BaseDeltaLakeRegisterTableProcedureTest
 {
     @Override
-    protected HiveMetastore createTestMetastore(String dataDirectory)
+    protected HiveMetastore createTestMetastore(Path dataDirectory)
     {
         return createTestingGlueHiveMetastore(dataDirectory);
     }

@@ -22,7 +22,7 @@
 import org.testng.annotations.AfterClass;
 import org.testng.annotations.Test;

-import java.io.File;
+import java.nio.file.Path;

 import static io.trino.testing.TestingNames.randomNameSuffix;
 import static io.trino.testing.TestingSession.testSessionBuilder;
@@ -34,7 +34,7 @@ public class TestDeltaLakeRenameToWithGlueMetastore
     protected static final String SCHEMA = "test_delta_lake_rename_to_with_glue_" + randomNameSuffix();
     protected static final String CATALOG_NAME = "test_delta_lake_rename_to_with_glue";

-    private File schemaLocation;
+    private Path schemaLocation;

     @Override
     protected QueryRunner createQueryRunner()
@@ -49,9 +49,9 @@ protected QueryRunner createQueryRunner()
                 .setCatalogName(CATALOG_NAME)
                 .setDeltaProperties(ImmutableMap.of("hive.metastore", "glue"))
                 .build();
-        schemaLocation = new File(queryRunner.getCoordinator().getBaseDataDir().resolve("delta_lake_data").toString());
-        schemaLocation.deleteOnExit();
-        queryRunner.execute("CREATE SCHEMA " + SCHEMA + " WITH (location = '" + schemaLocation.getPath() + "')");
+        schemaLocation = queryRunner.getCoordinator().getBaseDataDir().resolve("delta_lake_data");
+        schemaLocation.toFile().deleteOnExit();
+        queryRunner.execute("CREATE SCHEMA " + SCHEMA + " WITH (location = '" + schemaLocation.toUri() + "')");
         return queryRunner;
     }

@@ -60,7 +60,7 @@ public void testRenameOfExternalTable()
     {
         String oldTable = "test_table_external_to_be_renamed_" + randomNameSuffix();
         String newTable = "test_table_external_renamed_" + randomNameSuffix();
-        String location = schemaLocation.getPath() + "/tableLocation/";
+        String location = schemaLocation.toUri() + "/tableLocation/";
         try {
             assertUpdate(format("CREATE TABLE %s WITH (location = '%s') AS SELECT 1 AS val ", oldTable, location), 1);
             String oldLocation = (String) computeScalar("SELECT \"$path\" FROM " + oldTable);

@@ -42,10 +42,10 @@ public class TestDeltaLakeViewsGlueMetastore
 {
     private static final String SCHEMA = "test_delta_lake_glue_views_" + randomNameSuffix();
     private static final String CATALOG_NAME = "test_delta_lake_glue_views";
-    private String dataDirectory;
+    private Path dataDirectory;
     private HiveMetastore metastore;

-    private HiveMetastore createTestMetastore(String dataDirectory)
+    private HiveMetastore createTestMetastore(Path dataDirectory)
     {
         return createTestingGlueHiveMetastore(dataDirectory);
     }
@@ -61,7 +61,7 @@ protected QueryRunner createQueryRunner()

         DistributedQueryRunner queryRunner = DistributedQueryRunner.builder(deltaLakeSession).build();

-        dataDirectory = queryRunner.getCoordinator().getBaseDataDir().resolve("data_delta_lake_views").toString();
+        dataDirectory = queryRunner.getCoordinator().getBaseDataDir().resolve("data_delta_lake_views");
         metastore = createTestMetastore(dataDirectory);

         queryRunner.installPlugin(new TestingDeltaLakePlugin(Optional.of(new TestingDeltaLakeMetastoreModule(metastore)), EMPTY_MODULE));
@@ -89,7 +89,7 @@ public void cleanup()
     {
         if (metastore != null) {
             metastore.dropDatabase(SCHEMA, false);
-            deleteRecursively(Path.of(dataDirectory), ALLOW_INSECURE);
+            deleteRecursively(dataDirectory, ALLOW_INSECURE);
         }
     }
 }

@@ -217,14 +217,14 @@ public GlueHiveMetastore(
     }

     @VisibleForTesting
-    public static GlueHiveMetastore createTestingGlueHiveMetastore(String defaultWarehouseDir)
+    public static GlueHiveMetastore createTestingGlueHiveMetastore(java.nio.file.Path defaultWarehouseDir)
     {
         HdfsConfig hdfsConfig = new HdfsConfig();
         HdfsConfiguration hdfsConfiguration = new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
         HdfsEnvironment hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hdfsConfig, new NoHdfsAuthentication());
         GlueMetastoreStats stats = new GlueMetastoreStats();
         GlueHiveMetastoreConfig glueConfig = new GlueHiveMetastoreConfig()
-                .setDefaultWarehouseDir(defaultWarehouseDir);
+                .setDefaultWarehouseDir(defaultWarehouseDir.toUri().toString());
         return new GlueHiveMetastore(
                 hdfsEnvironment,
                 glueConfig,
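
With the signature change above, callers hand createTestingGlueHiveMetastore a Path, and the helper itself calls setDefaultWarehouseDir(defaultWarehouseDir.toUri().toString()), as shown in the hunk. A usage sketch mirroring the Iceberg tests below (it still needs the Trino test modules and AWS Glue credentials to actually run):

import io.trino.plugin.hive.metastore.glue.GlueHiveMetastore;

import java.nio.file.Files;
import java.nio.file.Path;

import static io.trino.plugin.hive.metastore.glue.GlueHiveMetastore.createTestingGlueHiveMetastore;

class GlueMetastoreUsageSketch
{
    public static void main(String[] args)
            throws Exception
    {
        // Create a temporary warehouse directory and pass the Path itself, not its toString()
        Path dataDirectory = Files.createTempDirectory("iceberg_data");
        dataDirectory.toFile().deleteOnExit();

        // The default warehouse dir is derived from dataDirectory.toUri() inside the helper
        GlueHiveMetastore glueHiveMetastore = createTestingGlueHiveMetastore(dataDirectory);
    }
}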

@@ -108,7 +108,7 @@ protected LocalQueryRunner createQueryRunner()
         dataDirectory = Files.createTempDirectory("test_iceberg_create_table_failure");
         dataDirectory.toFile().deleteOnExit();

-        glueHiveMetastore = createTestingGlueHiveMetastore(dataDirectory.toString());
+        glueHiveMetastore = createTestingGlueHiveMetastore(dataDirectory);
         fileSystem = new HdfsFileSystemFactory(HDFS_ENVIRONMENT).create(TestingConnectorSession.SESSION);

         Database database = Database.builder()

@@ -94,7 +94,7 @@ protected LocalQueryRunner createQueryRunner()
         Path dataDirectory = Files.createTempDirectory("iceberg_data");
         dataDirectory.toFile().deleteOnExit();

-        glueHiveMetastore = createTestingGlueHiveMetastore(dataDirectory.toString());
+        glueHiveMetastore = createTestingGlueHiveMetastore(dataDirectory);

         Database database = Database.builder()
                 .setDatabaseName(schemaName)

@@ -87,15 +87,15 @@ protected QueryRunner createQueryRunner()
                         "hive.metastore.glue.default-warehouse-dir", dataDirectory.toString(),
                         "iceberg.hive-catalog-name", "hive"));

-        this.glueMetastore = createTestingGlueHiveMetastore(dataDirectory.toString());
+        this.glueMetastore = createTestingGlueHiveMetastore(dataDirectory);
         queryRunner.installPlugin(new TestingHivePlugin(glueMetastore));
         queryRunner.createCatalog(HIVE_CATALOG, "hive");
         queryRunner.createCatalog(
                 "hive_with_redirections",
                 "hive",
                 ImmutableMap.of("hive.iceberg-catalog-name", "iceberg"));

-        queryRunner.execute("CREATE SCHEMA " + schema + " WITH (location = '" + dataDirectory.toString() + "')");
+        queryRunner.execute("CREATE SCHEMA " + schema + " WITH (location = '" + dataDirectory.toUri() + "')");
         copyTpchTables(queryRunner, "tpch", TINY_SCHEMA_NAME, icebergSession, ImmutableList.of(TpchTable.NATION));
         copyTpchTables(queryRunner, "tpch", TINY_SCHEMA_NAME, hiveSession, ImmutableList.of(TpchTable.REGION));

@@ -124,7 +124,7 @@ protected String getExpectedHiveCreateSchema(String catalogName)
                 " location = '%s'\n" +
                 ")";

-        return format(expectedHiveCreateSchema, catalogName, schema, dataDirectory);
+        return format(expectedHiveCreateSchema, catalogName, schema, dataDirectory.toUri().toString().replaceFirst("/$", ""));
     }

     @Override
@@ -134,6 +134,6 @@ protected String getExpectedIcebergCreateSchema(String catalogName)
                 "WITH (\n" +
                 " location = '%s'\n" +
                 ")";
-        return format(expectedIcebergCreateSchema, catalogName, schema, dataDirectory, schema);
+        return format(expectedIcebergCreateSchema, catalogName, schema, dataDirectory.toUri().toString().replaceFirst("/$", ""));
     }
 }