diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeAwsConnectorSmokeTest.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeAwsConnectorSmokeTest.java
index 9f27fcf40016..379d28b1516d 100644
--- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeAwsConnectorSmokeTest.java
+++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeAwsConnectorSmokeTest.java
@@ -13,8 +13,10 @@
  */
 package io.trino.plugin.deltalake;
 
+import io.trino.plugin.hive.containers.HiveHadoop;
 import io.trino.plugin.hive.containers.HiveMinioDataLake;
 import io.trino.testing.QueryRunner;
+import org.testng.annotations.AfterClass;
 
 import java.util.List;
 
@@ -24,12 +26,22 @@ public abstract class BaseDeltaLakeAwsConnectorSmokeTest
         extends BaseDeltaLakeConnectorSmokeTest
 {
+    protected HiveMinioDataLake hiveMinioDataLake;
+
     @Override
-    protected HiveMinioDataLake createHiveMinioDataLake()
+    protected HiveHadoop createHiveHadoop()
     {
-        hiveMinioDataLake = new HiveMinioDataLake(bucketName); // closed by superclass
+        hiveMinioDataLake = closeAfterClass(new HiveMinioDataLake(bucketName));
         hiveMinioDataLake.start();
-        return hiveMinioDataLake;
+        return hiveMinioDataLake.getHiveHadoop(); // closed by superclass
+    }
+
+    @Override
+    @AfterClass(alwaysRun = true)
+    public void cleanUp()
+    {
+        hiveMinioDataLake = null; // closed by closeAfterClass
+        super.cleanUp();
     }
 
     @Override
diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeConnectorSmokeTest.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeConnectorSmokeTest.java
index 88d36baecbe3..6ea3890c5dbd 100644
--- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeConnectorSmokeTest.java
+++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeConnectorSmokeTest.java
@@ -26,7 +26,6 @@
 import io.trino.plugin.deltalake.transactionlog.TransactionLogAccess;
 import io.trino.plugin.hive.TestingHivePlugin;
 import io.trino.plugin.hive.containers.HiveHadoop;
-import io.trino.plugin.hive.containers.HiveMinioDataLake;
 import io.trino.plugin.hive.metastore.HiveMetastore;
 import io.trino.plugin.hive.metastore.thrift.BridgingHiveMetastore;
 import io.trino.spi.QueryId;
@@ -42,6 +41,7 @@
 import io.trino.tpch.TpchTable;
 import org.intellij.lang.annotations.Language;
 import org.testng.SkipException;
+import org.testng.annotations.AfterClass;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
@@ -123,13 +123,13 @@ public abstract class BaseDeltaLakeConnectorSmokeTest
 
     protected final String bucketName = "test-delta-lake-integration-smoke-test-" + randomNameSuffix();
 
-    protected HiveMinioDataLake hiveMinioDataLake;
+    protected HiveHadoop hiveHadoop;
     private HiveMetastore metastore;
     private TransactionLogAccess transactionLogAccess;
 
     protected void environmentSetup() {}
 
-    protected abstract HiveMinioDataLake createHiveMinioDataLake()
+    protected abstract HiveHadoop createHiveHadoop()
             throws Exception;
 
     protected abstract Map<String, String> hiveStorageConfiguration();
@@ -152,10 +152,10 @@ protected QueryRunner createQueryRunner()
     {
         environmentSetup();
 
-        this.hiveMinioDataLake = closeAfterClass(createHiveMinioDataLake());
+        this.hiveHadoop = closeAfterClass(createHiveHadoop());
         this.metastore = new BridgingHiveMetastore(
                 testingThriftHiveMetastoreBuilder()
-                        .metastoreClient(hiveMinioDataLake.getHiveHadoop().getHiveMetastoreEndpoint())
+                        .metastoreClient(hiveHadoop.getHiveMetastoreEndpoint())
                         .build());
 
         DistributedQueryRunner queryRunner = createDeltaLakeQueryRunner();
@@ -195,7 +195,7 @@ protected QueryRunner createQueryRunner()
                 "hive",
                 "hive",
                 ImmutableMap.<String, String>builder()
-                        .put("hive.metastore.uri", "thrift://" + hiveMinioDataLake.getHiveHadoop().getHiveMetastoreEndpoint())
+                        .put("hive.metastore.uri", "thrift://" + hiveHadoop.getHiveMetastoreEndpoint())
                         .put("hive.allow-drop-table", "true")
                         .putAll(hiveStorageConfiguration())
                         .buildOrThrow());
@@ -224,10 +224,16 @@ private DistributedQueryRunner createDeltaLakeQueryRunner()
                         .put("hive.metastore-timeout", "1m") // read timed out sometimes happens with the default timeout
                         .putAll(deltaStorageConfiguration())
                         .buildOrThrow(),
-                hiveMinioDataLake.getHiveHadoop(),
+                hiveHadoop,
                 queryRunner -> {});
     }
 
+    @AfterClass(alwaysRun = true)
+    public void cleanUp()
+    {
+        hiveHadoop = null; // closed by closeAfterClass
+    }
+
     @Override
     protected boolean hasBehavior(TestingConnectorBehavior connectorBehavior)
     {
@@ -246,32 +252,30 @@ public void testDropSchemaExternalFiles()
         String subDir = schemaDir + "subdir/";
         String externalFile = subDir + "external-file";
 
-        HiveHadoop hadoopContainer = hiveMinioDataLake.getHiveHadoop();
-
         // Create file in a subdirectory of the schema directory before creating schema
-        hadoopContainer.executeInContainerFailOnError("hdfs", "dfs", "-mkdir", "-p", subDir);
-        hadoopContainer.executeInContainerFailOnError("hdfs", "dfs", "-touchz", externalFile);
+        hiveHadoop.executeInContainerFailOnError("hdfs", "dfs", "-mkdir", "-p", subDir);
+        hiveHadoop.executeInContainerFailOnError("hdfs", "dfs", "-touchz", externalFile);
 
         query(format("CREATE SCHEMA %s WITH (location = '%s')", schemaName, schemaDir));
-        assertThat(hadoopContainer.executeInContainer("hdfs", "dfs", "-test", "-e", externalFile).getExitCode())
+        assertThat(hiveHadoop.executeInContainer("hdfs", "dfs", "-test", "-e", externalFile).getExitCode())
                 .as("external file exists after creating schema")
                 .isEqualTo(0);
 
         query("DROP SCHEMA " + schemaName);
-        assertThat(hadoopContainer.executeInContainer("hdfs", "dfs", "-test", "-e", externalFile).getExitCode())
+        assertThat(hiveHadoop.executeInContainer("hdfs", "dfs", "-test", "-e", externalFile).getExitCode())
                 .as("external file exists after dropping schema")
                 .isEqualTo(0);
 
         // Test behavior without external file
-        hadoopContainer.executeInContainerFailOnError("hdfs", "dfs", "-rm", "-r", subDir);
+        hiveHadoop.executeInContainerFailOnError("hdfs", "dfs", "-rm", "-r", subDir);
 
         query(format("CREATE SCHEMA %s WITH (location = '%s')", schemaName, schemaDir));
-        assertThat(hadoopContainer.executeInContainer("hdfs", "dfs", "-test", "-d", schemaDir).getExitCode())
+        assertThat(hiveHadoop.executeInContainer("hdfs", "dfs", "-test", "-d", schemaDir).getExitCode())
                 .as("schema directory exists after creating schema")
                 .isEqualTo(0);
 
         query("DROP SCHEMA " + schemaName);
-        assertThat(hadoopContainer.executeInContainer("hdfs", "dfs", "-test", "-e", externalFile).getExitCode())
+        assertThat(hiveHadoop.executeInContainer("hdfs", "dfs", "-test", "-e", externalFile).getExitCode())
                 .as("schema directory deleted after dropping schema without external file")
                 .isEqualTo(1);
     }
@@ -476,20 +480,20 @@ public void testHiddenColumns()
     public void testHiveViewsCannotBeAccessed()
     {
         String viewName = "dummy_view";
-        hiveMinioDataLake.getHiveHadoop().runOnHive(format("CREATE VIEW %1$s.%2$s AS SELECT * FROM %1$s.customer", SCHEMA, viewName));
+        hiveHadoop.runOnHive(format("CREATE VIEW %1$s.%2$s AS SELECT * FROM %1$s.customer", SCHEMA, viewName));
         assertEquals(computeScalar(format("SHOW TABLES LIKE '%s'", viewName)), viewName);
         assertThatThrownBy(() -> computeActual("DESCRIBE " + viewName)).hasMessageContaining(format("%s.%s is not a Delta Lake table", SCHEMA, viewName));
-        hiveMinioDataLake.getHiveHadoop().runOnHive("DROP VIEW " + viewName);
+        hiveHadoop.runOnHive("DROP VIEW " + viewName);
     }
 
     @Test
     public void testNonDeltaTablesCannotBeAccessed()
     {
         String tableName = "hive_table";
-        hiveMinioDataLake.getHiveHadoop().runOnHive(format("CREATE TABLE %s.%s (id BIGINT)", SCHEMA, tableName));
+        hiveHadoop.runOnHive(format("CREATE TABLE %s.%s (id BIGINT)", SCHEMA, tableName));
         assertEquals(computeScalar(format("SHOW TABLES LIKE '%s'", tableName)), tableName);
         assertThatThrownBy(() -> computeActual("DESCRIBE " + tableName)).hasMessageContaining(tableName + " is not a Delta Lake table");
-        hiveMinioDataLake.getHiveHadoop().runOnHive(format("DROP TABLE %s.%s", SCHEMA, tableName));
+        hiveHadoop.runOnHive(format("DROP TABLE %s.%s", SCHEMA, tableName));
     }
 
     @Test
diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeAdlsConnectorSmokeTest.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeAdlsConnectorSmokeTest.java
index 7948a2ad6e11..3f6080a2b16a 100644
--- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeAdlsConnectorSmokeTest.java
+++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeAdlsConnectorSmokeTest.java
@@ -23,7 +23,6 @@
 import com.google.common.io.Resources;
 import com.google.common.reflect.ClassPath;
 import io.trino.plugin.hive.containers.HiveHadoop;
-import io.trino.plugin.hive.containers.HiveMinioDataLake;
 import io.trino.testing.QueryRunner;
 import org.testng.annotations.AfterClass;
 import org.testng.annotations.Parameters;
@@ -44,11 +43,13 @@
 import static com.google.common.collect.ImmutableList.toImmutableList;
 import static com.google.common.collect.ImmutableSet.toImmutableSet;
 import static io.trino.plugin.deltalake.DeltaLakeQueryRunner.requiredNonEmptySystemProperty;
+import static io.trino.plugin.hive.containers.HiveHadoop.HIVE3_IMAGE;
 import static java.lang.String.format;
 import static java.nio.charset.StandardCharsets.UTF_8;
 import static java.util.Objects.requireNonNull;
 import static java.util.regex.Matcher.quoteReplacement;
 import static org.assertj.core.api.Assertions.assertThat;
+import static org.testcontainers.containers.Network.newNetwork;
 
 public class TestDeltaLakeAdlsConnectorSmokeTest
         extends BaseDeltaLakeConnectorSmokeTest
@@ -76,7 +77,7 @@ public TestDeltaLakeAdlsConnectorSmokeTest(String container, String account, String accessKey)
     }
 
     @Override
-    protected HiveMinioDataLake createHiveMinioDataLake()
+    protected HiveHadoop createHiveHadoop()
             throws Exception
     {
         String abfsSpecificCoreSiteXmlContent = Resources.toString(Resources.getResource("io/trino/plugin/deltalake/hdp3.1-core-site.xml.abfs-template"), UTF_8)
@@ -88,12 +89,13 @@ protected HiveMinioDataLake createHiveMinioDataLake()
         hadoopCoreSiteXmlTempFile.toFile().deleteOnExit();
         Files.writeString(hadoopCoreSiteXmlTempFile, abfsSpecificCoreSiteXmlContent);
 
-        HiveMinioDataLake hiveMinioDataLake = new HiveMinioDataLake(
-                bucketName,
-                ImmutableMap.of("/etc/hadoop/conf/core-site.xml", hadoopCoreSiteXmlTempFile.normalize().toAbsolutePath().toString()),
-                HiveHadoop.HIVE3_IMAGE);
-        hiveMinioDataLake.start();
-        return hiveMinioDataLake; // closed by superclass
+        HiveHadoop hiveHadoop = HiveHadoop.builder()
+                .withImage(HIVE3_IMAGE)
+                .withNetwork(closeAfterClass(newNetwork()))
+                .withFilesToMount(ImmutableMap.of("/etc/hadoop/conf/core-site.xml", hadoopCoreSiteXmlTempFile.normalize().toAbsolutePath().toString()))
+                .build();
+        hiveHadoop.start();
+        return hiveHadoop; // closed by superclass
     }
 
     @Override
@@ -115,7 +117,7 @@ protected Map<String, String> deltaStorageConfiguration()
     public void removeTestData()
     {
         if (adlsDirectory != null) {
-            hiveMinioDataLake.getHiveHadoop().executeInContainerFailOnError("hadoop", "fs", "-rm", "-f", "-r", adlsDirectory);
+            hiveHadoop.executeInContainerFailOnError("hadoop", "fs", "-rm", "-f", "-r", adlsDirectory);
         }
         assertThat(azureContainerClient.listBlobsByHierarchy(bucketName + "/").stream()).hasSize(0);
     }
diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeGcsConnectorSmokeTest.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeGcsConnectorSmokeTest.java
index e983795cba2c..053680653c39 100644
--- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeGcsConnectorSmokeTest.java
+++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeGcsConnectorSmokeTest.java
@@ -27,7 +27,7 @@
 import io.trino.hadoop.ConfigurationInstantiator;
 import io.trino.hdfs.gcs.GoogleGcsConfigurationInitializer;
 import io.trino.hdfs.gcs.HiveGcsConfig;
-import io.trino.plugin.hive.containers.HiveMinioDataLake;
+import io.trino.plugin.hive.containers.HiveHadoop;
 import io.trino.testing.QueryRunner;
 import org.apache.hadoop.conf.Configuration;
 import org.testng.annotations.AfterClass;
@@ -53,6 +53,7 @@
 import static java.nio.charset.StandardCharsets.UTF_8;
 import static java.util.Objects.requireNonNull;
 import static java.util.regex.Matcher.quoteReplacement;
+import static org.testcontainers.containers.Network.newNetwork;
 
 /**
  * This test requires these variables to connect to GCS:
@@ -114,7 +115,7 @@ public void removeTestData()
     }
 
     @Override
-    protected HiveMinioDataLake createHiveMinioDataLake()
+    protected HiveHadoop createHiveHadoop()
             throws Exception
     {
         String gcpSpecificCoreSiteXmlContent = Resources.toString(Resources.getResource("io/trino/plugin/deltalake/hdp3.1-core-site.xml.gcs-template"), UTF_8)
@@ -124,14 +125,15 @@ protected HiveMinioDataLake createHiveMinioDataLake()
         hadoopCoreSiteXmlTempFile.toFile().deleteOnExit();
         Files.writeString(hadoopCoreSiteXmlTempFile, gcpSpecificCoreSiteXmlContent);
 
-        HiveMinioDataLake dataLake = new HiveMinioDataLake(
-                bucketName,
-                ImmutableMap.of(
+        HiveHadoop hiveHadoop = HiveHadoop.builder()
+                .withImage(HIVE3_IMAGE)
+                .withNetwork(closeAfterClass(newNetwork()))
+                .withFilesToMount(ImmutableMap.of(
                         "/etc/hadoop/conf/core-site.xml", hadoopCoreSiteXmlTempFile.normalize().toAbsolutePath().toString(),
-                        "/etc/hadoop/conf/gcp-credentials.json", gcpCredentialsFile.toAbsolutePath().toString()),
-                HIVE3_IMAGE);
-        dataLake.start();
-        return dataLake; // closed by superclass
+                        "/etc/hadoop/conf/gcp-credentials.json", gcpCredentialsFile.toAbsolutePath().toString()))
+                .build();
+        hiveHadoop.start();
+        return hiveHadoop; // closed by superclass
     }
 
     @Override