Record connectors used in delta lake test environments
nineinchnick authored and ebyhr committed May 26, 2022
1 parent 9f19e71 commit d8e99b0
Showing 5 changed files with 30 additions and 44 deletions.
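
The pattern in every file below is the same: each environment provider stops copying catalog properties files into individual containers by hand (`withCopyFileToContainer` on the coordinator and workers) and instead calls a builder-level `addConnector` helper, which both installs the catalog file and records the connector name — the "record" in the commit title. The helper itself is not part of this diff; the following self-contained Java sketch only illustrates the pattern the call sites imply. The class name, method bodies, and catalog directory here are assumptions, not Trino's actual Environment.Builder code.

    import java.util.LinkedHashSet;
    import java.util.Set;

    // Illustrative sketch only: the real helper is Environment.Builder.addConnector(...)
    // in trino-product-tests-launcher, whose body is not shown in this commit.
    public class ConnectorRecordingBuilder
    {
        private static final String CATALOG_DIR = "/etc/trino/catalog"; // assumed target directory

        private final Set<String> configuredConnectors = new LinkedHashSet<>();

        // Two-argument form: derive the in-container path from the connector name.
        public ConnectorRecordingBuilder addConnector(String connectorName, String hostConfigPath)
        {
            return addConnector(connectorName, hostConfigPath, CATALOG_DIR + "/" + connectorName + ".properties");
        }

        // Three-argument form: for when the catalog file name differs from the
        // connector name, e.g. connector "delta-lake" configured via delta.properties.
        public ConnectorRecordingBuilder addConnector(String connectorName, String hostConfigPath, String containerPath)
        {
            copyIntoAllTrinoContainers(hostConfigPath, containerPath);
            configuredConnectors.add(connectorName); // the "record" part of the commit title
            return this;
        }

        public Set<String> getConfiguredConnectors()
        {
            return configuredConnectors;
        }

        private void copyIntoAllTrinoContainers(String hostPath, String containerPath)
        {
            // Stand-in for Testcontainers' withCopyFileToContainer(...) applied to the
            // coordinator and every worker container; omitted here for brevity.
            System.out.printf("copy %s -> %s on all Trino containers%n", hostPath, containerPath);
        }
    }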
@@ -58,9 +58,12 @@ public void extendEnvironment(Environment.Builder builder)
                 .withEnv("HIVE_METASTORE_URI", hiveMetastoreUri)
                 .withEnv("DATABRICKS_TEST_JDBC_URL", databricksTestJdbcUrl)
                 .withEnv("DATABRICKS_TEST_LOGIN", databricksTestLogin)
-                .withEnv("DATABRICKS_TEST_TOKEN", databricksTestToken)
-                .withCopyFileToContainer(forHostPath(configDir.getPath("hive.properties")), CONTAINER_PRESTO_ETC + "/catalog/hive.properties")
-                .withCopyFileToContainer(forHostPath(configDir.getPath("delta.properties")), CONTAINER_PRESTO_ETC + "/catalog/delta.properties"));
+                .withEnv("DATABRICKS_TEST_TOKEN", databricksTestToken));
+        builder.addConnector("hive", forHostPath(configDir.getPath("hive.properties")));
+        builder.addConnector(
+                "delta-lake",
+                forHostPath(configDir.getPath("delta.properties")),
+                CONTAINER_PRESTO_ETC + "/catalog/delta.properties");
 
         builder.configureContainer(TESTS, container -> exportAWSCredentials(container)
                 .withEnv("S3_BUCKET", s3Bucket)

@@ -14,7 +14,6 @@
 package io.trino.tests.product.launcher.env.environment;
 
 import io.trino.tests.product.launcher.docker.DockerFiles;
-import io.trino.tests.product.launcher.env.DockerContainer;
 import io.trino.tests.product.launcher.env.Environment;
 import io.trino.tests.product.launcher.env.EnvironmentProvider;
 import io.trino.tests.product.launcher.env.common.Hadoop;
@@ -24,10 +23,6 @@
 
 import javax.inject.Inject;
 
-import static io.trino.tests.product.launcher.env.EnvironmentContainers.COORDINATOR;
-import static io.trino.tests.product.launcher.env.EnvironmentContainers.WORKER;
-import static io.trino.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_HIVE_PROPERTIES;
-import static io.trino.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_ICEBERG_PROPERTIES;
 import static io.trino.tests.product.launcher.env.common.Standard.CONTAINER_PRESTO_ETC;
 import static java.util.Objects.requireNonNull;
 import static org.testcontainers.utility.MountableFile.forHostPath;
@@ -39,35 +34,24 @@
 public class EnvMultinodeMinioDataLake
         extends EnvironmentProvider
 {
-    private final DockerFiles dockerFiles;
+    private final DockerFiles.ResourceProvider configDir;
 
     @Inject
     public EnvMultinodeMinioDataLake(StandardMultinode standardMultinode, Hadoop hadoop, Minio minio, DockerFiles dockerFiles)
     {
         super(standardMultinode, hadoop, minio);
-        this.dockerFiles = requireNonNull(dockerFiles, "dockerFiles is null");
+        this.configDir = requireNonNull(dockerFiles, "dockerFiles is null").getDockerFilesHostDirectory("conf/environment/singlenode-minio-data-lake");
     }
 
     @Override
     public void extendEnvironment(Environment.Builder builder)
     {
-        builder.configureContainer(COORDINATOR, this::configureTrinoContainer);
-        builder.configureContainer(WORKER, this::configureTrinoContainer);
-    }
-
-    private void configureTrinoContainer(DockerContainer container)
-    {
-        container.withCopyFileToContainer(
-                forHostPath(dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-minio-data-lake/hive.properties")),
-                CONTAINER_PRESTO_HIVE_PROPERTIES);
-        container.withCopyFileToContainer(
-                forHostPath(dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-minio-data-lake/delta.properties")),
+        builder.addConnector("hive", forHostPath(configDir.getPath("hive.properties")));
+        builder.addConnector(
+                "delta-lake",
+                forHostPath(configDir.getPath("delta.properties")),
                 CONTAINER_PRESTO_ETC + "/catalog/delta.properties");
-        container.withCopyFileToContainer(
-                forHostPath(dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-minio-data-lake/iceberg.properties")),
-                CONTAINER_PRESTO_ICEBERG_PROPERTIES);
-        container.withCopyFileToContainer(
-                forHostPath(dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-minio-data-lake/memory.properties")),
-                CONTAINER_PRESTO_ETC + "/catalog/memory.properties");
+        builder.addConnector("iceberg", forHostPath(configDir.getPath("iceberg.properties")));
+        builder.addConnector("memory", forHostPath(configDir.getPath("memory.properties")));
     }
 }
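
Note the two overloads in use above: the two-argument `addConnector` suffices for "hive", "iceberg", and "memory", whose catalog files are already named `<connector>.properties`, while "delta-lake" needs the explicit three-argument form because its catalog file is `delta.properties` (the catalog is named `delta`, but the connector is `delta-lake`). That the two-argument form defaults the target path this way is an inference from these call sites, not something shown in the diff.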

@@ -23,7 +23,6 @@
 
 import javax.inject.Inject;
 
-import static io.trino.tests.product.launcher.env.EnvironmentContainers.COORDINATOR;
 import static io.trino.tests.product.launcher.env.common.Standard.CONTAINER_PRESTO_ETC;
 import static java.util.Objects.requireNonNull;
 import static org.testcontainers.utility.MountableFile.forHostPath;
@@ -32,22 +31,21 @@
 public class EnvSinglenodeDeltaLakeKerberizedHdfs
         extends EnvironmentProvider
 {
-    private final DockerFiles dockerFiles;
+    private final DockerFiles.ResourceProvider configDir;
 
     @Inject
     public EnvSinglenodeDeltaLakeKerberizedHdfs(Standard standard, Hadoop hadoop, HadoopKerberos hadoopKerberos, DockerFiles dockerFiles)
     {
         super(standard, hadoop, hadoopKerberos);
-        this.dockerFiles = requireNonNull(dockerFiles, "dockerFiles is null");
+        this.configDir = requireNonNull(dockerFiles, "dockerFiles is null").getDockerFilesHostDirectory("conf/environment/singlenode-delta-lake-kerberized-hdfs");
     }
 
     @Override
     public void extendEnvironment(Environment.Builder builder)
     {
-        builder.configureContainer(COORDINATOR, dockerContainer -> {
-            dockerContainer.withCopyFileToContainer(
-                    forHostPath(dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-delta-lake-kerberized-hdfs/delta.properties")),
-                    CONTAINER_PRESTO_ETC + "/catalog/delta.properties");
-        });
+        builder.addConnector(
+                "delta-lake",
+                forHostPath(configDir.getPath("delta.properties")),
+                CONTAINER_PRESTO_ETC + "/catalog/delta.properties");
     }
 }

@@ -37,7 +37,6 @@
 import java.util.Set;
 
 import static io.trino.tests.product.launcher.docker.ContainerUtil.forSelectedPorts;
-import static io.trino.tests.product.launcher.env.EnvironmentContainers.COORDINATOR;
 import static io.trino.tests.product.launcher.env.EnvironmentContainers.HADOOP;
 import static io.trino.tests.product.launcher.env.EnvironmentContainers.TESTS;
 import static io.trino.tests.product.launcher.env.EnvironmentContainers.configureTempto;
@@ -94,9 +93,11 @@ public void extendEnvironment(Environment.Builder builder)
             container.setDockerImageName("ghcr.io/trinodb/testing/hdp3.1-hive:" + hadoopImagesVersion);
         });
 
-        builder.configureContainer(COORDINATOR, container -> container
-                .withCopyFileToContainer(forHostPath(configDir.getPath("hive.properties")), CONTAINER_PRESTO_ETC + "/catalog/hive.properties")
-                .withCopyFileToContainer(forHostPath(configDir.getPath("delta.properties")), CONTAINER_PRESTO_ETC + "/catalog/delta.properties"));
+        builder.addConnector("hive", forHostPath(configDir.getPath("hive.properties")));
+        builder.addConnector(
+                "delta-lake",
+                forHostPath(configDir.getPath("delta.properties")),
+                CONTAINER_PRESTO_ETC + "/catalog/delta.properties");
 
         builder.configureContainer(TESTS, dockerContainer -> {
             dockerContainer.withEnv("S3_BUCKET", s3Bucket)

@@ -33,26 +33,26 @@ public List<SuiteTestRun> getTestRuns(EnvironmentConfig config)
     {
         return ImmutableList.of(
                 testOnEnvironment(EnvMultinodeMinioDataLake.class)
-                        .withGroups("delta-lake-minio")
+                        .withGroups("configured_features", "delta-lake-minio")
                         .build(),
 
                 testOnEnvironment(EnvSinglenodeDeltaLakeKerberizedHdfs.class)
-                        .withGroups("delta-lake-hdfs")
+                        .withGroups("configured_features", "delta-lake-hdfs")
                         .build(),
                 //TODO enable the product tests against Databricks testing environment
                 // testOnEnvironment(EnvSinglenodeDeltaLakeDatabricks.class)
-                //         .withGroups("delta-lake-databricks")
+                //         .withGroups("configured_features", "delta-lake-databricks")
                 //         .withExcludedGroups("delta-lake-exclude-73")
                 //         .build(),
                 //
                 // testOnEnvironment(EnvSinglenodeDeltaLakeDatabricks91.class)
-                //         .withGroups("delta-lake-databricks")
+                //         .withGroups("configured_features", "delta-lake-databricks")
                 //         .build(),
 
                 testOnEnvironment(EnvSinglenodeDeltaLakeOss.class)
                         // TODO: make the list of tests run here as close to those run on SinglenodeDeltaLakeDatabricks
                         // e.g. replace `delta-lake-oss` group with `delta-lake-databricks` + any exclusions, if needed
-                        .withGroups("delta-lake-oss")
+                        .withGroups("configured_features", "delta-lake-oss")
                         .build());
     }
 }
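
Every enabled run now carries the `configured_features` group alongside its environment-specific group, which suggests a companion product test that compares the connectors recorded by `addConnector` against what the running cluster actually exposes. A minimal sketch of such a check, assuming the launcher passes the recorded names to the tests container in an environment variable — the CONFIGURED_CONNECTORS name, the JDBC URL, and the connector-to-catalog mapping below are all assumptions, not the test shipped with this commit:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    // Hypothetical check in the spirit of the "configured_features" group;
    // requires the Trino JDBC driver on the classpath.
    public class ConfiguredConnectorsCheck
    {
        public static void main(String[] args) throws Exception
        {
            // Assumed: the launcher exports the recorded connector names, e.g.
            // CONFIGURED_CONNECTORS=hive,delta-lake,iceberg,memory
            List<String> recorded = List.of(System.getenv("CONFIGURED_CONNECTORS").split(","));

            Set<String> catalogs = new HashSet<>();
            try (Connection connection = DriverManager.getConnection("jdbc:trino://presto-master:8080", "test", null);
                    Statement statement = connection.createStatement();
                    ResultSet resultSet = statement.executeQuery("SHOW CATALOGS")) {
                while (resultSet.next()) {
                    catalogs.add(resultSet.getString(1));
                }
            }

            for (String connector : recorded) {
                // Assumed mapping: each catalog is named after its properties file,
                // so connector "delta-lake" surfaces as catalog "delta".
                String catalog = connector.equals("delta-lake") ? "delta" : connector;
                if (!catalogs.contains(catalog)) {
                    throw new AssertionError("connector %s has no catalog; found %s".formatted(connector, catalogs));
                }
            }
        }
    }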
