
Commit

Use SystemEnvUtils#requireEnv
krvikash committed Dec 17, 2024
1 parent a10c1f2 commit a596b43
Showing 42 changed files with 128 additions and 158 deletions.
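
The commit replaces each module's private getRequiredEnvironmentVariable / environmentVariable helper with the shared io.trino.testing.SystemEnvUtils#requireEnv from trino-testing-services, which is why the two pom.xml files below gain that test-scoped dependency. A minimal sketch of what requireEnv presumably looks like, assuming it simply centralizes the removed helpers (the exact implementation lives in trino-testing-services):

package io.trino.testing;

import static java.util.Objects.requireNonNull;

// Hypothetical sketch only; mirrors the per-test helpers removed in this commit.
public final class SystemEnvUtils
{
    private SystemEnvUtils() {}

    // Returns the value of the given environment variable, failing fast when it is not set.
    public static String requireEnv(String variable)
    {
        return requireNonNull(System.getenv(variable), "Environment variable not set: " + variable);
    }
}

Call sites then shrink to a single lookup, for example requireEnv("S3_BUCKET") instead of requireNonNull(System.getenv("S3_BUCKET"), "Environment S3_BUCKET was not set").
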
6 changes: 6 additions & 0 deletions lib/trino-filesystem-azure/pom.xml
@@ -178,6 +178,12 @@
<scope>test</scope>
</dependency>

+<dependency>
+    <groupId>io.trino</groupId>
+    <artifactId>trino-testing-services</artifactId>
+    <scope>test</scope>
+</dependency>

<dependency>
<groupId>org.assertj</groupId>
<artifactId>assertj-core</artifactId>
@@ -53,11 +53,6 @@ public abstract class AbstractTestAzureFileSystem
{
private final EncryptionKey key = EncryptionKey.randomAes256();

-protected static String getRequiredEnvironmentVariable(String name)
-{
-    return requireNonNull(System.getenv(name), "Environment variable not set: " + name);
-}

protected enum AccountKind
{
HIERARCHICAL, FLAT
@@ -20,6 +20,7 @@
import java.io.IOException;

import static io.trino.filesystem.azure.AbstractTestAzureFileSystem.AccountKind.FLAT;
+import static io.trino.testing.SystemEnvUtils.requireEnv;

@TestInstance(Lifecycle.PER_CLASS)
class TestAzureFileSystemGen2Flat
@@ -29,6 +30,6 @@ class TestAzureFileSystemGen2Flat
void setup()
throws IOException
{
-initializeWithAccessKey(getRequiredEnvironmentVariable("ABFS_FLAT_ACCOUNT"), getRequiredEnvironmentVariable("ABFS_FLAT_ACCESS_KEY"), FLAT);
+initializeWithAccessKey(requireEnv("ABFS_FLAT_ACCOUNT"), requireEnv("ABFS_FLAT_ACCESS_KEY"), FLAT);
}
}
@@ -20,6 +20,7 @@
import java.io.IOException;

import static io.trino.filesystem.azure.AbstractTestAzureFileSystem.AccountKind.HIERARCHICAL;
+import static io.trino.testing.SystemEnvUtils.requireEnv;

@TestInstance(Lifecycle.PER_CLASS)
class TestAzureFileSystemGen2Hierarchical
@@ -29,6 +30,6 @@ class TestAzureFileSystemGen2Hierarchical
void setup()
throws IOException
{
-initializeWithAccessKey(getRequiredEnvironmentVariable("ABFS_HIERARCHICAL_ACCOUNT"), getRequiredEnvironmentVariable("ABFS_HIERARCHICAL_ACCESS_KEY"), HIERARCHICAL);
+initializeWithAccessKey(requireEnv("ABFS_HIERARCHICAL_ACCOUNT"), requireEnv("ABFS_HIERARCHICAL_ACCESS_KEY"), HIERARCHICAL);
}
}
@@ -20,6 +20,7 @@
import java.io.IOException;

import static io.trino.filesystem.azure.AbstractTestAzureFileSystem.AccountKind.FLAT;
+import static io.trino.testing.SystemEnvUtils.requireEnv;

@TestInstance(Lifecycle.PER_CLASS)
public class TestAzureFileSystemOAuthGen2Flat
@@ -29,10 +30,10 @@ public class TestAzureFileSystemOAuthGen2Flat
void setup()
throws IOException
{
-String account = getRequiredEnvironmentVariable("ABFS_FLAT_ACCOUNT");
-String tenantId = getRequiredEnvironmentVariable("ABFS_OAUTH_TENANT_ID");
-String clientId = getRequiredEnvironmentVariable("ABFS_OAUTH_CLIENT_ID");
-String clientSecret = getRequiredEnvironmentVariable("ABFS_OAUTH_CLIENT_SECRET");
+String account = requireEnv("ABFS_FLAT_ACCOUNT");
+String tenantId = requireEnv("ABFS_OAUTH_TENANT_ID");
+String clientId = requireEnv("ABFS_OAUTH_CLIENT_ID");
+String clientSecret = requireEnv("ABFS_OAUTH_CLIENT_SECRET");
initializeWithOAuth(account, tenantId, clientId, clientSecret, FLAT);
}
}
@@ -20,6 +20,7 @@
import java.io.IOException;

import static io.trino.filesystem.azure.AbstractTestAzureFileSystem.AccountKind.HIERARCHICAL;
+import static io.trino.testing.SystemEnvUtils.requireEnv;

@TestInstance(Lifecycle.PER_CLASS)
public class TestAzureFileSystemOAuthGen2Hierarchical
@@ -29,10 +30,10 @@ public class TestAzureFileSystemOAuthGen2Hierarchical
void setup()
throws IOException
{
-String account = getRequiredEnvironmentVariable("ABFS_HIERARCHICAL_ACCOUNT");
-String tenantId = getRequiredEnvironmentVariable("ABFS_OAUTH_TENANT_ID");
-String clientId = getRequiredEnvironmentVariable("ABFS_OAUTH_CLIENT_ID");
-String clientSecret = getRequiredEnvironmentVariable("ABFS_OAUTH_CLIENT_SECRET");
+String account = requireEnv("ABFS_HIERARCHICAL_ACCOUNT");
+String tenantId = requireEnv("ABFS_OAUTH_TENANT_ID");
+String clientId = requireEnv("ABFS_OAUTH_CLIENT_ID");
+String clientSecret = requireEnv("ABFS_OAUTH_CLIENT_SECRET");
initializeWithOAuth(account, tenantId, clientId, clientSecret, HIERARCHICAL);
}
}
@@ -35,7 +35,6 @@
import static com.google.cloud.storage.Storage.BlobTargetOption.doesNotExist;
import static io.trino.filesystem.encryption.EncryptionKey.randomAes256;
import static java.nio.charset.StandardCharsets.UTF_8;
-import static java.util.Objects.requireNonNull;
import static org.assertj.core.api.Assertions.assertThat;

@TestInstance(TestInstance.Lifecycle.PER_CLASS)
@@ -48,11 +47,6 @@ public abstract class AbstractTestGcsFileSystem
private Storage storage;
private GcsFileSystemFactory gcsFileSystemFactory;

-protected static String getRequiredEnvironmentVariable(String name)
-{
-    return requireNonNull(System.getenv(name), "Environment variable not set: " + name);
-}

protected void initialize(String gcpCredentialKey)
throws IOException
{
@@ -20,6 +20,7 @@

import java.io.IOException;

+import static io.trino.testing.SystemEnvUtils.requireEnv;
import static java.nio.charset.StandardCharsets.UTF_8;

@TestInstance(TestInstance.Lifecycle.PER_CLASS)
@@ -30,7 +31,7 @@ public class TestGcsFileSystem
void setup()
throws IOException
{
-initialize(getRequiredEnvironmentVariable("GCP_CREDENTIALS_KEY"));
+initialize(requireEnv("GCP_CREDENTIALS_KEY"));
}

@Test
6 changes: 6 additions & 0 deletions lib/trino-filesystem-s3/pom.xml
@@ -239,6 +239,12 @@
<scope>test</scope>
</dependency>

+<dependency>
+    <groupId>io.trino</groupId>
+    <artifactId>trino-testing-services</artifactId>
+    <scope>test</scope>
+</dependency>

<dependency>
<groupId>org.assertj</groupId>
<artifactId>assertj-core</artifactId>
@@ -29,7 +29,7 @@
import java.util.List;

import static com.google.common.collect.Iterables.getOnlyElement;
-import static java.util.Objects.requireNonNull;
+import static io.trino.testing.SystemEnvUtils.requireEnv;
import static org.assertj.core.api.Assertions.assertThat;

public class TestS3FileSystemAwsS3
@@ -43,10 +43,10 @@ public class TestS3FileSystemAwsS3
@Override
protected void initEnvironment()
{
-accessKey = environmentVariable("AWS_ACCESS_KEY_ID");
-secretKey = environmentVariable("AWS_SECRET_ACCESS_KEY");
-region = environmentVariable("AWS_REGION");
-bucket = environmentVariable("EMPTY_S3_BUCKET");
+accessKey = requireEnv("AWS_ACCESS_KEY_ID");
+secretKey = requireEnv("AWS_SECRET_ACCESS_KEY");
+region = requireEnv("AWS_REGION");
+bucket = requireEnv("EMPTY_S3_BUCKET");
}

@Override
@@ -75,11 +75,6 @@ protected S3FileSystemFactory createS3FileSystemFactory()
.setStreamingPartSize(DataSize.valueOf("5.5MB")), new S3FileSystemStats());
}

-private static String environmentVariable(String name)
-{
-    return requireNonNull(System.getenv(name), "Environment variable not set: " + name);
-}

@Test
void testS3FileIteratorFileEntryTags()
throws IOException
@@ -33,7 +33,7 @@
import java.util.function.Function;

import static io.trino.filesystem.s3.S3FileSystemConfig.S3SseType.CUSTOMER;
-import static java.util.Objects.requireNonNull;
+import static io.trino.testing.SystemEnvUtils.requireEnv;

public class TestS3FileSystemAwsS3WithSseCustomerKey
extends AbstractTestS3FileSystem
@@ -49,10 +49,10 @@ public class TestS3FileSystemAwsS3WithSseCustomerKey
@Override
protected void initEnvironment()
{
-accessKey = environmentVariable("AWS_ACCESS_KEY_ID");
-secretKey = environmentVariable("AWS_SECRET_ACCESS_KEY");
-region = environmentVariable("AWS_REGION");
-bucket = environmentVariable("EMPTY_S3_BUCKET");
+accessKey = requireEnv("AWS_ACCESS_KEY_ID");
+secretKey = requireEnv("AWS_SECRET_ACCESS_KEY");
+region = requireEnv("AWS_REGION");
+bucket = requireEnv("EMPTY_S3_BUCKET");
s3SseCustomerKey = S3SseCustomerKey.onAes256(CUSTOMER_KEY);
}

@@ -109,11 +109,6 @@ protected S3FileSystemFactory createS3FileSystemFactory()
new S3FileSystemStats());
}

-private static String environmentVariable(String name)
-{
-    return requireNonNull(System.getenv(name), "Environment variable not set: " + name);
-}

private static String generateCustomerKey()
{
try {
@@ -15,7 +15,7 @@

import org.apache.hadoop.conf.Configuration;

-import static java.util.Objects.requireNonNull;
+import static io.trino.testing.SystemEnvUtils.requireEnv;

/**
* Tests file system operations on AWS S3 storage.
@@ -31,8 +31,8 @@ public class TestTrinoS3FileSystemAwsS3

public TestTrinoS3FileSystemAwsS3()
{
-bucketName = requireNonNull(System.getenv("S3_BUCKET"), "Environment S3_BUCKET was not set");
-s3Endpoint = requireNonNull(System.getenv("S3_BUCKET_ENDPOINT"), "Environment S3_BUCKET_ENDPOINT was not set");
+bucketName = requireEnv("S3_BUCKET");
+s3Endpoint = requireEnv("S3_BUCKET_ENDPOINT");
}

@Override
@@ -24,8 +24,8 @@
import static com.google.common.io.MoreFiles.deleteRecursively;
import static com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE;
import static io.trino.plugin.hive.metastore.glue.TestingGlueHiveMetastore.createTestingGlueHiveMetastore;
+import static io.trino.testing.SystemEnvUtils.requireEnv;
import static io.trino.testing.TestingNames.randomNameSuffix;
-import static java.util.Objects.requireNonNull;
import static org.junit.jupiter.api.TestInstance.Lifecycle.PER_CLASS;

@TestInstance(PER_CLASS)
@@ -45,7 +45,7 @@ protected QueryRunner createQueryRunner()
schema = "test_tables_with_custom_location" + randomNameSuffix();
return DeltaLakeQueryRunner.builder(schema)
.addDeltaProperty("hive.metastore", "glue")
-.addDeltaProperty("hive.metastore.glue.region", requireNonNull(System.getenv("AWS_REGION"), "AWS_REGION is null"))
+.addDeltaProperty("hive.metastore.glue.region", requireEnv("AWS_REGION"))
.addDeltaProperty("hive.metastore.glue.default-warehouse-dir", warehouseDir.toUri().toString())
.build();
}
@@ -25,8 +25,8 @@
import static com.google.common.io.MoreFiles.deleteRecursively;
import static com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE;
import static io.trino.plugin.hive.metastore.glue.TestingGlueHiveMetastore.createTestingGlueHiveMetastore;
+import static io.trino.testing.SystemEnvUtils.requireEnv;
import static io.trino.testing.TestingNames.randomNameSuffix;
-import static java.util.Objects.requireNonNull;

public class TestDeltaLakeRegisterTableProcedureWithGlue
extends BaseDeltaLakeRegisterTableProcedureTest
@@ -44,7 +44,7 @@ protected QueryRunner createQueryRunner()
schema = "test_delta_lake_register_table" + randomNameSuffix();
return DeltaLakeQueryRunner.builder(schema)
.addDeltaProperty("hive.metastore", "glue")
-.addDeltaProperty("hive.metastore.glue.region", requireNonNull(System.getenv("AWS_REGION"), "AWS_REGION is null"))
+.addDeltaProperty("hive.metastore.glue.region", requireEnv("AWS_REGION"))
.addDeltaProperty("hive.metastore.glue.default-warehouse-dir", warehouseDir.toUri().toString())
.addDeltaProperty("delta.unique-table-location", "true")
.addDeltaProperty("delta.register-table-procedure.enabled", "true")
@@ -30,9 +30,9 @@
import static com.google.common.io.MoreFiles.deleteRecursively;
import static com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE;
import static io.trino.plugin.hive.metastore.glue.TestingGlueHiveMetastore.createTestingGlueHiveMetastore;
+import static io.trino.testing.SystemEnvUtils.requireEnv;
import static io.trino.testing.TestingNames.randomNameSuffix;
import static java.lang.String.format;
-import static java.util.Objects.requireNonNull;
import static org.junit.jupiter.api.TestInstance.Lifecycle.PER_CLASS;

@TestInstance(PER_CLASS)
@@ -52,7 +52,7 @@ protected QueryRunner createQueryRunner()
schema = "test_delta_lake_glue_views_" + randomNameSuffix();
return DeltaLakeQueryRunner.builder(schema)
.addDeltaProperty("hive.metastore", "glue")
-.addDeltaProperty("hive.metastore.glue.region", requireNonNull(System.getenv("AWS_REGION"), "AWS_REGION is null"))
+.addDeltaProperty("hive.metastore.glue.region", requireEnv("AWS_REGION"))
.addDeltaProperty("hive.metastore.glue.default-warehouse-dir", warehouseDir.toUri().toString())
.build();
}
@@ -25,15 +25,15 @@

import static com.google.common.collect.Iterables.getOnlyElement;
import static io.trino.plugin.hive.metastore.glue.TestingGlueHiveMetastore.createTestingGlueHiveMetastore;
-import static java.util.Objects.requireNonNull;
+import static io.trino.testing.SystemEnvUtils.requireEnv;
import static org.assertj.core.api.Assertions.assertThat;

public class TestDeltaS3AndGlueMetastoreTest
extends BaseS3AndGlueMetastoreTest
{
public TestDeltaS3AndGlueMetastoreTest()
{
-super("partitioned_by", "location", requireNonNull(System.getenv("S3_BUCKET"), "Environment variable not set: S3_BUCKET"));
+super("partitioned_by", "location", requireEnv("S3_BUCKET"));
}

@Override
@@ -33,10 +33,10 @@
import static io.trino.plugin.hive.TestingHiveUtils.getConnectorService;
import static io.trino.spi.security.SelectedRole.Type.ROLE;
import static io.trino.testing.MaterializedResult.resultBuilder;
+import static io.trino.testing.SystemEnvUtils.requireEnv;
import static io.trino.testing.TestingNames.randomNameSuffix;
import static io.trino.testing.TestingSession.testSessionBuilder;
import static java.lang.String.format;
-import static java.util.Objects.requireNonNull;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;

@@ -45,7 +45,7 @@ public class TestHiveS3AndGlueMetastoreTest
{
public TestHiveS3AndGlueMetastoreTest()
{
-super("partitioned_by", "external_location", requireNonNull(System.getenv("S3_BUCKET"), "Environment S3_BUCKET was not set"));
+super("partitioned_by", "external_location", requireEnv("S3_BUCKET"));
}

@Override
@@ -27,7 +27,7 @@

import static io.trino.plugin.hive.TestingThriftHiveMetastoreBuilder.testingThriftHiveMetastoreBuilder;
import static io.trino.plugin.hive.metastore.thrift.ThriftHttpMetastoreConfig.AuthenticationMode.BEARER;
-import static java.util.Objects.requireNonNull;
+import static io.trino.testing.SystemEnvUtils.requireEnv;
import static org.assertj.core.api.Assertions.assertThat;

final class TestUnityMetastore
@@ -36,9 +36,9 @@ final class TestUnityMetastore
void test()
throws Exception
{
-String databricksHost = requireNonNull(System.getenv("DATABRICKS_HOST"), "Environment variable not set: DATABRICKS_HOST");
-String databricksToken = requireNonNull(System.getenv("DATABRICKS_TOKEN"), "Environment variable not set: DATABRICKS_TOKEN");
-String databricksCatalogName = requireNonNull(System.getenv("DATABRICKS_UNITY_CATALOG_NAME"), "Environment variable not set: DATABRICKS_UNITY_CATALOG_NAME");
+String databricksHost = requireEnv("DATABRICKS_HOST");
+String databricksToken = requireEnv("DATABRICKS_TOKEN");
+String databricksCatalogName = requireEnv("DATABRICKS_UNITY_CATALOG_NAME");
URI metastoreUri = URI.create("https://%s:443/api/2.0/unity-hms-proxy/metadata" .formatted(databricksHost));

ThriftHttpMetastoreConfig config = new ThriftHttpMetastoreConfig()
@@ -57,10 +57,10 @@
import static io.trino.plugin.hive.metastore.glue.v1.converter.GlueToTrinoConverter.getTableParameters;
import static io.trino.plugin.hive.metastore.glue.v1.converter.GlueToTrinoConverter.getTableType;
import static io.trino.plugin.iceberg.IcebergTestUtils.checkParquetFileSorting;
+import static io.trino.testing.SystemEnvUtils.requireEnv;
import static io.trino.testing.TestingConnectorSession.SESSION;
import static io.trino.testing.TestingNames.randomNameSuffix;
import static java.lang.String.format;
-import static java.util.Objects.requireNonNull;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.junit.jupiter.api.TestInstance.Lifecycle.PER_CLASS;
@@ -82,7 +82,7 @@ public class TestIcebergGlueCatalogConnectorSmokeTest
public TestIcebergGlueCatalogConnectorSmokeTest()
{
super(FileFormat.PARQUET);
-this.bucketName = requireNonNull(System.getenv("S3_BUCKET"), "Environment S3_BUCKET was not set");
+this.bucketName = requireEnv("S3_BUCKET");
this.schemaName = "test_iceberg_smoke_" + randomNameSuffix();
glueClient = AWSGlueAsyncClientBuilder.defaultClient();
