Commit 5771df6

Change scope of overridable methods in BaseDeltaLakeConnectorSmokeTest to protected
homar authored and findepi committed May 31, 2022
1 parent d4ea782 commit 5771df6
Showing 5 changed files with 17 additions and 17 deletions.
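
Why the change matters: in Java, a method with default (package-private) visibility is not inherited by subclasses in other packages, so such a subclass can neither implement nor override it; declaring the overridable hooks protected makes them available to any extending test class, whatever its package. A minimal two-file sketch of the rule, using hypothetical package, class, and method names (not taken from this commit):

// base/Base.java (hypothetical)
package base;

public abstract class Base
{
    // Before: package-private. A concrete subclass outside package "base"
    // does not inherit this member and cannot implement it, so the
    // subclass fails to compile.
    // abstract String getLocation(String bucket);

    // After: protected. Inherited and overridable from any package.
    protected abstract String getLocation(String bucket);
}

// other/Sub.java (hypothetical)
package other;

import base.Base;

public class Sub
        extends Base
{
    @Override
    protected String getLocation(String bucket)
    {
        return "s3://" + bucket;
    }
}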
BaseDeltaLakeAwsConnectorSmokeTest.java

@@ -29,7 +29,7 @@ public abstract class BaseDeltaLakeAwsConnectorSmokeTest
     protected DockerizedMinioDataLake dockerizedMinioDataLake;
 
     @Override
-    DockerizedDataLake createDockerizedDataLake()
+    protected DockerizedDataLake createDockerizedDataLake()
     {
         dockerizedMinioDataLake = new DockerizedMinioDataLake(
                 bucketName,
@@ -40,7 +40,7 @@ DockerizedDataLake createDockerizedDataLake()
     }
 
     @Override
-    void createTableFromResources(String table, String resourcePath, QueryRunner queryRunner)
+    protected void createTableFromResources(String table, String resourcePath, QueryRunner queryRunner)
     {
         dockerizedMinioDataLake.copyResources(resourcePath, table);
         queryRunner.execute(format("CREATE TABLE %s (dummy int) WITH (location = '%s')",
@@ -49,13 +49,13 @@ void createTableFromResources(String table, String resourcePath, QueryRunner queryRunner)
     }
 
     @Override
-    String getLocationForTable(String bucketName, String tableName)
+    protected String getLocationForTable(String bucketName, String tableName)
     {
         return format("s3://%s/%s", bucketName, tableName);
     }
 
     @Override
-    List<String> getTableFiles(String tableName)
+    protected List<String> getTableFiles(String tableName)
     {
         return dockerizedMinioDataLake.listFiles(tableName).stream()
                 .map(path -> format("s3://%s/%s", bucketName, path))
BaseDeltaLakeConnectorSmokeTest.java

@@ -104,19 +104,19 @@ public abstract class BaseDeltaLakeConnectorSmokeTest
 
     protected DockerizedDataLake dockerizedDataLake;
 
-    abstract DockerizedDataLake createDockerizedDataLake()
+    protected abstract DockerizedDataLake createDockerizedDataLake()
             throws Exception;
 
-    abstract QueryRunner createDeltaLakeQueryRunner(Map<String, String> connectorProperties)
+    protected abstract QueryRunner createDeltaLakeQueryRunner(Map<String, String> connectorProperties)
             throws Exception;
 
-    abstract void createTableFromResources(String table, String resourcePath, QueryRunner queryRunner);
+    protected abstract void createTableFromResources(String table, String resourcePath, QueryRunner queryRunner);
 
-    abstract String getLocationForTable(String bucketName, String tableName);
+    protected abstract String getLocationForTable(String bucketName, String tableName);
 
-    abstract List<String> getTableFiles(String tableName);
+    protected abstract List<String> getTableFiles(String tableName);
 
-    abstract List<String> listCheckpointFiles(String transactionLogDirectory);
+    protected abstract List<String> listCheckpointFiles(String transactionLogDirectory);
 
     @Override
     protected QueryRunner createQueryRunner()
TestDeltaLakeAdlsConnectorSmokeTest.java

@@ -77,7 +77,7 @@ public TestDeltaLakeAdlsConnectorSmokeTest(String container, String account, Str
     }
 
     @Override
-    DockerizedDataLake createDockerizedDataLake()
+    protected DockerizedDataLake createDockerizedDataLake()
             throws Exception
     {
         String abfsSpecificCoreSiteXmlContent = Resources.toString(Resources.getResource("io/trino/plugin/deltalake/hdp3.1-core-site.xml.abfs-template"), UTF_8)
@@ -96,7 +96,7 @@ DockerizedDataLake createDockerizedDataLake()
     }
 
     @Override
-    QueryRunner createDeltaLakeQueryRunner(Map<String, String> connectorProperties)
+    protected QueryRunner createDeltaLakeQueryRunner(Map<String, String> connectorProperties)
             throws Exception
     {
         return createAbfsDeltaLakeQueryRunner(DELTA_CATALOG, SCHEMA, ImmutableMap.of(), connectorProperties, dockerizedDataLake.getTestingHadoop());
@@ -118,7 +118,7 @@ public void removeTestData()
     }
 
     @Override
-    void createTableFromResources(String table, String resourcePath, QueryRunner queryRunner)
+    protected void createTableFromResources(String table, String resourcePath, QueryRunner queryRunner)
     {
         String targetDirectory = bucketName + "/" + table;
 
@@ -142,13 +142,13 @@ void createTableFromResources(String table, String resourcePath, QueryRunner queryRunner)
     }
 
     @Override
-    String getLocationForTable(String bucketName, String tableName)
+    protected String getLocationForTable(String bucketName, String tableName)
     {
         return bucketUrl() + tableName;
     }
 
     @Override
-    List<String> getTableFiles(String tableName)
+    protected List<String> getTableFiles(String tableName)
     {
         return listAllFilesRecursive(tableName);
     }
TestDeltaLakeConnectorSmokeTest.java

@@ -47,7 +47,7 @@ public class TestDeltaLakeConnectorSmokeTest
     private static final ObjectMapper OBJECT_MAPPER = new ObjectMapperProvider().get();
 
     @Override
-    QueryRunner createDeltaLakeQueryRunner(Map<String, String> connectorProperties)
+    protected QueryRunner createDeltaLakeQueryRunner(Map<String, String> connectorProperties)
            throws Exception
    {
        verify(!new ParquetWriterConfig().isParquetOptimizedWriterEnabled(), "This test assumes the optimized Parquet writer is disabled by default");
TestDeltaLakeOptimizedWriterConnectorSmokeTest.java

@@ -24,7 +24,7 @@ public class TestDeltaLakeOptimizedWriterConnectorSmokeTest
         extends BaseDeltaLakeAwsConnectorSmokeTest
 {
     @Override
-    QueryRunner createDeltaLakeQueryRunner(Map<String, String> connectorProperties)
+    protected QueryRunner createDeltaLakeQueryRunner(Map<String, String> connectorProperties)
             throws Exception
     {
         return DeltaLakeQueryRunner.createS3DeltaLakeQueryRunner(
