hive test cleanup #21246

Merged
@@ -427,10 +427,8 @@ public WorkProcessor.TransformationState<PagesIndex> process(Page input)
                 processEmptyInput = false;
                 return WorkProcessor.TransformationState.ofResult(pagesIndex, false);
             }
-            else {
-                memoryContext.close();
-                return WorkProcessor.TransformationState.finished();
-            }
+            memoryContext.close();
+            return WorkProcessor.TransformationState.finished();
         }
 
         // there is input, so we are not interested in processing empty input
@@ -1496,7 +1496,7 @@ private static void createTestFileHive(
         configureCompression(jobConf, compressionCodec);
 
         File file = File.createTempFile("trino_test", "data");
-        file.delete();
+        verify(file.delete());
         try {
             FileSinkOperator.RecordWriter recordWriter = outputFormat.getHiveRecordWriter(
                     jobConf,
@@ -1538,7 +1538,7 @@ private static void createTestFileHive(
             }
         }
         finally {
-            file.delete();
+            verify(file.delete());
         }
     }

@@ -66,12 +66,8 @@
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
-import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.compress.CompressionCodec;
-import org.apache.hadoop.io.compress.CompressionCodecFactory;
 import org.apache.hadoop.mapred.FileSplit;
-import org.apache.hadoop.mapred.JobConf;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;
@@ -95,6 +91,7 @@
 import java.util.function.Supplier;
 import java.util.stream.Collectors;
 
+import static com.google.common.base.Verify.verify;
 import static com.google.common.collect.ImmutableList.toImmutableList;
 import static io.airlift.concurrent.Threads.daemonThreadsNamed;
 import static io.airlift.testing.Assertions.assertBetweenInclusive;
@@ -128,8 +125,6 @@
 import static org.apache.hadoop.hive.ql.io.orc.CompressionKind.ZLIB;
 import static org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector;
 import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaStringObjectInspector;
-import static org.apache.hadoop.mapreduce.lib.output.FileOutputFormat.COMPRESS_CODEC;
-import static org.apache.hadoop.mapreduce.lib.output.FileOutputFormat.COMPRESS_TYPE;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.joda.time.DateTimeZone.UTC;
 import static org.junit.jupiter.api.TestInstance.Lifecycle.PER_CLASS;
@@ -163,14 +158,14 @@ public void setUp()
             throws Exception
     {
         tempFile = File.createTempFile("trino_test_orc_page_source_memory_tracking", "orc");
-        tempFile.delete();
+        verify(tempFile.delete());
         testPreparer = new TestPreparer(tempFile.getAbsolutePath());
     }
 
     @AfterAll
     public void tearDown()
     {
-        tempFile.delete();
+        verify(tempFile.delete());
     }
 
     @Test
@@ -354,7 +349,7 @@ private void testMaxReadBytes(int rowCount)
         }
         List<TestColumn> testColumns = columnBuilder.build();
         File tempFile = File.createTempFile("trino_test_orc_page_source_max_read_bytes", "orc");
-        tempFile.delete();
+        verify(tempFile.delete());
 
         TestPreparer testPreparer = new TestPreparer(tempFile.getAbsolutePath(), testColumns, rowCount, rowCount);
         ConnectorPageSource pageSource = testPreparer.newPageSource(stats, session);
@@ -386,106 +381,105 @@ private void testMaxReadBytes(int rowCount)
             pageSource.close();
         }
         finally {
-            tempFile.delete();
+            verify(tempFile.delete());
         }
     }
 
     @Test
     public void testTableScanOperator()
             throws Exception
     {
         // Numbers used in assertions in this test may change when implementation is modified,
         // feel free to change them if they break in the future
 
         DriverContext driverContext = testPreparer.newDriverContext();
-        SourceOperator operator = testPreparer.newTableScanOperator(driverContext);
-
-        assertThat(driverContext.getMemoryUsage()).isEqualTo(0);
-
-        long memoryUsage = -1;
-        int totalRows = 0;
-        while (totalRows < 20000) {
-            assertThat(operator.isFinished()).isFalse();
-            Page page = operator.getOutput();
-            assertThat(page).isNotNull();
-            page.getBlock(1);
-            if (memoryUsage == -1) {
-                memoryUsage = driverContext.getMemoryUsage();
-                assertBetweenInclusive(memoryUsage, 460000L, 469999L);
-            }
-            else {
-                assertThat(driverContext.getMemoryUsage()).isEqualTo(memoryUsage);
-            }
-            totalRows += page.getPositionCount();
-        }
-
-        memoryUsage = -1;
-        while (totalRows < 40000) {
-            assertThat(operator.isFinished()).isFalse();
-            Page page = operator.getOutput();
-            assertThat(page).isNotNull();
-            page.getBlock(1);
-            if (memoryUsage == -1) {
-                memoryUsage = driverContext.getMemoryUsage();
-                assertBetweenInclusive(memoryUsage, 460000L, 469999L);
-            }
-            else {
-                assertThat(driverContext.getMemoryUsage()).isEqualTo(memoryUsage);
-            }
-            totalRows += page.getPositionCount();
-        }
-
-        memoryUsage = -1;
-        while (totalRows < NUM_ROWS) {
-            assertThat(operator.isFinished()).isFalse();
-            Page page = operator.getOutput();
-            assertThat(page).isNotNull();
-            page.getBlock(1);
-            if (memoryUsage == -1) {
-                memoryUsage = driverContext.getMemoryUsage();
-                assertBetweenInclusive(memoryUsage, 360000L, 369999L);
-            }
-            else {
-                assertThat(driverContext.getMemoryUsage()).isEqualTo(memoryUsage);
-            }
-            totalRows += page.getPositionCount();
-        }
-
-        assertThat(operator.isFinished()).isFalse();
-        assertThat(operator.getOutput()).isNull();
-        assertThat(operator.isFinished()).isTrue();
-        assertThat(driverContext.getMemoryUsage()).isEqualTo(0);
+        try (SourceOperator operator = testPreparer.newTableScanOperator(driverContext)) {
+            assertThat(driverContext.getMemoryUsage()).isEqualTo(0);
+
+            long memoryUsage = -1;
+            int totalRows = 0;
+            while (totalRows < 20000) {
+                assertThat(operator.isFinished()).isFalse();
+                Page page = operator.getOutput();
+                assertThat(page).isNotNull();
+                if (memoryUsage == -1) {
+                    memoryUsage = driverContext.getMemoryUsage();
+                    assertBetweenInclusive(memoryUsage, 460000L, 469999L);
+                }
+                else {
+                    assertThat(driverContext.getMemoryUsage()).isEqualTo(memoryUsage);
+                }
+                totalRows += page.getPositionCount();
+            }
+
+            memoryUsage = -1;
+            while (totalRows < 40000) {
+                assertThat(operator.isFinished()).isFalse();
+                Page page = operator.getOutput();
+                assertThat(page).isNotNull();
+                if (memoryUsage == -1) {
+                    memoryUsage = driverContext.getMemoryUsage();
+                    assertBetweenInclusive(memoryUsage, 460000L, 469999L);
+                }
+                else {
+                    assertThat(driverContext.getMemoryUsage()).isEqualTo(memoryUsage);
+                }
+                totalRows += page.getPositionCount();
+            }
+
+            memoryUsage = -1;
+            while (totalRows < NUM_ROWS) {
+                assertThat(operator.isFinished()).isFalse();
+                Page page = operator.getOutput();
+                assertThat(page).isNotNull();
+                if (memoryUsage == -1) {
+                    memoryUsage = driverContext.getMemoryUsage();
+                    assertBetweenInclusive(memoryUsage, 360000L, 369999L);
+                }
+                else {
+                    assertThat(driverContext.getMemoryUsage()).isEqualTo(memoryUsage);
+                }
+                totalRows += page.getPositionCount();
+            }
+
+            assertThat(operator.getOutput()).isNull();
+            assertThat(operator.isFinished()).isTrue();
+            assertThat(driverContext.getMemoryUsage()).isEqualTo(0);
+        }
     }
 
     @Test
     public void testScanFilterAndProjectOperator()
             throws Exception
     {
         // Numbers used in assertions in this test may change when implementation is modified,
         // feel free to change them if they break in the future
 
         DriverContext driverContext = testPreparer.newDriverContext();
-        SourceOperator operator = testPreparer.newScanFilterAndProjectOperator(driverContext);
-
-        assertThat(driverContext.getMemoryUsage()).isEqualTo(0);
-
-        int totalRows = 0;
-        while (totalRows < NUM_ROWS) {
-            assertThat(operator.isFinished()).isFalse();
-            Page page = operator.getOutput();
-            assertThat(page).isNotNull();
-
-            // memory usage varies depending on stripe alignment
-            long memoryUsage = driverContext.getMemoryUsage();
-            assertThat(memoryUsage < 1000 || (memoryUsage > 150_000 && memoryUsage < 630_000))
-                    .describedAs(format("Memory usage (%s) outside of bounds", memoryUsage))
-                    .isTrue();
-
-            totalRows += page.getPositionCount();
-        }
-
-        // done... in the current implementation finish is not set until output returns a null page
-        assertThat(operator.getOutput()).isNull();
-        assertThat(operator.isFinished()).isTrue();
-        assertBetweenInclusive(driverContext.getMemoryUsage(), 0L, 500L);
+        try (SourceOperator operator = testPreparer.newScanFilterAndProjectOperator(driverContext)) {
+            assertThat(driverContext.getMemoryUsage()).isEqualTo(0);
+
+            int totalRows = 0;
+            while (totalRows < NUM_ROWS) {
+                assertThat(operator.isFinished()).isFalse();
+                Page page = operator.getOutput();
+                assertThat(page).isNotNull();
+
+                // memory usage varies depending on stripe alignment
+                long memoryUsage = driverContext.getMemoryUsage();
+                assertThat(memoryUsage < 1000 || (memoryUsage > 150_000 && memoryUsage < 630_000))
+                        .describedAs(format("Memory usage (%s) outside of bounds", memoryUsage))
+                        .isTrue();
+
+                totalRows += page.getPositionCount();
+            }
+
+            // done... in the current implementation finish is not set until output returns a null page
+            assertThat(operator.getOutput()).isNull();
+            assertThat(operator.isFinished()).isTrue();
+            assertBetweenInclusive(driverContext.getMemoryUsage(), 0L, 500L);
+        }
     }
 
     private class TestPreparer
@@ -541,7 +535,7 @@ public TestPreparer(String tempFilePath, List<TestColumn> testColumns, int numRo
         columns = columnsBuilder.build();
         types = typesBuilder.build();
 
-        fileSplit = createTestFile(tempFilePath, serde, null, testColumns, numRows, stripeRows);
+        fileSplit = createTestFile(tempFilePath, serde, testColumns, numRows, stripeRows);
     }
 
     public long getFileSize()
@@ -644,7 +638,6 @@ private DriverContext newDriverContext()
     private static FileSplit createTestFile(
             String filePath,
             Serializer serializer,
-            String compressionCodec,
             List<TestColumn> testColumns,
             int numRows,
             int stripeRows)
@@ -669,14 +662,6 @@ private static FileSplit createTestFile(
.collect(Collectors.joining(",")));

serializer.initialize(CONFIGURATION, tableProperties);

JobConf jobConf = new JobConf(new Configuration(false));
if (compressionCodec != null) {
CompressionCodec codec = new CompressionCodecFactory(CONFIGURATION).getCodecByName(compressionCodec);
jobConf.set(COMPRESS_CODEC, codec.getClass().getName());
jobConf.set(COMPRESS_TYPE, SequenceFile.CompressionType.BLOCK.toString());
}

RecordWriter recordWriter = createRecordWriter(new Path(filePath), CONFIGURATION);

try {
@@ -48,7 +48,7 @@ public TestingHiveConnectorFactory(Path localFileSystemRootPath, Optional<HiveMe
     {
         this.metastore = requireNonNull(metastore, "metastore is null");
 
-        localFileSystemRootPath.toFile().mkdirs();
+        boolean ignored = localFileSystemRootPath.toFile().mkdirs();
         this.module = binder -> {
             newMapBinder(binder, String.class, TrinoFileSystemFactory.class)
                     .addBinding("local").toInstance(new LocalFileSystemFactory(localFileSystemRootPath));
@@ -36,7 +36,6 @@ final class TestFileHiveMetastore
             throws IOException
     {
         tempDir = createTempDirectory("test");
-        tempDir.toFile().mkdirs();
         LocalFileSystemFactory fileSystemFactory = new LocalFileSystemFactory(tempDir);
 
         metastore = new FileHiveMetastore(
@@ -18,6 +18,7 @@
 import java.io.IOException;
 import java.nio.file.Path;
 
+import static com.google.common.base.Verify.verify;
 import static com.google.common.io.MoreFiles.deleteRecursively;
 import static com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE;
 import static io.trino.plugin.hive.metastore.glue.TestingGlueHiveMetastore.createTestingGlueHiveMetastore;
@@ -33,7 +34,7 @@ final class TestGlueHiveMetastore
             throws IOException
     {
         tempDir = createTempDirectory("test");
-        tempDir.toFile().mkdirs();
+        verify(tempDir.toFile().mkdirs());
         metastore = createTestingGlueHiveMetastore(tempDir);
     }

@@ -66,8 +66,7 @@ public void setUp()
                 new HiveMetastoreConfig().isHideDeltaLakeTables(),
                 new FileHiveMetastoreConfig()
                         .setCatalogDirectory(tmpDir.toString())
-                        .setDisableLocationChecks(true)
-                        /*.setMetastoreUser("test")*/);
+                        .setDisableLocationChecks(true));
 
         metastore.createDatabase(Database.builder()
                 .setDatabaseName("default")
@@ -21,13 +21,15 @@

 import java.io.File;
 
+import static com.google.common.base.Verify.verify;
+
 public final class TestingFileHiveMetastore
 {
     private TestingFileHiveMetastore() {}
 
     public static FileHiveMetastore createTestingFileHiveMetastore(File catalogDirectory)
     {
-        catalogDirectory.mkdirs();
+        verify(catalogDirectory.mkdirs());
         return createTestingFileHiveMetastore(
                 new LocalFileSystemFactory(catalogDirectory.toPath()),
                 Location.of("local:///"));
@@ -47,7 +47,7 @@ public TestingIcebergConnectorFactory(
             Path localFileSystemRootPath,
             Optional<Module> icebergCatalogModule)
     {
-        localFileSystemRootPath.toFile().mkdirs();
+        boolean ignored = localFileSystemRootPath.toFile().mkdirs();
         this.icebergCatalogModule = requireNonNull(icebergCatalogModule, "icebergCatalogModule is null");
         this.module = binder -> {
             newMapBinder(binder, String.class, TrinoFileSystemFactory.class)