Skip to content

Commit

Permalink
[HUDI-7197] Adding misc fixes related to table services testing (apa…
Browse files Browse the repository at this point in the history
  • Loading branch information
harsh1231 authored Dec 11, 2023
1 parent 16253db commit 08036ac
Show file tree
Hide file tree
Showing 4 changed files with 9 additions and 2 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -228,7 +228,7 @@ private HoodieCleanMetadata runClean(HoodieTable<T, I, K, O> table, HoodieInstan
throw new HoodieIOException("Failed to clean up after commit", e);
} finally {
if (!skipLocking) {
this.txnManager.endTransaction(Option.of(inflightInstant));
this.txnManager.endTransaction(Option.ofNullable(inflightInstant));
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -256,6 +256,7 @@ protected HoodieWriteMetadata<HoodieData<WriteStatus>> executeClustering(HoodieC
writeMetadata.setPartitionToReplaceFileIds(getPartitionToReplacedFileIds(clusteringPlan, writeMetadata));
commitOnAutoCommit(writeMetadata);
if (!writeMetadata.getCommitMetadata().isPresent()) {
LOG.info("Found empty commit metadata for clustering with instant time " + instantTime);
HoodieCommitMetadata commitMetadata = CommitUtils.buildMetadata(writeMetadata.getWriteStats().get(), writeMetadata.getPartitionToReplaceFileIds(),
extraMetadata, operationType, getSchemaToStoreInCommit(), getCommitActionType());
writeMetadata.setCommitMetadata(Option.of(commitMetadata));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -595,6 +595,7 @@ public static int retry(int maxRetryCount, CheckedSupplier<Integer> supplier, St
} while (ret != 0 && maxRetryCount-- > 0);
} catch (Throwable t) {
LOG.error(errorMessage, t);
throw new RuntimeException("Failed in retry", t);
}
return ret;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@
import org.apache.hudi.common.testutils.HoodieTestDataGenerator;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.metadata.HoodieTableMetadataUtil;
import org.apache.hudi.metadata.MetadataPartitionType;
import org.apache.hudi.testutils.SparkClientFunctionalTestHarness;
Expand Down Expand Up @@ -67,6 +68,7 @@
import static org.apache.hudi.utilities.UtilHelpers.SCHEDULE_AND_EXECUTE;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;

public class TestHoodieIndexer extends SparkClientFunctionalTestHarness implements SparkProvider {
Expand Down Expand Up @@ -204,7 +206,10 @@ public void testIndexerForExceptionWithNonFilesPartition() {
config.propsFilePath = propsPath;
// start the indexer and validate index building fails
HoodieIndexer indexer = new HoodieIndexer(jsc(), config);
assertEquals(-1, indexer.start(0));
Throwable cause = assertThrows(RuntimeException.class, () -> indexer.start(0))
.getCause();
assertTrue(cause instanceof HoodieException);
assertTrue(cause.getMessage().contains("Metadata table is not yet initialized"));

// validate table config
metaClient = reload(metaClient);
Expand Down

0 comments on commit 08036ac

Please sign in to comment.