Commit

Revert "[ENG-4369] Adding mis fixes related with table services testi…
Browse files Browse the repository at this point in the history
…ng" (apache#482)
  • Loading branch information
harsh1231 authored Dec 11, 2023
1 parent 5f7b365 commit 16253db
Showing 7 changed files with 4 additions and 20 deletions.
@@ -228,7 +228,7 @@ private HoodieCleanMetadata runClean(HoodieTable<T, I, K, O> table, HoodieInstan
       throw new HoodieIOException("Failed to clean up after commit", e);
     } finally {
       if (!skipLocking) {
-        this.txnManager.endTransaction(Option.ofNullable(inflightInstant));
+        this.txnManager.endTransaction(Option.of(inflightInstant));
       }
     }
   }
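The one-line change above swaps Option.ofNullable back to Option.of when ending the transaction. Hudi's org.apache.hudi.common.util.Option follows the java.util.Optional convention here: of rejects null outright, while ofNullable maps null to an empty Option. The revert therefore reinstates the assumption that inflightInstant is non-null by the time the transaction ends. A minimal sketch of the distinction, using java.util.Optional as a stand-in:

```java
import java.util.Optional;

public class OptionSemantics {
    public static void main(String[] args) {
        String inflight = null;

        // ofNullable tolerates null and yields an empty Optional.
        Optional<String> lenient = Optional.ofNullable(inflight);
        System.out.println(lenient.isPresent()); // false

        // of rejects null with a NullPointerException, surfacing the
        // bug at the call site instead of somewhere downstream.
        try {
            Optional.of(inflight);
        } catch (NullPointerException e) {
            System.out.println("Option.of(null) fails fast");
        }
    }
}
```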
@@ -256,7 +256,6 @@ protected HoodieWriteMetadata<HoodieData<WriteStatus>> executeClustering(HoodieC
     writeMetadata.setPartitionToReplaceFileIds(getPartitionToReplacedFileIds(clusteringPlan, writeMetadata));
     commitOnAutoCommit(writeMetadata);
     if (!writeMetadata.getCommitMetadata().isPresent()) {
-      LOG.info("Found empty commit metadata for clustering with instant time " + instantTime);
       HoodieCommitMetadata commitMetadata = CommitUtils.buildMetadata(writeMetadata.getWriteStats().get(), writeMetadata.getPartitionToReplaceFileIds(),
           extraMetadata, operationType, getSchemaToStoreInCommit(), getCommitActionType());
       writeMetadata.setCommitMetadata(Option.of(commitMetadata));
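Only the log line is removed here; the surrounding fallback logic is unchanged: when auto-commit has not populated commit metadata, it is rebuilt from the accumulated write stats. A minimal sketch of that fill-in-if-absent pattern, with java.util.Optional standing in for Hudi's Option and a hypothetical buildFromWriteStats standing in for CommitUtils.buildMetadata:

```java
import java.util.Optional;

public class CommitMetadataFallback {
    record CommitMetadata(String payload) {}

    // Stand-in for writeMetadata: commit metadata may be absent when
    // auto-commit did not run or deferred the commit.
    static Optional<CommitMetadata> commitMetadata = Optional.empty();

    // Hypothetical builder standing in for CommitUtils.buildMetadata(...).
    static CommitMetadata buildFromWriteStats() {
        return new CommitMetadata("built from write stats");
    }

    public static void main(String[] args) {
        // Mirrors the diff: if auto-commit left no metadata, build it now.
        if (!commitMetadata.isPresent()) {
            commitMetadata = Optional.of(buildFromWriteStats());
        }
        System.out.println(commitMetadata.get().payload());
    }
}
```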
@@ -123,14 +123,6 @@ public HoodieTimeline filterCompletedAndCompactionInstants() {
         || s.getAction().equals(HoodieTimeline.COMPACTION_ACTION)), details);
   }
 
-  @Override
-  public HoodieTimeline filterCompactionInstants() {
-    return new HoodieDefaultTimeline(instants.stream().filter(s ->
-        s.getAction().equals(HoodieTimeline.COMPACTION_ACTION)
-            || (s.isCompleted() && s.getAction().equals(HoodieTimeline.COMMIT_ACTION))),
-        details);
-  }
-
   @Override
   public HoodieTimeline filterCompletedOrMajorOrMinorCompactionInstants() {
     return new HoodieDefaultTimeline(instants.stream().filter(s -> s.isCompleted()
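The revert deletes filterCompactionInstants from HoodieDefaultTimeline entirely. For reference, the removed method kept an instant if it was a compaction action, or a completed commit (completed compactions appear as commit actions on the timeline). A self-contained sketch of that predicate over a simplified instant model, not Hudi's actual types:

```java
import java.util.List;
import java.util.stream.Collectors;

public class TimelineFilterSketch {
    // Simplified stand-in for HoodieInstant: action plus completion state.
    record Instant(String action, boolean completed) {}

    static final String COMMIT_ACTION = "commit";
    static final String COMPACTION_ACTION = "compaction";

    // Mirrors the removed predicate: compaction actions, or completed commits.
    static List<Instant> filterCompactionInstants(List<Instant> instants) {
        return instants.stream()
                .filter(s -> s.action().equals(COMPACTION_ACTION)
                        || (s.completed() && s.action().equals(COMMIT_ACTION)))
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        List<Instant> timeline = List.of(
                new Instant(COMPACTION_ACTION, false), // kept: pending compaction
                new Instant(COMMIT_ACTION, true),      // kept: completed commit
                new Instant(COMMIT_ACTION, false));    // dropped: inflight commit
        System.out.println(filterCompactionInstants(timeline));
    }
}
```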
@@ -162,8 +162,6 @@ public interface HoodieTimeline extends Serializable {
    */
   HoodieTimeline filterCompletedAndCompactionInstants();
 
-  HoodieTimeline filterCompactionInstants();
-
   HoodieTimeline filterCompletedOrMajorOrMinorCompactionInstants();
 
   /**
@@ -279,7 +279,8 @@ public static List<HoodieInstant> getPendingCompactionInstantTimes(HoodieTableMe
    */
  public static Option<Pair<HoodieTimeline, HoodieInstant>> getDeltaCommitsSinceLatestCompaction(
      HoodieActiveTimeline activeTimeline) {
-    Option<HoodieInstant> lastCompaction = activeTimeline.filterCompactionInstants().filterCompletedInstants().lastInstant();
+    Option<HoodieInstant> lastCompaction = activeTimeline.getCommitTimeline()
+        .filterCompletedInstants().lastInstant();
     HoodieTimeline deltaCommits = activeTimeline.getDeltaCommitTimeline();
 
     HoodieInstant latestInstant;
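With filterCompactionInstants gone, getDeltaCommitsSinceLatestCompaction falls back to the latest completed instant on the commit timeline, presumably because on a merge-on-read table completed compactions are what surface as commit actions there. A simplified sketch of the reverted lookup; the Instant record and timestamps are illustrative stand-ins, not Hudi's types:

```java
import java.util.Comparator;
import java.util.List;
import java.util.Optional;

public class LastCompactionSketch {
    record Instant(String timestamp, String action, boolean completed) {}

    // Reverted lookup: latest completed instant on the commit timeline.
    // Filtering to "commit" actions mimics activeTimeline.getCommitTimeline().
    static Optional<Instant> lastCompaction(List<Instant> timeline) {
        return timeline.stream()
                .filter(s -> s.action().equals("commit"))
                .filter(Instant::completed)
                .max(Comparator.comparing(Instant::timestamp));
    }

    public static void main(String[] args) {
        List<Instant> instants = List.of(
                new Instant("001", "commit", true),
                new Instant("002", "deltacommit", true),
                new Instant("003", "commit", true),
                new Instant("004", "commit", false));
        System.out.println(lastCompaction(instants)); // instant 003
    }
}
```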
@@ -595,7 +595,6 @@ public static int retry(int maxRetryCount, CheckedSupplier<Integer> supplier, St
       } while (ret != 0 && maxRetryCount-- > 0);
     } catch (Throwable t) {
       LOG.error(errorMessage, t);
-      throw new RuntimeException("Failed in retry", t);
     }
     return ret;
   }
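After the revert, retry no longer wraps the Throwable in a RuntimeException: it logs the error and falls through, so failure is reported solely through the returned code and callers must check it. A self-contained sketch of the reverted control flow; CheckedSupplier's shape and the zero-means-success convention come from the hunk above, while the -1 initial value is an assumption (consistent with the test change further down):

```java
public class RetrySketch {
    @FunctionalInterface
    interface CheckedSupplier<T> {
        T get() throws Throwable;
    }

    // Reverted behavior: on any Throwable, log and return the last code
    // rather than rethrowing. 0 signals success, non-zero failure.
    static int retry(int maxRetryCount, CheckedSupplier<Integer> supplier, String errorMessage) {
        int ret = -1; // assumed initial value; failure code if supplier throws immediately
        try {
            do {
                ret = supplier.get();
            } while (ret != 0 && maxRetryCount-- > 0);
        } catch (Throwable t) {
            System.err.println(errorMessage + ": " + t);
            // No rethrow after the revert: the caller inspects ret instead.
        }
        return ret;
    }

    public static void main(String[] args) {
        int code = retry(2, () -> { throw new IllegalStateException("boom"); }, "operation failed");
        System.out.println(code); // -1: failure surfaced via the return code
    }
}
```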
@@ -33,7 +33,6 @@
 import org.apache.hudi.common.testutils.HoodieTestDataGenerator;
 import org.apache.hudi.common.util.Option;
 import org.apache.hudi.config.HoodieWriteConfig;
-import org.apache.hudi.exception.HoodieException;
 import org.apache.hudi.metadata.HoodieTableMetadataUtil;
 import org.apache.hudi.metadata.MetadataPartitionType;
 import org.apache.hudi.testutils.SparkClientFunctionalTestHarness;
@@ -68,7 +67,6 @@
 import static org.apache.hudi.utilities.UtilHelpers.SCHEDULE_AND_EXECUTE;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
-import static org.junit.jupiter.api.Assertions.assertThrows;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class TestHoodieIndexer extends SparkClientFunctionalTestHarness implements SparkProvider {
@@ -206,10 +204,7 @@ public void testIndexerForExceptionWithNonFilesPartition() {
     config.propsFilePath = propsPath;
     // start the indexer and validate index building fails
     HoodieIndexer indexer = new HoodieIndexer(jsc(), config);
-    Throwable cause = assertThrows(RuntimeException.class, () -> indexer.start(0))
-        .getCause();
-    assertTrue(cause instanceof HoodieException);
-    assertTrue(cause.getMessage().contains("Metadata table is not yet initialized"));
+    assertEquals(-1, indexer.start(0));
 
     // validate table config
     metaClient = reload(metaClient);
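This test tracks the retry change above: since the indexer's failure no longer propagates as a RuntimeException wrapping a HoodieException, the test now asserts the -1 return code directly. A JUnit 5 sketch contrasting the two assertion styles; failingStart and throwingStart are hypothetical stand-ins for indexer.start(0) after and before the revert:

```java
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;

import org.junit.jupiter.api.Test;

class FailureAssertionStyles {
    // Hypothetical stand-in for indexer.start(0) after the revert:
    // failure is reported through the return code.
    int failingStart() {
        return -1;
    }

    // Hypothetical stand-in for the pre-revert behavior: failure
    // propagates as a wrapped RuntimeException.
    int throwingStart() {
        throw new RuntimeException("Failed in retry");
    }

    @Test
    void exitCodeStyle() {
        // Post-revert style: assert on the returned code.
        assertEquals(-1, failingStart());
    }

    @Test
    void exceptionStyle() {
        // Pre-revert style: assert the exception surfaces.
        assertThrows(RuntimeException.class, this::throwingStart);
    }
}
```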
