Skip to content

Commit

Permalink
Modify OA doRehash() functions to properly debit rehashCredits (#3211)
Browse files Browse the repository at this point in the history

* Modify doRehash() function to prevent rehash starvation, add unit tests for bug reproduction

* subtract added entries from rehash equity

* cleanup and spotless
  • Loading branch information
lbooker42 authored Dec 16, 2022
1 parent 6ced5cd commit 05c140e
Show file tree
Hide file tree
Showing 4 changed files with 75 additions and 2 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -276,14 +276,18 @@ protected void buildTable(

while (rsIt.hasMore()) {
final RowSequence chunkOk = rsIt.getNextRowSequenceWithLength(bc.chunkSize);

while (doRehash(initialBuild, bc.rehashCredits, chunkOk.intSize())) {
migrateFront();
}

getKeyChunks(buildSources, bc.getContexts, sourceKeyChunks, chunkOk);

final long oldEntries = numEntries;
buildHandler.doBuild(chunkOk, sourceKeyChunks);
final long entriesAdded = numEntries - oldEntries;
// if we actually added anything, then take away from the "equity" we've built up rehashing, otherwise
// don't penalize this build call with additional rehashing
bc.rehashCredits.subtract(entriesAdded);

bc.resetSharedContexts();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -178,7 +178,12 @@ protected void buildTable(

getKeyChunks(buildSources, bc.getContexts, sourceKeyChunks, chunkOk);

final long oldEntries = numEntries;
buildHandler.doBuild(chunkOk, sourceKeyChunks);
final long entriesAdded = numEntries - oldEntries;
// if we actually added anything, then take away from the "equity" we've built up rehashing, otherwise
// don't penalize this build call with additional rehashing
bc.rehashCredits.subtract(entriesAdded);

bc.resetSharedContexts();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -214,14 +214,18 @@ protected void buildTable(

while (rsIt.hasMore()) {
final RowSequence chunkOk = rsIt.getNextRowSequenceWithLength(bc.chunkSize);

while (doRehash(initialBuild, bc.rehashCredits, chunkOk.intSize(), outputPositions)) {
migrateFront(outputPositions);
}

getKeyChunks(buildSources, bc.getContexts, sourceKeyChunks, chunkOk);

final long oldEntries = numEntries;
buildHandler.doBuild(chunkOk, sourceKeyChunks);
final long entriesAdded = numEntries - oldEntries;
// if we actually added anything, then take away from the "equity" we've built up rehashing, otherwise
// don't penalize this build call with additional rehashing
bc.rehashCredits.subtract(entriesAdded);

bc.resetSharedContexts();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@
import java.util.function.BiConsumer;
import java.util.function.Function;

import static io.deephaven.engine.testutil.GenerateTableUpdates.generateAppends;
import static io.deephaven.engine.testutil.TstUtils.*;
import static io.deephaven.engine.util.TableTools.*;
import static io.deephaven.util.QueryConstants.NULL_INT;
Expand Down Expand Up @@ -1514,6 +1515,65 @@ public void testSymbolTableJoin() throws IOException {
});
}

/**
 * Reproduction test #1 for DHC issue #3202: flood a naturalJoin hash table with repeated large
 * appends so that, before the doRehash() fix, rehashing could be starved of credits.
 */
public void testDHC3202_v1() throws IOException {
    // Fixed seed so the flood of random updates is reproducible across runs.
    final Random random = new Random(0x31313131);

    final ColumnInfo[] leftColumnInfo = initColumnInfos(new String[] {"idx", "LeftValue"},
            new UniqueIntGenerator(0, 100_000_000),
            new IntGenerator(10_000_000, 10_010_000));
    final QueryTable leftTable = getTable(true, 0, random, leftColumnInfo);

    final ColumnInfo[] rightColumnInfo = initColumnInfos(new String[] {"idx", "RightValue"},
            new UniqueIntGenerator(0, 100_000_000),
            new IntGenerator(20_000_000, 20_010_000));
    final QueryTable rightTable = getTable(true, 0, random, rightColumnInfo);

    // The live join result is what drives the hash table as updates stream in.
    final Table joinTable = leftTable.naturalJoin(rightTable, "idx=idx", "RightValue");

    // Ten update-graph cycles, each appending 10,000 rows to both sides of the join.
    for (int cycle = 0; cycle < 10; cycle++) {
        UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(
                () -> {
                    generateAppends(10_000, random, leftTable, leftColumnInfo);
                    generateAppends(10_000, random, rightTable, rightColumnInfo);
                });
    }
}

/**
 * Test #2 for DHC issue #3202 — same shape as test #1, but with 100,000-row appends per cycle
 * instead of 10,000, to reproduce the rehash-starvation bug under heavier load.
 */
public void testDHC3202_v2() throws IOException {
// flood the hashtable with large updates
// fixed seed keeps the random update stream deterministic across runs
final Random random = new Random(0x31313131);

final ColumnInfo[] leftColumnInfo;
final QueryTable leftTable = getTable(true, 0, random,
leftColumnInfo = initColumnInfos(new String[] {"idx", "LeftValue"},
new UniqueIntGenerator(0, 100_000_000),
new IntGenerator(10_000_000, 10_010_000)));

final ColumnInfo[] rightColumnInfo;
final QueryTable rightTable = getTable(true, 0, random,
rightColumnInfo = initColumnInfos(new String[] {"idx", "RightValue"},
new UniqueIntGenerator(0, 100_000_000),
new IntGenerator(20_000_000, 20_010_000)));


// the live join result drives the hash table while updates stream in below
final Table joinTable = leftTable.naturalJoin(rightTable, "idx=idx", "RightValue");

// ten update-graph cycles, each appending 100,000 rows to both sides of the join
for (int ii = 0; ii < 10; ii++) {
UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(
() -> {
generateAppends(100_000, random, leftTable, leftColumnInfo);
generateAppends(100_000, random, rightTable, rightColumnInfo);
});
}
}


private void diskBackedTestHarness(BiConsumer<Table, Table> testFunction) throws IOException {
final File leftDirectory = Files.createTempDirectory("QueryTableJoinTest-Left").toFile();
final File rightDirectory = Files.createTempDirectory("QueryTableJoinTest-Right").toFile();
Expand Down

0 comments on commit 05c140e

Please sign in to comment.