From ea8eb5c962295736a992743e65e36730094bea2b Mon Sep 17 00:00:00 2001 From: Nathaniel Bauernfeind Date: Tue, 31 Jan 2023 11:08:45 -0700 Subject: [PATCH 01/14] Port DH-11692: Add native support for java.time types --- .../io/deephaven/util/codec/CodecCache.java | 3 + .../util/codec/ZonedDateTimeCodec.java | 96 ++ .../io/deephaven/util/type/TypeUtils.java | 10 +- .../util/codec/ZonedDateTimeCodecTest.java | 61 + .../deephaven/engine/table/ElementSource.java | 124 ++ .../table/impl/AbstractColumnSource.java | 4 +- .../table/impl/ColumnSourceGetDefaults.java | 17 +- .../ImmutableColumnSourceGetDefaults.java | 9 + .../impl/MutableColumnSourceGetDefaults.java | 17 +- .../table/impl/TableUpdateValidator.java | 9 +- .../impl/UnboxedDateTimeWritableSource.java | 9 +- ...ava => UnboxedLongBackedColumnSource.java} | 22 +- .../by/ByteChunkedAddOnlyMinMaxOperator.java | 4 + .../by/CharChunkedAddOnlyMinMaxOperator.java | 4 + .../DoubleChunkedAddOnlyMinMaxOperator.java | 4 + .../by/FloatChunkedAddOnlyMinMaxOperator.java | 4 + .../by/IntChunkedAddOnlyMinMaxOperator.java | 4 + .../by/LongChunkedAddOnlyMinMaxOperator.java | 25 +- .../ObjectChunkedAddOnlyMinMaxOperator.java | 4 + .../by/ShortChunkedAddOnlyMinMaxOperator.java | 4 + .../impl/CompositeTableDataService.java | 240 --- ...iteTableDataServiceConsistencyMonitor.java | 44 - .../impl/preview/ColumnPreviewManager.java | 6 +- .../table/impl/remote/ConstructSnapshot.java | 5 +- .../impl/remote/InitialSnapshotTable.java | 5 +- .../table/impl/select/DhFormulaColumn.java | 6 +- .../impl/select/ReinterpretedColumn.java | 182 ++- .../impl/sources/AbstractLongArraySource.java | 784 --------- .../AbstractSparseLongArraySource.java | 965 ----------- .../impl/sources/ArrayBackedColumnSource.java | 34 +- .../table/impl/sources/ArraySourceHelper.java | 62 +- .../sources/BooleanSparseArraySource.java | 91 +- .../table/impl/sources/BoxedColumnSource.java | 49 +- .../impl/sources/BoxedLongAsTimeSource.java | 108 ++ 
.../table/impl/sources/ByteArraySource.java | 283 +++- .../impl/sources/ByteSparseArraySource.java | 95 +- .../impl/sources/CharacterArraySource.java | 283 +++- .../sources/CharacterSparseArraySource.java | 95 +- .../impl/sources/ConvertableTimeSource.java | 71 + .../impl/sources/DateTimeArraySource.java | 251 +-- .../sources/DateTimeAsLongColumnSource.java | 88 +- .../sources/DateTimeSparseArraySource.java | 254 +-- .../table/impl/sources/DoubleArraySource.java | 283 +++- .../impl/sources/DoubleSparseArraySource.java | 95 +- .../table/impl/sources/FloatArraySource.java | 283 +++- .../impl/sources/FloatSparseArraySource.java | 95 +- .../impl/sources/InMemoryColumnSource.java | 19 +- .../impl/sources/InstantArraySource.java | 40 + .../sources/InstantAsLongColumnSource.java | 23 + .../sources/InstantSparseArraySource.java | 36 + .../impl/sources/IntegerArraySource.java | 283 +++- .../sources/IntegerSparseArraySource.java | 95 +- .../impl/sources/LocalDateWrapperSource.java | 122 ++ .../impl/sources/LocalTimeWrapperSource.java | 122 ++ .../table/impl/sources/LongArraySource.java | 1227 +++++++++++++- .../sources/LongAsDateTimeColumnSource.java | 135 +- .../sources/LongAsInstantColumnSource.java | 23 + .../sources/LongAsLocalDateColumnSource.java | 29 + .../sources/LongAsLocalTimeColumnSource.java | 29 + .../LongAsZonedDateTimeColumnSource.java | 33 + .../impl/sources/LongSparseArraySource.java | 1418 ++++++++++++++++- .../sources/NanosBasedTimeArraySource.java | 286 ++++ .../NanosBasedTimeSparseArraySource.java | 228 +++ .../impl/sources/ObjectSparseArraySource.java | 95 +- .../impl/sources/RedirectedColumnSource.java | 75 +- .../table/impl/sources/ReinterpretUtils.java | 75 +- .../table/impl/sources/ShortArraySource.java | 283 +++- .../impl/sources/ShortSparseArraySource.java | 95 +- .../impl/sources/SparseArrayColumnSource.java | 4 + .../UnboxedTimeBackedColumnSource.java | 111 ++ .../sources/ZonedDateTimeArraySource.java | 62 + 
.../sources/ZonedDateTimeAsLongSource.java | 24 + .../ZonedDateTimeSparseArraySource.java | 62 + .../immutable/Immutable2DByteArraySource.java | 121 +- .../immutable/Immutable2DCharArraySource.java | 121 +- .../Immutable2DDateTimeArraySource.java | 41 + .../Immutable2DDoubleArraySource.java | 121 +- .../Immutable2DFloatArraySource.java | 121 +- .../Immutable2DInstantArraySource.java | 41 + .../immutable/Immutable2DIntArraySource.java | 121 +- .../immutable/Immutable2DLongArraySource.java | 299 +++- .../Immutable2DNanosBasedTimeArraySource.java | 264 +++ .../Immutable2DObjectArraySource.java | 121 +- .../Immutable2DShortArraySource.java | 121 +- .../Immutable2DZonedDateTimeArraySource.java | 58 + .../immutable/ImmutableByteArraySource.java | 119 +- .../immutable/ImmutableCharArraySource.java | 119 +- .../ImmutableConstantByteSource.java | 30 +- .../ImmutableConstantCharSource.java | 8 +- .../ImmutableConstantDateTimeSource.java | 40 + .../ImmutableConstantDoubleSource.java | 8 +- .../ImmutableConstantFloatSource.java | 8 +- .../ImmutableConstantInstantSource.java | 41 + .../immutable/ImmutableConstantIntSource.java | 8 +- .../ImmutableConstantLongSource.java | 79 +- ...ImmutableConstantNanosBasedTimeSource.java | 183 +++ .../ImmutableConstantObjectSource.java | 8 +- .../ImmutableConstantShortSource.java | 8 +- .../ImmutableConstantZonedDateTimeSource.java | 52 + .../ImmutableDateTimeArraySource.java | 44 + .../immutable/ImmutableDoubleArraySource.java | 119 +- .../immutable/ImmutableFloatArraySource.java | 119 +- .../ImmutableInstantArraySource.java | 45 + .../immutable/ImmutableIntArraySource.java | 119 +- .../immutable/ImmutableLongArraySource.java | 286 +++- .../ImmutableNanosBasedTimeArraySource.java | 238 +++ .../immutable/ImmutableObjectArraySource.java | 119 +- .../immutable/ImmutableShortArraySource.java | 119 +- .../ImmutableZonedDateTimeArraySource.java | 58 + .../impl/sources/ring/RingColumnSource.java | 4 + .../engine/table/impl/util/ColumnHolder.java | 
2 + .../table/impl/util/TableTimeConversions.java | 341 ++++ .../table/impl/util/copy/CopyKernel.java | 2 +- .../impl/util/freezeby/FreezeByOperator.java | 5 +- .../util/freezeby/LongFreezeByHelper.java | 6 +- .../stream/StreamToTableAdapter.java | 6 +- .../impl/TestFunctionConsistencyMonitor.java | 157 -- .../impl/select/TestReinterpretedColumn.java | 517 ++++++ .../util/TestCompileSimpleFunction.java | 9 - .../deephaven/engine/testutil/ColumnInfo.java | 3 +- .../deephaven/engine/testutil/TstUtils.java | 9 + .../sources/ImmutableByteTestSource.java | 4 +- .../sources/ImmutableCharTestSource.java | 4 +- .../sources/ImmutableDoubleTestSource.java | 2 +- .../sources/ImmutableFloatTestSource.java | 2 +- .../sources/ImmutableInstantTestSource.java | 134 ++ .../sources/ImmutableIntTestSource.java | 4 +- .../sources/ImmutableLongTestSource.java | 4 +- .../sources/ImmutableObjectTestSource.java | 2 +- .../sources/ImmutableShortTestSource.java | 2 +- .../testutil/sources/InstantTestSource.java | 157 ++ .../sources/UnboxedInstantTestSource.java | 43 + .../main/java/io/deephaven/time/DateTime.java | 234 ++- .../java/io/deephaven/time/DateTimeUtils.java | 434 +++-- .../main/java/io/deephaven/time/TimeZone.java | 71 +- .../java/io/deephaven/time/TestDateTime.java | 83 - .../io/deephaven/time/TestDateTimeUtils.java | 9 +- .../chunk/ChunkInputStreamGenerator.java | 38 + .../extensions/barrage/util/BarrageUtil.java | 4 +- ...lumnExpressionCompletionHandlerTest.groovy | 20 +- .../ReplicateDupCompactKernel.java | 2 +- .../replicators/ReplicateFreezeBy.java | 14 +- .../replicators/ReplicateOperators.java | 26 +- .../ReplicateSourcesAndChunks.java | 590 ++++++- .../replication/ReplicatePrimitiveCode.java | 10 + .../replication/ReplicationUtils.java | 59 +- 146 files changed, 12185 insertions(+), 4580 deletions(-) create mode 100644 Util/src/main/java/io/deephaven/util/codec/ZonedDateTimeCodec.java create mode 100644 
Util/src/test/java/io/deephaven/util/codec/ZonedDateTimeCodecTest.java rename engine/table/src/main/java/io/deephaven/engine/table/impl/{UnboxedDateTimeColumnSource.java => UnboxedLongBackedColumnSource.java} (62%) delete mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/locations/impl/CompositeTableDataService.java delete mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/locations/impl/CompositeTableDataServiceConsistencyMonitor.java delete mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/AbstractLongArraySource.java delete mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/AbstractSparseLongArraySource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BoxedLongAsTimeSource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ConvertableTimeSource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/InstantArraySource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/InstantAsLongColumnSource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/InstantSparseArraySource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LocalDateWrapperSource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LocalTimeWrapperSource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongAsInstantColumnSource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongAsLocalDateColumnSource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongAsLocalTimeColumnSource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongAsZonedDateTimeColumnSource.java create mode 100644 
engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeArraySource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeSparseArraySource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/UnboxedTimeBackedColumnSource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ZonedDateTimeArraySource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ZonedDateTimeAsLongSource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ZonedDateTimeSparseArraySource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DDateTimeArraySource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DInstantArraySource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DNanosBasedTimeArraySource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DZonedDateTimeArraySource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantDateTimeSource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantInstantSource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantNanosBasedTimeSource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantZonedDateTimeSource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableDateTimeArraySource.java create mode 100644 
engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableInstantArraySource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableNanosBasedTimeArraySource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableZonedDateTimeArraySource.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/util/TableTimeConversions.java delete mode 100644 engine/table/src/test/java/io/deephaven/engine/table/impl/locations/impl/TestFunctionConsistencyMonitor.java create mode 100644 engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestReinterpretedColumn.java create mode 100644 engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableInstantTestSource.java create mode 100644 engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/InstantTestSource.java create mode 100644 engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/UnboxedInstantTestSource.java diff --git a/Util/src/main/java/io/deephaven/util/codec/CodecCache.java b/Util/src/main/java/io/deephaven/util/codec/CodecCache.java index 8c395a14c25..31256db62dd 100644 --- a/Util/src/main/java/io/deephaven/util/codec/CodecCache.java +++ b/Util/src/main/java/io/deephaven/util/codec/CodecCache.java @@ -13,6 +13,7 @@ import java.math.BigInteger; import java.time.LocalDate; import java.time.LocalTime; +import java.time.ZonedDateTime; import java.util.HashMap; import java.util.Map; @@ -94,6 +95,8 @@ public static String getDefaultCodecClass(@NotNull final Class dataType) { return BigDecimalCodec.class.getName(); } else if (dataType.equals(BigInteger.class)) { return BigIntegerCodec.class.getName(); + } else if (dataType.equals(ZonedDateTime.class)) { + return ZonedDateTimeCodec.class.getName(); } else { return null; } diff --git a/Util/src/main/java/io/deephaven/util/codec/ZonedDateTimeCodec.java 
b/Util/src/main/java/io/deephaven/util/codec/ZonedDateTimeCodec.java new file mode 100644 index 00000000000..3354616e1dd --- /dev/null +++ b/Util/src/main/java/io/deephaven/util/codec/ZonedDateTimeCodec.java @@ -0,0 +1,96 @@ +package io.deephaven.util.codec; + +import io.deephaven.datastructures.util.CollectionUtil; +import io.deephaven.util.QueryConstants; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; + +import java.nio.ByteBuffer; +import java.time.Instant; +import java.time.ZoneId; +import java.time.ZonedDateTime; + +public class ZonedDateTimeCodec implements ObjectCodec { + static final long MAX_CONVERTIBLE_SECONDS = (Long.MAX_VALUE / 1_000_000_000L) - 1; + + public ZonedDateTimeCodec(String args) {} + + @NotNull + @Override + public byte[] encode(@Nullable ZonedDateTime input) { + if (input == null) { + return CollectionUtil.ZERO_LENGTH_BYTE_ARRAY; + } + + final int bufSize = computeSize(input); + final byte[] buf = new byte[bufSize]; + ByteBuffer bb = ByteBuffer.wrap(buf); + bb.putLong(toEpochNano(input)); + + final String zone = input.getZone().getId(); + bb.putInt(zone.length()); + bb.put(zone.getBytes()); + + return buf; + } + + @Nullable + @Override + public ZonedDateTime decode(@NotNull byte[] input, int offset, int length) { + if (length == 0) { + return null; + } + + final ByteBuffer buf = ByteBuffer.wrap(input, offset, length); + final long nanos = buf.getLong(); + final int zidLen = buf.getInt(); + + final byte[] zidBytes = new byte[zidLen]; + buf.get(zidBytes, 0, zidLen); + final String zid = new String(zidBytes); + + return ZonedDateTime.ofInstant(Instant.ofEpochSecond(0, nanos), ZoneId.of(zid)); + } + + @Override + public boolean isNullable() { + return true; + } + + @Override + public int getPrecision() { + return 0; + } + + @Override + public int getScale() { + return 0; + } + + @Override + public int expectedObjectWidth() { + return VARIABLE_WIDTH_SENTINEL; + } + + private static int computeSize(@NotNull 
ZonedDateTime val) { + return Long.BYTES + Integer.BYTES + val.getZone().getId().length(); + } + + // Sadly, this is copied from DBTimeUtils since that lives in the DB package and this cannot. + private static long toEpochNano(@Nullable final ZonedDateTime value) { + if (value == null) { + return QueryConstants.NULL_LONG; + } + + return safeComputeNanos(value.toEpochSecond(), value.getNano()); + } + + private static long safeComputeNanos(long epochSecond, long nanoOfSecond) { + if (epochSecond > MAX_CONVERTIBLE_SECONDS) { + throw new IllegalArgumentException( + "Numeric overflow detected during conversion of " + epochSecond + " to nanoseconds"); + } + + return epochSecond * 1_000_000_000L + nanoOfSecond; + } +} diff --git a/Util/src/main/java/io/deephaven/util/type/TypeUtils.java b/Util/src/main/java/io/deephaven/util/type/TypeUtils.java index e7c1123cf2f..8ae786a6b9e 100644 --- a/Util/src/main/java/io/deephaven/util/type/TypeUtils.java +++ b/Util/src/main/java/io/deephaven/util/type/TypeUtils.java @@ -10,6 +10,7 @@ import java.lang.annotation.RetentionPolicy; import java.math.BigDecimal; import java.math.BigInteger; +import java.time.Instant; import java.util.*; import java.util.stream.Collectors; @@ -512,14 +513,15 @@ public static boolean isCharacter(@NotNull final Class c) { } /** - * Whether the class is a DateTime or Date. + * Whether the class is a Date, DateTime, or Instant. * * @param type The class. - * @return true if the type is a DateTime or {@link Date}. + * @return true if the type is a {@link Date}, DateTime or {@link Instant}. 
*/ public static boolean isDateTime(Class type) { - return Date.class.isAssignableFrom(type) || type.getAnnotation(IsDateTime.class) != null - && type.getAnnotation(IsDateTime.class).value(); + return Date.class.isAssignableFrom(type) + || Instant.class.isAssignableFrom(type) + || (type.getAnnotation(IsDateTime.class) != null && type.getAnnotation(IsDateTime.class).value()); } /** diff --git a/Util/src/test/java/io/deephaven/util/codec/ZonedDateTimeCodecTest.java b/Util/src/test/java/io/deephaven/util/codec/ZonedDateTimeCodecTest.java new file mode 100644 index 00000000000..de2dccfea6b --- /dev/null +++ b/Util/src/test/java/io/deephaven/util/codec/ZonedDateTimeCodecTest.java @@ -0,0 +1,61 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.util.codec; + +import org.junit.Test; + +import java.time.Instant; +import java.time.ZoneId; +import java.time.ZonedDateTime; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; + +public class ZonedDateTimeCodecTest { + private void roundTripWithOffset(final ZonedDateTime value, final int offset) { + final ZonedDateTimeCodec codec = new ZonedDateTimeCodec(""); + byte[] enc = codec.encode(value); + // if we expect to be decoding from an offset, construct the input accordingly + if (offset > 0) { + final byte[] buffer = new byte[enc.length + offset]; + System.arraycopy(enc, 0, buffer, offset, enc.length); + enc = buffer; + } + + // when fixed width we expect every encoded value to be the same size + if (codec.expectedObjectWidth() > 0) { + assertEquals(codec.expectedObjectWidth(), enc.length - offset); + } + final ZonedDateTime v1 = codec.decode(enc, offset, enc.length - offset); + assertEquals(value, v1); + } + + @Test + public void testNull() { + roundTripWithOffset(null, 0); + } + + @Test + public void testMax() { + try { + roundTripWithOffset(ZonedDateTime.ofInstant( + Instant.ofEpochSecond(ZonedDateTimeCodec.MAX_CONVERTIBLE_SECONDS + 1), + 
ZoneId.of("America/New_York")), 0); + fail(); + } catch (IllegalArgumentException ignored) { + } + + roundTripWithOffset(ZonedDateTime.ofInstant( + Instant.ofEpochSecond(ZonedDateTimeCodec.MAX_CONVERTIBLE_SECONDS, 999_999_999L), + ZoneId.of("America/New_York")), 0); + + } + + @Test + public void TestNormal() { + roundTripWithOffset(ZonedDateTime.of(1969, 10, 20, 10, 11, 12, 13, ZoneId.of("America/Chicago")), 0); + roundTripWithOffset(ZonedDateTime.of(2020, 10, 20, 10, 11, 12, 13, ZoneId.of("America/New_York")), 3); + roundTripWithOffset(ZonedDateTime.of(2200, 10, 20, 10, 11, 12, 13, ZoneId.of("America/Los_Angeles")), 6); + } +} diff --git a/engine/api/src/main/java/io/deephaven/engine/table/ElementSource.java b/engine/api/src/main/java/io/deephaven/engine/table/ElementSource.java index 3df564be164..60cf5444066 100644 --- a/engine/api/src/main/java/io/deephaven/engine/table/ElementSource.java +++ b/engine/api/src/main/java/io/deephaven/engine/table/ElementSource.java @@ -3,41 +3,165 @@ */ package io.deephaven.engine.table; +import io.deephaven.util.QueryConstants; +import org.jetbrains.annotations.Nullable; + +/** + * A source of element data within a table. + * + * @param the type of underlying data. + */ public interface ElementSource { + /** + * Get the value from the source. This may return boxed values for basic types. + * + * @param rowKey the location in key space to get the value from. + * @return the value at the rowKey, potentially null. + */ + @Nullable T get(long rowKey); + /** + * Get the value at the rowKey as a Boolean. + * + * @param rowKey the location in key space to get the value from. + * @return the boolean at the rowKey, potentially null. + */ + @Nullable Boolean getBoolean(long rowKey); + /** + * Get the value at the rowKey as a byte. + * + * @param rowKey the location in key space to get the value from. 
+ * @return the byte at the rowKey, null values are represented by {@link QueryConstants#NULL_BYTE} + */ byte getByte(long rowKey); + /** + * Get the value at the rowKey as a char. + * + * @param rowKey the location in key space to get the value from. + * @return the char at the rowKey, null values are represented by {@link QueryConstants#NULL_CHAR} + */ char getChar(long rowKey); + /** + * Get the value at the rowKey as a double. + * + * @param rowKey the location in key space to get the value from. + * @return the double at the rowKey, null values are represented by {@link QueryConstants#NULL_DOUBLE} + */ double getDouble(long rowKey); + /** + * Get the value at the rowKey as a float. + * + * @param rowKey the location in key space to get the value from. + * @return the float at the rowKey, null values are represented by {@link QueryConstants#NULL_FLOAT} + */ float getFloat(long rowKey); + /** + * Get the value at the rowKey as an int. + * + * @param rowKey the location in key space to get the value from. + * @return the int at the rowKey, null values are represented by {@link QueryConstants#NULL_INT} + */ int getInt(long rowKey); + /** + * Get the value at the rowKey as a long. + * + * @param rowKey the location in key space to get the value from. + * @return the long at the rowKey, null values are represented by {@link QueryConstants#NULL_LONG} + */ long getLong(long rowKey); + /** + * Get the value at the rowKey as a short. + * + * @param rowKey the location in key space to get the value from. + * @return the short at the rowKey, null values are represented by {@link QueryConstants#NULL_SHORT} + */ short getShort(long rowKey); + /** + * Get the previous value at the rowKey. Previous values are used during an + * {@link io.deephaven.engine.updategraph.UpdateGraphProcessor UGP} + * {@link io.deephaven.engine.updategraph.LogicalClock.State#Updating update} cycle to process changes in data. 
+ * During {@link io.deephaven.engine.updategraph.LogicalClock.State#Idle normal} operation previous values will be + * identical to {@link #get(long) current} values. + * + * @param rowKey the location in key space to get the value from. + * @return the previous value at the rowKey, or null. + */ + @Nullable T getPrev(long rowKey); + /** + * Get the previous value at the rowKey as a Boolean. See {@link #getPrev(long)} for more details. + * + * @param rowKey the location in key space to get the previous value from. + * @return the previous boolean at the rowKey, or null. + */ + @Nullable Boolean getPrevBoolean(long rowKey); + /** + * Get the previous value at the rowKey as a byte. See {@link #getPrev(long)} for more details. + * + * @param rowKey the location in key space to get the previous value from. + * @return the previous byte at the rowKey, null values are represented by {@link QueryConstants#NULL_BYTE} + */ byte getPrevByte(long rowKey); + /** + * Get the previous value at the rowKey as a char. See {@link #getPrev(long)} for more details. + * + * @param rowKey the location in key space to get the previous value from. + * @return the previous char at the rowKey, null values are represented by {@link QueryConstants#NULL_CHAR} + */ char getPrevChar(long rowKey); + /** + * Get the previous value at the rowKey as a double. See {@link #getPrev(long)} for more details. + * + * @param rowKey the location in key space to get the previous value from. + * @return the previous double at the rowKey, null values are represented by {@link QueryConstants#NULL_DOUBLE} + */ double getPrevDouble(long rowKey); + /** + * Get the previous value at the rowKey as a float. See {@link #getPrev(long)} for more details. + * + * @param rowKey the location in key space to get the previous value from. 
+ * @return the previous float at the rowKey, null values are represented by {@link QueryConstants#NULL_FLOAT} + */ float getPrevFloat(long rowKey); + /** + * Get the previous value at the rowKey as an int. See {@link #getPrev(long)} for more details. + * + * @param rowKey the location in key space to get the previous value from. + * @return the previous int at the rowKey, null values are represented by {@link QueryConstants#NULL_INT} + */ int getPrevInt(long rowKey); + /** + * Get the previous value at the rowKey as a long. See {@link #getPrev(long)} for more details. + * + * @param rowKey the location in key space to get the previous value from. + * @return the previous long at the rowKey, null values are represented by {@link QueryConstants#NULL_LONG} + */ long getPrevLong(long rowKey); + /** + * Get the previous value at the rowKey as a short. See {@link #getPrev(long)} for more details. + * + * @param rowKey the location in key space to get the previous value from. + * @return the previous short at the rowKey, null values are represented by {@link QueryConstants#NULL_SHORT} + */ short getPrevShort(long rowKey); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/AbstractColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/AbstractColumnSource.java index 4480add5941..048de545d11 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/AbstractColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/AbstractColumnSource.java @@ -15,6 +15,7 @@ import io.deephaven.engine.table.impl.chunkfillers.ChunkFiller; import io.deephaven.engine.table.impl.chunkfilter.ChunkFilter; import io.deephaven.engine.table.impl.chunkfilter.ChunkMatchFilterFactory; +import io.deephaven.engine.table.impl.sources.UnboxedLongBackedColumnSource; import io.deephaven.time.DateTime; import io.deephaven.vector.*; import io.deephaven.hash.KeyedObjectHashSet; @@ -290,8 +291,7 @@ protected ColumnSource 
doReinterpret( Assert.eq(getType(), "getType()", DateTime.class); Assert.eq(alternateDataType, "alternateDataType", long.class); // noinspection unchecked - return (ColumnSource) new UnboxedDateTimeWritableSource( - (WritableColumnSource) this); + return (ColumnSource) new UnboxedLongBackedColumnSource<>(this); } public static abstract class DefaultedMutable extends AbstractColumnSource diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/ColumnSourceGetDefaults.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/ColumnSourceGetDefaults.java index 89aea989c31..870e7e7859b 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/ColumnSourceGetDefaults.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/ColumnSourceGetDefaults.java @@ -5,6 +5,10 @@ import io.deephaven.engine.table.ColumnSource; import io.deephaven.time.DateTime; +import io.deephaven.time.DateTimeUtils; +import org.jetbrains.annotations.Nullable; + +import java.time.Instant; import static io.deephaven.time.DateTimeUtils.nanosToTime; import static io.deephaven.util.type.TypeUtils.box; @@ -381,7 +385,7 @@ default short getShort(final long rowKey) { * Default interface for long {@link ColumnSource} implementations. */ public interface ForLong extends LongBacked { - + @Nullable @Override default Long get(final long rowKey) { return box(getLong(rowKey)); @@ -399,6 +403,17 @@ default DateTime get(final long rowKey) { } } + /** + * Default interface for {@link Instant} {@link ColumnSource} implementations. + */ + public interface ForLongAsInstant extends LongBacked { + @Nullable + @Override + default Instant get(long index) { + return DateTimeUtils.makeInstant(getLong(index)); + } + } + /** * Default interface for short {@link ColumnSource} implementations. 
 */ diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/ImmutableColumnSourceGetDefaults.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/ImmutableColumnSourceGetDefaults.java index 8545e495e3a..2b006e41056 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/ImmutableColumnSourceGetDefaults.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/ImmutableColumnSourceGetDefaults.java @@ -6,6 +6,8 @@ import io.deephaven.engine.table.ColumnSource; import io.deephaven.time.DateTime; +import java.time.Instant; + /** * Defaulted interfaces for various immutable {@link ColumnSource} types, in order to avoid having defaults at higher * levels in the class hierarchy. @@ -75,6 +77,13 @@ public interface ForLongAsDateTime extends ColumnSourceGetDefaults.ForLongAsDateTime, ImmutableColumnSource { } + /** + * Default interface for immutable {@link Instant} {@link ColumnSource} implementations. + */ + public interface ForLongAsInstant + extends ColumnSourceGetDefaults.ForLongAsInstant, ImmutableColumnSource { + } + /** * Default interface for immutable short {@link ColumnSource} implementations. 
*/ diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/MutableColumnSourceGetDefaults.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/MutableColumnSourceGetDefaults.java index 8617ecfcae0..88e158a00a5 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/MutableColumnSourceGetDefaults.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/MutableColumnSourceGetDefaults.java @@ -5,6 +5,10 @@ import io.deephaven.engine.table.ColumnSource; import io.deephaven.time.DateTime; +import io.deephaven.time.DateTimeUtils; +import org.jetbrains.annotations.Nullable; + +import java.time.Instant; import static io.deephaven.time.DateTimeUtils.nanosToTime; import static io.deephaven.util.type.TypeUtils.box; @@ -394,13 +398,24 @@ default Long getPrev(final long rowKey) { * Default interface for mutable {@link DateTime} {@link ColumnSource} implementations. */ public interface ForLongAsDateTime extends ColumnSourceGetDefaults.ForLongAsDateTime, LongBacked { - + @Nullable @Override default DateTime getPrev(final long rowKey) { return nanosToTime(getPrevLong(rowKey)); } } + /** + * Default interface for mutable {@link Instant} {@link ColumnSource} implementations. + */ + public interface ForLongAsInstant extends ColumnSourceGetDefaults.ForLongAsInstant, LongBacked { + @Nullable + @Override + default Instant getPrev(long index) { + return DateTimeUtils.makeInstant(getPrevLong(index)); + } + } + /** * Default interface for mutable short {@link ColumnSource} implementations. 
*/ diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/TableUpdateValidator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/TableUpdateValidator.java index 0f7f20d45cb..45d8b2a8ed9 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/TableUpdateValidator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/TableUpdateValidator.java @@ -423,8 +423,8 @@ public void validateValues(final String what, final RowSequence toValidate, fina chunkEquals.equal(expected, actual, equalValuesDest()); MutableInt off = new MutableInt(); toValidate.forAllRowKeys((i) -> { - off.increment(); - if (equalValuesDest().get(off.intValue() - 1)) { + final int rowOffset = off.getAndIncrement(); + if (equalValuesDest().get(rowOffset)) { return; } @@ -432,9 +432,8 @@ public void validateValues(final String what, final RowSequence toValidate, fina Object eValue = expectedSource.get(i); Object aValue = usePrev ? source.getPrev(i) : source.get(i); String chunkEValue = ChunkUtils.extractKeyStringFromChunk(expectedSource.getChunkType(), expected, - off.intValue() - 1); - String chunkAValue = - ChunkUtils.extractKeyStringFromChunk(source.getChunkType(), actual, off.intValue() - 1); + rowOffset); + String chunkAValue = ChunkUtils.extractKeyStringFromChunk(source.getChunkType(), actual, rowOffset); return what + (usePrev ? 
" (previous)" : "") + " columnName=" + name + " k=" + i + " (from source) expected=" + eValue + " actual=" + aValue + diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/UnboxedDateTimeWritableSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/UnboxedDateTimeWritableSource.java index 04692a430d8..bc04928a30b 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/UnboxedDateTimeWritableSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/UnboxedDateTimeWritableSource.java @@ -3,16 +3,15 @@ */ package io.deephaven.engine.table.impl; -import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.WritableColumnSource; -import io.deephaven.time.DateTime; import static io.deephaven.util.QueryConstants.NULL_LONG; -public class UnboxedDateTimeWritableSource extends UnboxedDateTimeColumnSource implements WritableColumnSource { - private final WritableColumnSource alternateWritableSource; +public class UnboxedDateTimeWritableSource extends UnboxedLongBackedColumnSource + implements WritableColumnSource { + private final WritableColumnSource alternateWritableSource; - public UnboxedDateTimeWritableSource(WritableColumnSource alternateWritableSource) { + public UnboxedDateTimeWritableSource(WritableColumnSource alternateWritableSource) { super(alternateWritableSource); this.alternateWritableSource = alternateWritableSource; } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/UnboxedDateTimeColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/UnboxedLongBackedColumnSource.java similarity index 62% rename from engine/table/src/main/java/io/deephaven/engine/table/impl/UnboxedDateTimeColumnSource.java rename to engine/table/src/main/java/io/deephaven/engine/table/impl/UnboxedLongBackedColumnSource.java index 57b73904291..0a41b02096d 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/UnboxedDateTimeColumnSource.java +++ 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/UnboxedLongBackedColumnSource.java @@ -7,16 +7,18 @@ import io.deephaven.time.DateTime; import org.jetbrains.annotations.NotNull; +import java.time.Instant; + /** - * Reinterpret result for many {@link ColumnSource} implementations that internally represent {@link DateTime} values as - * {@code long} values. + * Reinterpret result for many {@link ColumnSource} implementations that internally represent time values, such as + * {@link DateTime} and {@link Instant}, as {@code long} values. */ -public class UnboxedDateTimeColumnSource extends AbstractColumnSource +public class UnboxedLongBackedColumnSource extends AbstractColumnSource implements MutableColumnSourceGetDefaults.ForLong { - private final ColumnSource alternateColumnSource; + private final ColumnSource alternateColumnSource; - public UnboxedDateTimeColumnSource(ColumnSource alternateColumnSource) { + public UnboxedLongBackedColumnSource(ColumnSource alternateColumnSource) { super(long.class); this.alternateColumnSource = alternateColumnSource; } @@ -39,14 +41,18 @@ public boolean isImmutable() { @Override public boolean allowsReinterpret( @NotNull final Class alternateDataType) { - return alternateDataType == DateTime.class; + return alternateDataType == alternateColumnSource.getType() + || alternateColumnSource.allowsReinterpret(alternateDataType); } @Override public ColumnSource doReinterpret( @NotNull final Class alternateDataType) throws IllegalArgumentException { - // noinspection unchecked - return (ColumnSource) alternateColumnSource; + if (alternateDataType == alternateColumnSource.getType()) { + // noinspection unchecked + return (ColumnSource) alternateColumnSource; + } + return alternateColumnSource.reinterpret(alternateDataType); } @Override diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ByteChunkedAddOnlyMinMaxOperator.java 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ByteChunkedAddOnlyMinMaxOperator.java index d6c8f302090..f70a10c0e89 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ByteChunkedAddOnlyMinMaxOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ByteChunkedAddOnlyMinMaxOperator.java @@ -27,6 +27,8 @@ */ class ByteChunkedAddOnlyMinMaxOperator implements IterativeChunkedAggregationOperator { private final ByteArraySource resultColumn; + // region actualResult + // endregion actualResult private final boolean minimum; private final String name; @@ -153,7 +155,9 @@ public void ensureCapacity(long tableSize) { @Override public Map> getResultColumns() { + // region getResultColumns return Collections.>singletonMap(name, resultColumn); + // endregion getResultColumns } @Override diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/CharChunkedAddOnlyMinMaxOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/CharChunkedAddOnlyMinMaxOperator.java index 17e9827fc7c..56c777ad82d 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/CharChunkedAddOnlyMinMaxOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/CharChunkedAddOnlyMinMaxOperator.java @@ -22,6 +22,8 @@ */ class CharChunkedAddOnlyMinMaxOperator implements IterativeChunkedAggregationOperator { private final CharacterArraySource resultColumn; + // region actualResult + // endregion actualResult private final boolean minimum; private final String name; @@ -148,7 +150,9 @@ public void ensureCapacity(long tableSize) { @Override public Map> getResultColumns() { + // region getResultColumns return Collections.>singletonMap(name, resultColumn); + // endregion getResultColumns } @Override diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/DoubleChunkedAddOnlyMinMaxOperator.java 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/DoubleChunkedAddOnlyMinMaxOperator.java index ce3f6d6da15..93ddea0c7f0 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/DoubleChunkedAddOnlyMinMaxOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/DoubleChunkedAddOnlyMinMaxOperator.java @@ -27,6 +27,8 @@ */ class DoubleChunkedAddOnlyMinMaxOperator implements IterativeChunkedAggregationOperator { private final DoubleArraySource resultColumn; + // region actualResult + // endregion actualResult private final boolean minimum; private final String name; @@ -153,7 +155,9 @@ public void ensureCapacity(long tableSize) { @Override public Map> getResultColumns() { + // region getResultColumns return Collections.>singletonMap(name, resultColumn); + // endregion getResultColumns } @Override diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/FloatChunkedAddOnlyMinMaxOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/FloatChunkedAddOnlyMinMaxOperator.java index 5b760db6179..319e110609f 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/FloatChunkedAddOnlyMinMaxOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/FloatChunkedAddOnlyMinMaxOperator.java @@ -27,6 +27,8 @@ */ class FloatChunkedAddOnlyMinMaxOperator implements IterativeChunkedAggregationOperator { private final FloatArraySource resultColumn; + // region actualResult + // endregion actualResult private final boolean minimum; private final String name; @@ -153,7 +155,9 @@ public void ensureCapacity(long tableSize) { @Override public Map> getResultColumns() { + // region getResultColumns return Collections.>singletonMap(name, resultColumn); + // endregion getResultColumns } @Override diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/IntChunkedAddOnlyMinMaxOperator.java 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/IntChunkedAddOnlyMinMaxOperator.java index 805df19cfb8..ff06876138c 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/IntChunkedAddOnlyMinMaxOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/IntChunkedAddOnlyMinMaxOperator.java @@ -27,6 +27,8 @@ */ class IntChunkedAddOnlyMinMaxOperator implements IterativeChunkedAggregationOperator { private final IntegerArraySource resultColumn; + // region actualResult + // endregion actualResult private final boolean minimum; private final String name; @@ -153,7 +155,9 @@ public void ensureCapacity(long tableSize) { @Override public Map> getResultColumns() { + // region getResultColumns return Collections.>singletonMap(name, resultColumn); + // endregion getResultColumns } @Override diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/LongChunkedAddOnlyMinMaxOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/LongChunkedAddOnlyMinMaxOperator.java index 894191b1931..7cf16168acb 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/LongChunkedAddOnlyMinMaxOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/LongChunkedAddOnlyMinMaxOperator.java @@ -8,9 +8,13 @@ */ package io.deephaven.engine.table.impl.by; +import java.time.Instant; import io.deephaven.time.DateTime; +import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.DateTimeArraySource; +import io.deephaven.engine.table.impl.sources.InstantArraySource; import io.deephaven.engine.table.impl.sources.LongArraySource; +import io.deephaven.engine.table.impl.sources.NanosBasedTimeArraySource; import io.deephaven.chunk.attributes.ChunkLengths; import io.deephaven.chunk.attributes.ChunkPositions; @@ -18,7 +22,7 @@ import io.deephaven.engine.rowset.chunkattributes.RowKeys; import io.deephaven.util.QueryConstants; 
import io.deephaven.util.compare.LongComparisons; -import io.deephaven.engine.table.impl.sources.AbstractLongArraySource; +import io.deephaven.engine.table.impl.sources.LongArraySource; import io.deephaven.engine.table.ColumnSource; import io.deephaven.chunk.*; import org.apache.commons.lang3.mutable.MutableInt; @@ -30,7 +34,10 @@ * Iterative average operator. */ class LongChunkedAddOnlyMinMaxOperator implements IterativeChunkedAggregationOperator { - private final AbstractLongArraySource resultColumn; + private final LongArraySource resultColumn; + // region actualResult + private final ArrayBackedColumnSource actualResult; + // endregion actualResult private final boolean minimum; private final String name; @@ -42,7 +49,15 @@ class LongChunkedAddOnlyMinMaxOperator implements IterativeChunkedAggregationOpe this.minimum = minimum; this.name = name; // region resultColumn initialization - resultColumn = type == DateTime.class ? new DateTimeArraySource() : new LongArraySource(); + if (type == DateTime.class) { + actualResult = new DateTimeArraySource(); + resultColumn = ((NanosBasedTimeArraySource)actualResult).toEpochNano(); + } else if (type == Instant.class) { + actualResult = new InstantArraySource(); + resultColumn = ((NanosBasedTimeArraySource)actualResult).toEpochNano(); + } else { + actualResult = resultColumn = new LongArraySource(); + } // endregion resultColumn initialization } @@ -158,7 +173,9 @@ public void ensureCapacity(long tableSize) { @Override public Map> getResultColumns() { - return Collections.>singletonMap(name, resultColumn); + // region getResultColumns + return Collections.>singletonMap(name, actualResult); + // endregion getResultColumns } @Override diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ObjectChunkedAddOnlyMinMaxOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ObjectChunkedAddOnlyMinMaxOperator.java index e3ee02107b8..fb28820f45d 100644 --- 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ObjectChunkedAddOnlyMinMaxOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ObjectChunkedAddOnlyMinMaxOperator.java @@ -26,6 +26,8 @@ */ class ObjectChunkedAddOnlyMinMaxOperator implements IterativeChunkedAggregationOperator { private final ObjectArraySource resultColumn; + // region actualResult + // endregion actualResult private final boolean minimum; private final String name; @@ -153,7 +155,9 @@ public void ensureCapacity(long tableSize) { @Override public Map> getResultColumns() { + // region getResultColumns return Collections.>singletonMap(name, resultColumn); + // endregion getResultColumns } @Override diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ShortChunkedAddOnlyMinMaxOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ShortChunkedAddOnlyMinMaxOperator.java index 0bfd8a06dec..bb3064ce5ce 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ShortChunkedAddOnlyMinMaxOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ShortChunkedAddOnlyMinMaxOperator.java @@ -27,6 +27,8 @@ */ class ShortChunkedAddOnlyMinMaxOperator implements IterativeChunkedAggregationOperator { private final ShortArraySource resultColumn; + // region actualResult + // endregion actualResult private final boolean minimum; private final String name; @@ -153,7 +155,9 @@ public void ensureCapacity(long tableSize) { @Override public Map> getResultColumns() { + // region getResultColumns return Collections.>singletonMap(name, resultColumn); + // endregion getResultColumns } @Override diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/locations/impl/CompositeTableDataService.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/locations/impl/CompositeTableDataService.java deleted file mode 100644 index fded1117149..00000000000 --- 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/locations/impl/CompositeTableDataService.java +++ /dev/null @@ -1,240 +0,0 @@ -/** - * Copyright (c) 2016-2022 Deephaven Data Labs and Patent Pending - */ -package io.deephaven.engine.table.impl.locations.impl; - -import io.deephaven.base.verify.Require; -import io.deephaven.engine.util.Formatter; -import io.deephaven.engine.table.impl.locations.*; -import io.deephaven.hash.KeyedObjectHashSet; -import io.deephaven.hash.KeyedObjectKey; -import io.deephaven.util.SafeCloseable; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; - -import java.util.*; -import java.util.stream.Collectors; - -/** - * Routing {@link TableDataService} that applies a selector function to pick service(s) for each request. It is assumed - * that each service will provide access to a non-overlapping set of table locations for any table key. - */ -public class CompositeTableDataService extends AbstractTableDataService { - - private static final String IMPLEMENTATION_NAME = CompositeTableDataService.class.getSimpleName(); - - private final ServiceSelector serviceSelector; - - public interface ServiceSelector { - - TableDataService[] call(@NotNull TableKey tableKey); - - void resetServices(); - - void resetServices(@NotNull TableKey key); - - /** - * Get a detailed description string. - * - * @return A description string - * @implNote Defaults to {@link #toString()} - */ - default String describe() { - return toString(); - } - } - - /** - * @param name optional name for this service - * @param serviceSelector Function to map a table key to a set of services that should be queried. 
- */ - public CompositeTableDataService(@NotNull String name, @NotNull final ServiceSelector serviceSelector) { - super(name); - this.serviceSelector = Require.neqNull(serviceSelector, "serviceSelector"); - } - - @Override - public void reset() { - super.reset(); - serviceSelector.resetServices(); - } - - @Override - public void reset(@NotNull final TableKey key) { - super.reset(key); - serviceSelector.resetServices(key); - } - - @Override - @NotNull - protected TableLocationProvider makeTableLocationProvider(@NotNull final TableKey tableKey) { - final TableDataService[] services = serviceSelector.call(tableKey); - if (services == null || services.length == 0) { - throw new TableDataException("No services found for " + tableKey + " in " + serviceSelector); - } - if (services.length == 1) { - return services[0].getTableLocationProvider(tableKey); - } - return new TableLocationProviderImpl(services, tableKey); - } - - private class TableLocationProviderImpl implements TableLocationProvider { - - private final ImmutableTableKey tableKey; - - private final List inputProviders; - private final String implementationName; - - private TableLocationProviderImpl(@NotNull final TableDataService[] inputServices, - @NotNull final TableKey tableKey) { - this.tableKey = tableKey.makeImmutable(); - inputProviders = Arrays.stream(inputServices).map(s -> s.getTableLocationProvider(this.tableKey)) - .collect(Collectors.toList()); - implementationName = "Composite-" + inputProviders; - } - - @Override - public String getImplementationName() { - return implementationName; - } - - @Override - public ImmutableTableKey getKey() { - return tableKey; - } - - @Override - public boolean supportsSubscriptions() { - return inputProviders.stream().anyMatch(TableLocationProvider::supportsSubscriptions); - } - - @Override - public void subscribe(@NotNull final Listener listener) { - inputProviders.forEach(p -> { - if (p.supportsSubscriptions()) { - p.subscribe(listener); - } else { - p.refresh(); 
- p.getTableLocationKeys().forEach(listener::handleTableLocationKey); - } - }); - } - - @Override - public void unsubscribe(@NotNull final Listener listener) { - inputProviders.forEach(p -> { - if (p.supportsSubscriptions()) { - p.unsubscribe(listener); - } - }); - } - - @Override - public void refresh() { - inputProviders.forEach(TableLocationProvider::refresh); - } - - @Override - public TableLocationProvider ensureInitialized() { - inputProviders.forEach(TableLocationProvider::ensureInitialized); - return this; - } - - @Override - @NotNull - public Collection getTableLocationKeys() { - final Set locationKeys = new KeyedObjectHashSet<>(KeyKeyDefinition.INSTANCE); - try (final SafeCloseable ignored = CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { - inputProviders.stream() - .map(TableLocationProvider::getTableLocationKeys) - .flatMap(Collection::stream) - .filter(x -> !locationKeys.add(x)) - .findFirst() - .ifPresent(duplicateLocationKey -> { - final String overlappingProviders = inputProviders.stream() - .filter(inputProvider -> inputProvider.hasTableLocationKey(duplicateLocationKey)) - .map(TableLocationProvider::getName) - .collect(Collectors.joining(",")); - throw new TableDataException( - "Data Routing Configuration error: TableDataService elements overlap at location " + - duplicateLocationKey + - " in providers " + overlappingProviders + - ". 
Full TableDataService configuration:\n" + - Formatter - .formatTableDataService(CompositeTableDataService.this.toString())); - }); - return Collections.unmodifiableCollection(locationKeys); - } - } - - @Override - public boolean hasTableLocationKey(@NotNull final TableLocationKey tableLocationKey) { - return inputProviders.stream() - .anyMatch(inputProvider -> inputProvider.hasTableLocationKey(tableLocationKey)); - } - - @Override - @Nullable - public TableLocation getTableLocationIfPresent(@NotNull final TableLocationKey tableLocationKey) { - // hang onto the first location and provider, so we can report well on any duplicates - TableLocation location = null; - TableLocationProvider provider = null; - - try (final SafeCloseable ignored = CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { - for (final TableLocationProvider tlp : inputProviders) { - final TableLocation candidateLocation = tlp.getTableLocationIfPresent(tableLocationKey); - if (candidateLocation != null) { - if (location != null) { - throw new TableDataException("TableDataService elements " + provider.getName() + - " and " + tlp.getName() + " overlap at location " + location.toGenericString() + - ". Full TableDataService configuration:\n" + - Formatter.formatTableDataService(CompositeTableDataService.this.toString())); - } - location = candidateLocation; - provider = tlp; - } - } - } - return location; - } - } - - @Override - public String getImplementationName() { - return IMPLEMENTATION_NAME; - } - - @Override - public String toString() { - return getImplementationName() + '{' + - (getName() == null ? "" : "name=" + getName() + ", ") + - "serviceSelector=" + serviceSelector + - '}'; - } - - @Override - public String describe() { - return getImplementationName() + '{' + - (getName() == null ? 
"" : "name=" + getName() + ", ") + - "serviceSelector=" + serviceSelector.describe() + - '}'; - } - - // ------------------------------------------------------------------------------------------------------------------ - // Location key definition implementation - // ------------------------------------------------------------------------------------------------------------------ - - private static final class KeyKeyDefinition - extends KeyedObjectKey.Basic { - - private static final KeyedObjectKey INSTANCE = - new KeyKeyDefinition(); - - private KeyKeyDefinition() {} - - @Override - public ImmutableTableLocationKey getKey(@NotNull final ImmutableTableLocationKey tableLocationKey) { - return tableLocationKey; - } - } -} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/locations/impl/CompositeTableDataServiceConsistencyMonitor.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/locations/impl/CompositeTableDataServiceConsistencyMonitor.java deleted file mode 100644 index ed2df7ac855..00000000000 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/locations/impl/CompositeTableDataServiceConsistencyMonitor.java +++ /dev/null @@ -1,44 +0,0 @@ -/** - * Copyright (c) 2016-2022 Deephaven Data Labs and Patent Pending - */ -package io.deephaven.engine.table.impl.locations.impl; - -import io.deephaven.time.DateTimeUtils; - -import java.util.function.Supplier; - -/** - * A consistency monitor for use in the CompositeTableDataService. - */ -public class CompositeTableDataServiceConsistencyMonitor { - /** - * The default instance used by the CompositeTableDataServices. 
- */ - static final FunctionConsistencyMonitor INSTANCE = new FunctionConsistencyMonitor(); - - public static class ConsistentSupplier extends FunctionConsistencyMonitor.ConsistentSupplier { - public ConsistentSupplier(Supplier underlyingSupplier) { - super(CompositeTableDataServiceConsistencyMonitor.INSTANCE, underlyingSupplier); - } - } - - private final static ConsistentSupplier defaultCurrentDateNySupplier = - new ConsistentSupplier<>(DateTimeUtils::currentDateNy); - - /** - * Get the consistent value of currentDateNy() as defined by {@link DateTimeUtils#currentDateNy()}. - * - * @return the current date in the New York time zone. - */ - public static String currentDateNy() { - return defaultCurrentDateNySupplier.get(); - } - - /** - * The same thing as {@link #currentDateNy()}, but with a different name so you can import it more easily and be - * sure you are getting the right value. - */ - public static String consistentDateNy() { - return defaultCurrentDateNySupplier.get(); - } -} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/preview/ColumnPreviewManager.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/preview/ColumnPreviewManager.java index f645788c35b..b3fd5a3df69 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/preview/ColumnPreviewManager.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/preview/ColumnPreviewManager.java @@ -153,9 +153,11 @@ public static boolean isColumnTypeDisplayable(Class type) { // String // BigInt, BigDecimal // DateTime - return type.isPrimitive() || io.deephaven.util.type.TypeUtils.isBoxedType(type) + return type.isPrimitive() + || io.deephaven.util.type.TypeUtils.isBoxedType(type) || io.deephaven.util.type.TypeUtils.isString(type) - || io.deephaven.util.type.TypeUtils.isBigNumeric(type) || TypeUtils.isDateTime(type) + || io.deephaven.util.type.TypeUtils.isBigNumeric(type) + || TypeUtils.isDateTime(type) || isOnWhiteList(type); } diff --git 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/remote/ConstructSnapshot.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/remote/ConstructSnapshot.java index f37dc117ccf..824f8055a14 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/remote/ConstructSnapshot.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/remote/ConstructSnapshot.java @@ -39,6 +39,8 @@ import org.jetbrains.annotations.Nullable; import java.lang.reflect.Array; +import java.time.Instant; +import java.time.ZonedDateTime; import java.util.*; import java.util.stream.Stream; @@ -1540,7 +1542,8 @@ public static long estimateSnapshotSize(TableDefinition tableDefinition, BitSet } else if (definition.getDataType() == int.class || definition.getDataType() == float.class) { sizePerRow += 4; } else if (definition.getDataType() == long.class || definition.getDataType() == double.class - || definition.getDataType() == DateTime.class) { + || definition.getDataType() == DateTime.class || definition.getDataType() == Instant.class + || definition.getDataType() == ZonedDateTime.class) { sizePerRow += 8; } else { switch (definition.getName()) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/remote/InitialSnapshotTable.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/remote/InitialSnapshotTable.java index 0cf2b3bfab0..74cea3533ac 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/remote/InitialSnapshotTable.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/remote/InitialSnapshotTable.java @@ -15,6 +15,8 @@ import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.util.*; +import java.time.Instant; +import java.time.ZonedDateTime; import java.util.BitSet; import java.util.LinkedHashMap; import java.util.List; @@ -67,7 +69,8 @@ protected Setter getSetter(final WritableColumnSource source) { return (Setter) (array, arrayIndex, destIndex) -> 
source.set(destIndex, array[arrayIndex]); } else if (source.getType() == int.class) { return (Setter) (array, arrayIndex, destIndex) -> source.set(destIndex, array[arrayIndex]); - } else if (source.getType() == long.class || source.getType() == DateTime.class) { + } else if (source.getType() == long.class || source.getType() == DateTime.class + || source.getType() == Instant.class || source.getType() == ZonedDateTime.class) { return (Setter) (array, arrayIndex, destIndex) -> source.set(destIndex, array[arrayIndex]); } else if (source.getType() == short.class) { return (Setter) (array, arrayIndex, destIndex) -> source.set(destIndex, array[arrayIndex]); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/DhFormulaColumn.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/DhFormulaColumn.java index 033a44df5f7..272242a2903 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/DhFormulaColumn.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/DhFormulaColumn.java @@ -40,6 +40,8 @@ import java.math.BigDecimal; import java.math.BigInteger; +import java.time.Instant; +import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; @@ -835,8 +837,8 @@ private static boolean isImmutableType(QueryScopeParam param) { return true; } final Class type = value.getClass(); - if (type == String.class || type == DateTime.class || type == BigInteger.class || type == BigDecimal.class || - Table.class.isAssignableFrom(type)) { + if (type == String.class || type == DateTime.class || type == BigInteger.class || type == BigDecimal.class + || type == Instant.class || type == ZonedDateTime.class || Table.class.isAssignableFrom(type)) { return true; } // if it is a boxed type, then it is immutable; otherwise we don't know what to do with it diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/ReinterpretedColumn.java 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/ReinterpretedColumn.java index 54f70fd909b..d79723ce52c 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/ReinterpretedColumn.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/ReinterpretedColumn.java @@ -3,6 +3,7 @@ */ package io.deephaven.engine.table.impl.select; +import io.deephaven.base.verify.Assert; import io.deephaven.base.verify.Require; import io.deephaven.engine.table.ColumnDefinition; import io.deephaven.engine.table.Table; @@ -12,15 +13,33 @@ import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.rowset.TrackingRowSet; +import io.deephaven.engine.table.impl.sources.ConvertableTimeSource; +import io.deephaven.engine.table.impl.sources.LocalDateWrapperSource; +import io.deephaven.engine.table.impl.sources.LocalTimeWrapperSource; +import io.deephaven.engine.table.impl.sources.LongAsDateTimeColumnSource; +import io.deephaven.engine.table.impl.sources.LongAsInstantColumnSource; +import io.deephaven.engine.table.impl.sources.LongAsLocalDateColumnSource; +import io.deephaven.engine.table.impl.sources.LongAsLocalTimeColumnSource; +import io.deephaven.engine.table.impl.sources.LongAsZonedDateTimeColumnSource; +import io.deephaven.engine.table.impl.sources.ReinterpretUtils; +import io.deephaven.engine.table.impl.util.TableTimeConversions; +import io.deephaven.time.DateTime; import org.jetbrains.annotations.NotNull; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.function.Function; /** - * Allows us to support ColumnSource reinterpretation via view-type Table operations. Currently, this is only exposed in - * V2 tables' support for dateTimeColumnAsNanos(). 
+ * Allows {@link ColumnSource} reinterpretation via view-type ({@link Table#view} and {@link Table#updateView}) + * {@link Table} operations. * * TODO: If we come up with other valid, useful reinterpretations, it would be trivial to create a general purpose * syntax for use in view()/updateView() column expressions. @@ -42,15 +61,33 @@ public class ReinterpretedColumn implements SelectColumn { private final String destName; @NotNull private final Class destDataType; - + private final Object[] reinterpParams; private ColumnSource sourceColumnSource; - public ReinterpretedColumn(String sourceName, Class sourceDataType, String destName, Class destDataType) { + private ZoneId zone; + + /** + * Create a {@link ReinterpretedColumn} that attempts to convert the source column into the destination type, + * optionally with parameters. + * + * @param sourceName the name of the Source column within the table + * @param sourceDataType the type of the source column + * @param destName the name of the desired destination column + * @param destDataType the type to try to convert to + * @param reinterpParams a varargs set of parameters for the arguments if required. + */ + public ReinterpretedColumn( + @NotNull String sourceName, + @NotNull Class sourceDataType, + @NotNull String destName, + @NotNull Class destDataType, + Object... 
reinterpParams) { + Assert.gtZero(destName.length(), "destName.length()"); this.sourceName = NameValidator.validateColumnName(sourceName); this.sourceDataType = Require.neqNull(sourceDataType, "sourceDataType"); this.destName = NameValidator.validateColumnName(destName); this.destDataType = Require.neqNull(destDataType, "destDataType"); - Require.gtZero(destName.length(), "destName.length()"); + this.reinterpParams = reinterpParams; } @Override @@ -70,16 +107,53 @@ public List initInputs(TrackingRowSet rowSet, Map) columnsOfInterest.get(sourceName); + + sourceColumnSource = localSourceColumnSource; return getColumns(); } @@ -115,12 +189,87 @@ public List getColumnArrays() { @NotNull @Override public ColumnSource getDataView() { - final ColumnSource result = sourceColumnSource.reinterpret(destDataType); - if (!result.getType().equals(destDataType)) { - throw new IllegalArgumentException("Reinterpreted column from " + sourceName + " has wrong data type " - + result.getType() + ", expected " + destDataType); + + final Function, ColumnSource> checkResult = result -> { + if (!result.getType().equals(destDataType)) { + throw new IllegalArgumentException("Reinterpreted column from " + sourceName + " has wrong data type " + + result.getType() + ", expected " + destDataType); + } + // noinspection unchecked + return (ColumnSource) result; + }; + + if (sourceColumnSource.allowsReinterpret(destDataType)) { + return checkResult.apply(sourceColumnSource.reinterpret(destDataType)); } - return result; + + // The only other conversions we will do are various time permutations. + // If we can just reinterpret as time, great! 
+ if (sourceColumnSource instanceof ConvertableTimeSource && + ((ConvertableTimeSource) sourceColumnSource).supportsTimeConversion()) { + if (destDataType == ZonedDateTime.class) { + return checkResult.apply(((ConvertableTimeSource) sourceColumnSource).toZonedDateTime(zone)); + } else if (destDataType == LocalDate.class) { + return checkResult.apply(((ConvertableTimeSource) sourceColumnSource).toLocalDate(zone)); + } else if (destDataType == LocalTime.class) { + return checkResult.apply(((ConvertableTimeSource) sourceColumnSource).toLocalTime(zone)); + } else if (destDataType == Instant.class) { + return checkResult.apply(((ConvertableTimeSource) sourceColumnSource).toInstant()); + } else if (destDataType == DateTime.class) { + return checkResult.apply(((ConvertableTimeSource) sourceColumnSource).toDateTime()); + } else if (destDataType == long.class || destDataType == Long.class) { + return checkResult.apply(((ConvertableTimeSource) sourceColumnSource).toEpochNano()); + } + } + + if (sourceDataType == ZonedDateTime.class && + (destDataType == LocalDate.class || destDataType == LocalTime.class)) { + // We can short circuit some ZDT conversions to try to be less wasteful + if (destDataType == LocalDate.class) { + return checkResult.apply(new LocalDateWrapperSource( + (ColumnSource) sourceColumnSource, zone)); + } else { + return checkResult.apply(new LocalTimeWrapperSource( + (ColumnSource) sourceColumnSource, zone)); + } + } + + // If we just want to go from X to long, this is fairly straightforward. Note that we skip LocalDate and + // LocalTime these are not linked to nanos of epoch in any way. You could argue that LocalDate is, but then + // we have to create even more garbage objects just to get the "time at midnight". Users should just do that + // directly. 
+ final ColumnSource intermediate; + if (sourceDataType == DateTime.class) { + intermediate = ReinterpretUtils.dateTimeToLongSource(sourceColumnSource); + } else if (sourceDataType == Instant.class) { + intermediate = ReinterpretUtils.instantToLongSource(sourceColumnSource); + } else if (sourceDataType == ZonedDateTime.class) { + intermediate = ReinterpretUtils.zonedDateTimeToLongSource(sourceColumnSource); + } else if (sourceDataType == long.class || sourceDataType == Long.class) { + // noinspection unchecked + intermediate = (ColumnSource) sourceColumnSource; + } else { + throw new IllegalArgumentException("Source column " + sourceName + " (Class=" + + sourceColumnSource.getClass() + ") - cannot be reinterpreted as " + destDataType); + } + + // Otherwise we'll have to go from long back to a wrapped typed source. + if (destDataType == Long.class || destDataType == long.class) { + return checkResult.apply(intermediate); + } else if (destDataType == DateTime.class) { + return checkResult.apply(new LongAsDateTimeColumnSource(intermediate)); + } else if (destDataType == ZonedDateTime.class) { + return checkResult.apply(new LongAsZonedDateTimeColumnSource(intermediate, zone)); + } else if (destDataType == Instant.class) { + return checkResult.apply(new LongAsInstantColumnSource(intermediate)); + } else if (destDataType == LocalDate.class) { + return checkResult.apply(new LongAsLocalDateColumnSource(intermediate, zone)); + } else if (destDataType == LocalTime.class) { + return checkResult.apply(new LongAsLocalTimeColumnSource(intermediate, zone)); + } + + throw new IllegalArgumentException("Source column " + sourceName + " (Class=" + sourceColumnSource.getClass() + + ") - cannot be reinterpreted as " + destDataType); } @NotNull @@ -141,7 +290,7 @@ public MatchPair getMatchPair() { @Override public WritableColumnSource newDestInstance(long size) { - throw new UnsupportedOperationException(); + throw new UnsupportedOperationException("ReinterpretedColumn should only be 
used with updateView() clauses."); } @Override @@ -173,6 +322,7 @@ public int hashCode() { result = 31 * result + sourceDataType.hashCode(); result = 31 * result + destName.hashCode(); result = 31 * result + destDataType.hashCode(); + result = 31 * result + Arrays.hashCode(reinterpParams); return result; } @@ -188,6 +338,6 @@ public boolean isStateless() { @Override public ReinterpretedColumn copy() { - return new ReinterpretedColumn<>(sourceName, sourceDataType, destName, destDataType); + return new ReinterpretedColumn<>(sourceName, sourceDataType, destName, destDataType, reinterpParams); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/AbstractLongArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/AbstractLongArraySource.java deleted file mode 100644 index 4b38208d604..00000000000 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/AbstractLongArraySource.java +++ /dev/null @@ -1,784 +0,0 @@ -/** - * Copyright (c) 2016-2022 Deephaven Data Labs and Patent Pending - */ -package io.deephaven.engine.table.impl.sources; - -import gnu.trove.list.array.TIntArrayList; -import io.deephaven.base.verify.Assert; -import io.deephaven.chunk.*; -import io.deephaven.engine.rowset.RowSet; -import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; -import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; -import io.deephaven.engine.rowset.chunkattributes.RowKeys; -import io.deephaven.chunk.attributes.Values; -import io.deephaven.engine.rowset.RowSequence; -import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; -import io.deephaven.util.SoftRecycler; -import org.jetbrains.annotations.NotNull; - -import java.util.Arrays; -import java.util.function.LongFunction; -import java.util.function.ToLongFunction; - -import static io.deephaven.util.QueryConstants.NULL_LONG; - -/** - * Shared implementation for 
DateTimeArraySource and LongArraySource (ArraySources that have 'long' as their underlying - * element type). - */ -public abstract class AbstractLongArraySource extends ArraySourceHelper implements MutableColumnSourceGetDefaults.LongBacked { - private static SoftRecycler recycler = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, - () -> new long[BLOCK_SIZE], null); - private long[][] blocks; - private transient long[][] prevBlocks; - - AbstractLongArraySource(Class type) { - super(type); - blocks = new long[INITIAL_NUMBER_OF_BLOCKS][]; - maxIndex = INITIAL_MAX_INDEX; - } - - @Override - public void startTrackingPrevValues() { - super.startTrackingPrev(blocks.length); - prevBlocks = new long[blocks.length][]; - } - - @Override - public void ensureCapacity(long capacity, boolean nullFill) { - ensureCapacity(capacity, blocks, prevBlocks, nullFill); - } - - /** - * This version of `prepareForParallelPopulation` will internally call {@link #ensureCapacity(long, boolean)} to - * make sure there is room for the incoming values. 
- * - * @param changedIndices indices in the dense table - */ - @Override - public void prepareForParallelPopulation(RowSet changedIndices) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); - if (ensurePreviousClockCycle == currentStep) { - throw new IllegalStateException("May not call ensurePrevious twice on one clock cycle!"); - } - ensurePreviousClockCycle = currentStep; - - if (changedIndices.isEmpty()) { - return; - } - - // ensure that this source will have sufficient capacity to store these indices, does not need to be - // null-filled as the values will be immediately written - ensureCapacity(changedIndices.lastRowKey() + 1, false); - - if (prevFlusher != null) { - prevFlusher.maybeActivate(); - } else { - // we are not tracking this source yet so we have nothing to do for the previous values - return; - } - - try (final RowSequence.Iterator it = changedIndices.getRowSequenceIterator()) { - do { - final long firstKey = it.peekNextKey(); - - final int block = (int) (firstKey >> LOG_BLOCK_SIZE); - - final long[] inUse; - if (prevBlocks[block] == null) { - prevBlocks[block] = recycler.borrowItem(); - prevInUse[block] = inUse = inUseRecycler.borrowItem(); - if (prevAllocated == null) { - prevAllocated = new TIntArrayList(); - } - prevAllocated.add(block); - } else { - inUse = prevInUse[block]; - } - - final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; - - it.getNextRowSequenceThrough(maxKeyInCurrentBlock).forAllRowKeys(key -> { - final int nextIndexWithinBlock = (int) (key & INDEX_MASK); - final int nextIndexWithinInUse = nextIndexWithinBlock >> LOG_INUSE_BITSET_SIZE; - final long nextMaskWithinInUse = 1L << (nextIndexWithinBlock & IN_USE_MASK); - prevBlocks[block][nextIndexWithinBlock] = blocks[block][nextIndexWithinBlock]; - inUse[nextIndexWithinInUse] |= nextMaskWithinInUse; - }); - } while (it.hasMore()); - } - } - - @Override - public final void set(long key, long value) { - final int block = (int) (key >> LOG_BLOCK_SIZE); - final int 
indexWithinBlock = (int) (key & INDEX_MASK); - if (shouldRecordPrevious(key, prevBlocks, recycler)) { - prevBlocks[block][indexWithinBlock] = blocks[block][indexWithinBlock]; - } - blocks[block][indexWithinBlock] = value; - } - - @Override - public void setNull(long key) { - set(key, NULL_LONG); - } - - @Override - public final long getLong(long rowKey) { - if (rowKey < 0 || rowKey > maxIndex) { - return NULL_LONG; - } - return getUnsafe(rowKey); - } - - public final long getUnsafe(long index) { - final int blockIndex = (int) (index >> LOG_BLOCK_SIZE); - final int indexWithinBlock = (int) (index & INDEX_MASK); - return blocks[blockIndex][indexWithinBlock]; - } - - public final long getAndSetUnsafe(long index, long newValue) { - final int blockIndex = (int) (index >> LOG_BLOCK_SIZE); - final int indexWithinBlock = (int) (index & INDEX_MASK); - final long oldValue = blocks[blockIndex][indexWithinBlock]; - if (oldValue != newValue) { - if (shouldRecordPrevious(index, prevBlocks, recycler)) { - prevBlocks[blockIndex][indexWithinBlock] = oldValue; - } - blocks[blockIndex][indexWithinBlock] = newValue; - } - return oldValue; - } - - public final long getAndAddUnsafe(long index, long addend) { - final int blockIndex = (int) (index >> LOG_BLOCK_SIZE); - final int indexWithinBlock = (int) (index & INDEX_MASK); - final long oldValue = blocks[blockIndex][indexWithinBlock]; - if (addend != 0) { - if (shouldRecordPrevious(index, prevBlocks, recycler)) { - prevBlocks[blockIndex][indexWithinBlock] = oldValue; - } - blocks[blockIndex][indexWithinBlock] = oldValue + addend; - } - return oldValue; - } - - @Override - public final long getPrevLong(long rowKey) { - if (rowKey < 0 || rowKey > maxIndex) { - return NULL_LONG; - } - final int blockIndex = (int) (rowKey >> LOG_BLOCK_SIZE); - final int indexWithinBlock = (int) (rowKey & INDEX_MASK); - if (shouldUsePrevious(rowKey)) { - return prevBlocks[blockIndex][indexWithinBlock]; - } else { - return blocks[blockIndex][indexWithinBlock]; 
- } - } - - @Override - public void shift(long start, long end, long offset) { - if (offset > 0) { - for (long i = (int) end; i >= start; i--) { - set((i + offset), getLong(i)); - } - } else { - for (int i = (int) start; i <= end; i++) { - set((i + offset), getLong(i)); - } - } - } - - public void move(long source, long dest, long length) { - if (prevBlocks != null) { - throw new UnsupportedOperationException(); - } - if (source == dest) { - return; - } - if (((source - dest) & INDEX_MASK) == 0 && (source & INDEX_MASK) == 0) { - // TODO: we can move full blocks! - } - if (source < dest) { - for (long ii = length - 1; ii >= 0; ) { - final long sourceKey = source + ii; - final long destKey = dest + ii; - final int sourceBlock = (int) (sourceKey >> LOG_BLOCK_SIZE); - final int sourceIndexWithinBlock = (int) (sourceKey & INDEX_MASK); - - final int destBlock = (int) (destKey >> LOG_BLOCK_SIZE); - final int destIndexWithinBlock = (int) (destKey & INDEX_MASK); - - final int valuesInBothBlocks = Math.min(destIndexWithinBlock + 1, sourceIndexWithinBlock + 1); - final int toMove = (ii + 1) < valuesInBothBlocks ? (int)(ii + 1): valuesInBothBlocks; - - System.arraycopy(blocks[sourceBlock], sourceIndexWithinBlock - toMove + 1, blocks[destBlock], destIndexWithinBlock - toMove + 1, toMove); - ii -= toMove; - } - } else { - for (long ii = 0; ii < length;) { - final long sourceKey = source + ii; - final long destKey = dest + ii; - final int sourceBlock = (int) (sourceKey >> LOG_BLOCK_SIZE); - final int sourceIndexWithinBlock = (int) (sourceKey & INDEX_MASK); - - final int destBlock = (int) (destKey >> LOG_BLOCK_SIZE); - final int destIndexWithinBlock = (int) (destKey & INDEX_MASK); - - final int valuesInBothBlocks = BLOCK_SIZE - Math.max(destIndexWithinBlock, sourceIndexWithinBlock); - final int toMove = (BLOCK_SIZE - ii) < valuesInBothBlocks ? 
(int)(BLOCK_SIZE - ii): valuesInBothBlocks; - - System.arraycopy(blocks[sourceBlock], sourceIndexWithinBlock, blocks[destBlock], destIndexWithinBlock, toMove); - ii += toMove; - } - } - } - - @Override - long[] allocateNullFilledBlock(int size) { - final long[] newBlock = new long[size]; - Arrays.fill(newBlock, NULL_LONG); - return newBlock; - } - - @Override - final long[] allocateBlock(int size) { - return new long[size]; - } - - @Override - void resetBlocks(long[][] newBlocks, long[][] newPrev) { - blocks = newBlocks; - prevBlocks = newPrev; - } - - @Override - long[][] getPrevBlocks() { - return prevBlocks; - } - - @Override - SoftRecycler getRecycler() { - return recycler; - } - - @Override - Object getBlock(int blockIndex) { - return blocks[blockIndex]; - } - - @Override - Object getPrevBlock(int blockIndex) { - return prevBlocks[blockIndex]; - } - - @Override - protected void fillSparseChunk(@NotNull final WritableChunk destGeneric, @NotNull final RowSequence indices) { - fillSparseLongChunk(destGeneric, indices); - } - - protected final void fillSparseLongChunk(@NotNull final WritableChunk destGeneric, @NotNull final RowSequence indices) { - final long sz = indices.size(); - if (sz == 0) { - destGeneric.setSize(0); - return; - } - final WritableLongChunk dest = destGeneric.asWritableLongChunk(); - final FillSparseChunkContext ctx = new FillSparseChunkContext<>(); - indices.forAllRowKeys((final long v) -> { - if (v >= ctx.capForCurrentBlock) { - ctx.currentBlockNo = getBlockNo(v); - ctx.capForCurrentBlock = (ctx.currentBlockNo + 1) << LOG_BLOCK_SIZE; - ctx.currentBlock = blocks[ctx.currentBlockNo]; - } - dest.set(ctx.offset++, ctx.currentBlock[(int) (v & INDEX_MASK)]); - }); - dest.setSize(ctx.offset); - } - - @Override - protected void fillSparsePrevChunk(@NotNull final WritableChunk destGeneric, @NotNull final RowSequence indices) { - fillSparsePrevLongChunk(destGeneric, indices); - } - - protected final void fillSparsePrevLongChunk(@NotNull final 
WritableChunk destGeneric, @NotNull final RowSequence indices) { - final long sz = indices.size(); - if (sz == 0) { - destGeneric.setSize(0); - return; - } - - if (prevFlusher == null) { - fillSparseLongChunk(destGeneric, indices); - return; - } - - final WritableLongChunk dest = destGeneric.asWritableLongChunk(); - final FillSparseChunkContext ctx = new FillSparseChunkContext<>(); - indices.forAllRowKeys((final long v) -> { - if (v >= ctx.capForCurrentBlock) { - ctx.currentBlockNo = getBlockNo(v); - ctx.capForCurrentBlock = (ctx.currentBlockNo + 1) << LOG_BLOCK_SIZE; - ctx.currentBlock = blocks[ctx.currentBlockNo]; - ctx.currentPrevBlock = prevBlocks[ctx.currentBlockNo]; - ctx.prevInUseBlock = prevInUse[ctx.currentBlockNo]; - } - - final int indexWithinBlock = (int) (v & INDEX_MASK); - final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; - final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); - final boolean usePrev = ctx.prevInUseBlock != null && (ctx.prevInUseBlock[indexWithinInUse] & maskWithinInUse) != 0; - dest.set(ctx.offset++, usePrev ? 
ctx.currentPrevBlock[indexWithinBlock] : ctx.currentBlock[indexWithinBlock]); - }); - dest.setSize(ctx.offset); - } - - @Override - protected void fillSparseChunkUnordered(@NotNull final WritableChunk destGeneric, @NotNull final LongChunk indices) { - fillSparseLongChunkUnordered(destGeneric, indices); - } - - protected final void fillSparseLongChunkUnordered(@NotNull final WritableChunk destGeneric, @NotNull final LongChunk indices) { - final WritableLongChunk dest = destGeneric.asWritableLongChunk(); - final int sz = indices.size(); - for (int ii = 0; ii < sz; ii++) { - final long fromIndex = indices.get(ii); - if (fromIndex == RowSequence.NULL_ROW_KEY) { - dest.set(ii, NULL_LONG); - continue; - } - final int blockNo = getBlockNo(fromIndex); - if (blockNo > blocks.length) { - dest.set(ii, NULL_LONG); - continue; - } - final long[] currentBlock = blocks[blockNo]; - dest.set(ii, currentBlock[(int) (fromIndex & INDEX_MASK)]); - } - dest.setSize(sz); - } - - @Override - protected void fillSparsePrevChunkUnordered(@NotNull final WritableChunk destGeneric, @NotNull final LongChunk indices) { - fillSparsePrevLongChunkUnordered(destGeneric, indices); - } - - protected final void fillSparsePrevLongChunkUnordered(@NotNull final WritableChunk destGeneric, @NotNull final LongChunk indices) { - final WritableLongChunk dest = destGeneric.asWritableLongChunk(); - final int sz = indices.size(); - for (int ii = 0; ii < sz; ii++) { - final long fromIndex = indices.get(ii); - if (fromIndex == RowSequence.NULL_ROW_KEY) { - dest.set(ii, NULL_LONG); - continue; - } - final int blockNo = getBlockNo(fromIndex); - if (blockNo > blocks.length) { - dest.set(ii, NULL_LONG); - continue; - } - final long[] currentBlock = shouldUsePrevious(fromIndex) ? 
prevBlocks[blockNo] : blocks[blockNo]; - dest.set(ii, currentBlock[(int) (fromIndex & INDEX_MASK)]); - } - dest.setSize(sz); - } - - protected void fillSparseChunk(@NotNull final WritableChunk destGeneric, @NotNull final RowSequence indices, final LongFunction mapper) { - final long sz = indices.size(); - if (sz == 0) { - destGeneric.setSize(0); - return; - } - final WritableObjectChunk dest = destGeneric.asWritableObjectChunk(); - final FillSparseChunkContext ctx = new FillSparseChunkContext<>(); - indices.forAllRowKeys((final long v) -> { - if (v >= ctx.capForCurrentBlock) { - ctx.currentBlockNo = getBlockNo(v); - ctx.capForCurrentBlock = (ctx.currentBlockNo + 1) << LOG_BLOCK_SIZE; - ctx.currentBlock = blocks[ctx.currentBlockNo]; - } - dest.set(ctx.offset++, mapper.apply(ctx.currentBlock[(int) (v & INDEX_MASK)])); - }); - dest.setSize(ctx.offset); - } - - protected void fillSparsePrevChunk(@NotNull final WritableChunk destGeneric, @NotNull final RowSequence indices, final LongFunction mapper) { - final long sz = indices.size(); - if (sz == 0) { - destGeneric.setSize(0); - return; - } - - if (prevFlusher == null) { - fillSparseChunk(destGeneric, indices, mapper); - return; - } - - final WritableObjectChunk dest = destGeneric.asWritableObjectChunk(); - final FillSparseChunkContext ctx = new FillSparseChunkContext<>(); - indices.forAllRowKeys((final long v) -> { - if (v >= ctx.capForCurrentBlock) { - ctx.currentBlockNo = getBlockNo(v); - ctx.capForCurrentBlock = (ctx.currentBlockNo + 1) << LOG_BLOCK_SIZE; - ctx.currentBlock = blocks[ctx.currentBlockNo]; - ctx.currentPrevBlock = prevBlocks[ctx.currentBlockNo]; - ctx.prevInUseBlock = prevInUse[ctx.currentBlockNo]; - } - - final int indexWithinBlock = (int) (v & INDEX_MASK); - final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; - final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); - final boolean usePrev = ctx.prevInUseBlock != null && (ctx.prevInUseBlock[indexWithinInUse] & 
maskWithinInUse) != 0; - final long currValue = usePrev ? ctx.currentPrevBlock[indexWithinBlock] : ctx.currentBlock[indexWithinBlock]; - dest.set(ctx.offset++, mapper.apply(currValue)); - }); - dest.setSize(ctx.offset); - } - - protected void fillSparseChunkUnordered(@NotNull final WritableChunk destGeneric, @NotNull final LongChunk indices, final LongFunction mapper) { - final WritableObjectChunk dest = destGeneric.asWritableObjectChunk(); - final int sz = indices.size(); - final R nullValue = mapper.apply(NULL_LONG); - for (int ii = 0; ii < sz; ii++) { - final long fromIndex = indices.get(ii); - if (fromIndex == RowSequence.NULL_ROW_KEY) { - dest.set(ii, nullValue); - continue; - } - final int blockNo = getBlockNo(fromIndex); - if (blockNo > blocks.length) { - dest.set(ii, nullValue); - continue; - } - final long[] currentBlock = blocks[blockNo]; - dest.set(ii, mapper.apply(currentBlock[(int) (fromIndex & INDEX_MASK)])); - } - dest.setSize(sz); - } - - protected void fillSparsePrevChunkUnordered(@NotNull final WritableChunk destGeneric, @NotNull final LongChunk indices, final LongFunction mapper) { - final WritableObjectChunk dest = destGeneric.asWritableObjectChunk(); - final int sz = indices.size(); - final R nullValue = mapper.apply(NULL_LONG); - for (int ii = 0; ii < sz; ii++) { - final long fromIndex = indices.get(ii); - if (fromIndex == RowSequence.NULL_ROW_KEY) { - dest.set(ii, nullValue); - continue; - } - final int blockNo = getBlockNo(fromIndex); - if (blockNo > blocks.length) { - dest.set(ii, nullValue); - continue; - } - final long[] currentBlock = shouldUsePrevious(fromIndex) ? 
prevBlocks[blockNo] : blocks[blockNo]; - dest.set(ii, mapper.apply(currentBlock[(int) (fromIndex & INDEX_MASK)])); - } - dest.setSize(sz); - } - - @Override - public long resetWritableChunkToBackingStore(@NotNull ResettableWritableChunk chunk, long position) { - Assert.eqNull(prevInUse, "prevInUse"); - final int blockNo = getBlockNo(position); - final long [] backingArray = blocks[blockNo]; - chunk.asResettableWritableLongChunk().resetFromTypedArray(backingArray, 0, BLOCK_SIZE); - return blockNo << LOG_BLOCK_SIZE; - } - - @Override - public long resetWritableChunkToBackingStoreSlice(@NotNull ResettableWritableChunk chunk, long position) { - Assert.eqNull(prevInUse, "prevInUse"); - final int blockNo = getBlockNo(position); - final long [] backingArray = blocks[blockNo]; - final long firstPosition = ((long) blockNo) << LOG_BLOCK_SIZE; - final int offset = (int)(position - firstPosition); - final int capacity = BLOCK_SIZE - offset; - chunk.asResettableWritableLongChunk().resetFromTypedArray(backingArray, offset, capacity); - return capacity; - } - - @Override - void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk src) { - final LongChunk chunk = src.asLongChunk(); - final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); - - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); - - if (trackPrevious) { - prevFlusher.maybeActivate(); - } - - int offset = 0; - // This helps us reduce the number of calls to Chunk.isAlias - long[] knownUnaliasedBlock = null; - for (int ii = 0; ii < ranges.size(); ii += 2) { - long firstKey = ranges.get(ii); - final long lastKey = ranges.get(ii + 1); - - while (firstKey <= lastKey) { - final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; - final long lastKeyToUse = Math.min(maxKeyInCurrentBlock, lastKey); - final int length = (int) (lastKeyToUse - firstKey + 1); - - final int block = (int) (firstKey >> LOG_BLOCK_SIZE); - final int sIndexWithinBlock = (int) 
(firstKey & INDEX_MASK); - final long[] inner = blocks[block]; - - if (inner != knownUnaliasedBlock && chunk.isAlias(inner)) { - throw new UnsupportedOperationException("Source chunk is an alias for target data"); - } - knownUnaliasedBlock = inner; - - // This 'if' with its constant condition should be very friendly to the branch predictor. - if (trackPrevious) { - // this should be vectorized - for (int jj = 0; jj < length; ++jj) { - if (shouldRecordPrevious(firstKey + jj, prevBlocks, recycler)) { - prevBlocks[block][sIndexWithinBlock + jj] = inner[sIndexWithinBlock + jj]; - } - } - } - - chunk.copyToTypedArray(offset, inner, sIndexWithinBlock, length); - firstKey += length; - offset += length; - } - } - } - - public void copyFromChunk(long firstKey, long totalLength, Chunk src, int offset) { - if (totalLength == 0) { - return; - } - final LongChunk chunk = src.asLongChunk(); - - final long lastKey = firstKey + totalLength - 1; - - while (firstKey <= lastKey) { - final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; - final long lastKeyToUse = Math.min(maxKeyInCurrentBlock, lastKey); - final int length = (int) (lastKeyToUse - firstKey + 1); - - final int block = (int) (firstKey >> LOG_BLOCK_SIZE); - final int sIndexWithinBlock = (int) (firstKey & INDEX_MASK); - final long[] inner = blocks[block]; - - chunk.copyToTypedArray(offset, inner, sIndexWithinBlock, length); - firstKey += length; - offset += length; - } - } - - @Override - void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src) { - final LongChunk chunk = src.asLongChunk(); - final LongChunk keys = rowSequence.asRowKeyChunk(); - - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); - - if (trackPrevious) { - prevFlusher.maybeActivate(); - } - - for (int ii = 0; ii < keys.size(); ) { - final long firstKey = keys.get(ii); - final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; - int lastII = ii; - while (lastII + 1 < 
keys.size() && keys.get(lastII + 1) <= maxKeyInCurrentBlock) { - ++lastII; - } - - final int block = (int) (firstKey >> LOG_BLOCK_SIZE); - final long[] inner = blocks[block]; - - if (chunk.isAlias(inner)) { - throw new UnsupportedOperationException("Source chunk is an alias for target data"); - } - - while (ii <= lastII) { - final long key = keys.get(ii); - final int indexWithinBlock = (int) (key & INDEX_MASK); - - if (trackPrevious) { - if (shouldRecordPrevious(key, prevBlocks, recycler)) { - prevBlocks[block][indexWithinBlock] = inner[indexWithinBlock]; - } - } - inner[indexWithinBlock] = chunk.get(ii); - ++ii; - } - } - } - - void fillFromChunkByRanges(final @NotNull RowSequence rowSequence, final Chunk src, final ToLongFunction mapper) { - final ObjectChunk chunk = src.asObjectChunk(); - final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); - - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); - - if (trackPrevious) { - prevFlusher.maybeActivate(); - } - - int offset = 0; - // This helps us reduce the number of calls to Chunk.isAlias - long[] knownUnaliasedBlock = null; - for (int ii = 0; ii < ranges.size(); ii += 2) { - long firstKey = ranges.get(ii); - final long lastKey = ranges.get(ii + 1); - - while (firstKey <= lastKey) { - final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; - final long lastKeyToUse = Math.min(maxKeyInCurrentBlock, lastKey); - final int length = (int) (lastKeyToUse - firstKey + 1); - - final int block = (int) (firstKey >> LOG_BLOCK_SIZE); - final int sIndexWithinBlock = (int) (firstKey & INDEX_MASK); - final long[] inner = blocks[block]; - - if (inner != knownUnaliasedBlock && chunk.isAlias(inner)) { - throw new UnsupportedOperationException("Source chunk is an alias for target data"); - } - knownUnaliasedBlock = inner; - - // This 'if' with its constant condition should be very friendly to the branch predictor. 
- if (trackPrevious) { - // this should be vectorized - for (int jj = 0; jj < length; ++jj) { - if (shouldRecordPrevious(firstKey + jj, prevBlocks, recycler)) { - prevBlocks[block][sIndexWithinBlock + jj] = inner[sIndexWithinBlock + jj]; - } - } - } - - for (int jj = 0; jj < length; ++jj) { - inner[sIndexWithinBlock + jj] = mapper.applyAsLong(chunk.get(offset + jj)); - } - firstKey += length; - offset += length; - } - } - } - - void fillFromChunkByKeys(final @NotNull RowSequence rowSequence, final Chunk src, final ToLongFunction mapper) { - final ObjectChunk chunk = src.asObjectChunk(); - final LongChunk keys = rowSequence.asRowKeyChunk(); - - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); - - if (trackPrevious) { - prevFlusher.maybeActivate(); - } - - for (int ii = 0; ii < keys.size(); ) { - final long firstKey = keys.get(ii); - final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; - int lastII = ii; - while (lastII + 1 < keys.size() && keys.get(lastII + 1) <= maxKeyInCurrentBlock) { - ++lastII; - } - - final int block = (int) (firstKey >> LOG_BLOCK_SIZE); - final long[] inner = blocks[block]; - - if (chunk.isAlias(inner)) { - throw new UnsupportedOperationException("Source chunk is an alias for target data"); - } - - while (ii <= lastII) { - final int indexWithinBlock = (int) (keys.get(ii) & INDEX_MASK); - - if (trackPrevious) { - if (shouldRecordPrevious(ii, prevBlocks, recycler)) { - prevBlocks[block][indexWithinBlock] = inner[indexWithinBlock]; - } - } - inner[indexWithinBlock] = mapper.applyAsLong(chunk.get(ii)); - ++ii; - } - } - } - - @Override - public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, @NotNull LongChunk keys) { - if (keys.size() == 0) { - return; - } - final LongChunk chunk = src.asLongChunk(); - - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); - - if (trackPrevious) 
{ - prevFlusher.maybeActivate(); - } - - for (int ii = 0; ii < keys.size(); ) { - final long firstKey = keys.get(ii); - final long minKeyInCurrentBlock = firstKey & ~INDEX_MASK; - final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; - - final int block = (int) (firstKey >> LOG_BLOCK_SIZE); - final long[] inner = blocks[block]; - - if (chunk.isAlias(inner)) { - throw new UnsupportedOperationException("Source chunk is an alias for target data"); - } - - long key = keys.get(ii); - do { - final int indexWithinBlock = (int) (key & INDEX_MASK); - - if (trackPrevious) { - if (shouldRecordPrevious(key, prevBlocks, recycler)) { - prevBlocks[block][indexWithinBlock] = inner[indexWithinBlock]; - } - } - inner[indexWithinBlock] = chunk.get(ii); - ++ii; - } while (ii < keys.size() && (key = keys.get(ii)) >= minKeyInCurrentBlock && key <= maxKeyInCurrentBlock); - } - } - - void fillFromChunkUnordered(@NotNull Chunk src, @NotNull LongChunk keys, final ToLongFunction mapper) { - if (keys.size() == 0) { - return; - } - final ObjectChunk chunk = src.asObjectChunk(); - - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); - - if (trackPrevious) { - prevFlusher.maybeActivate(); - } - - for (int ii = 0; ii < keys.size(); ) { - final long firstKey = keys.get(ii); - final long minKeyInCurrentBlock = firstKey & ~INDEX_MASK; - final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; - - final int block = (int) (firstKey >> LOG_BLOCK_SIZE); - final long[] inner = blocks[block]; - - if (chunk.isAlias(inner)) { - throw new UnsupportedOperationException("Source chunk is an alias for target data"); - } - - long key = keys.get(ii); - do { - final int indexWithinBlock = (int) (key & INDEX_MASK); - - if (trackPrevious) { - if (shouldRecordPrevious(key, prevBlocks, recycler)) { - prevBlocks[block][indexWithinBlock] = inner[indexWithinBlock]; - } - } - inner[indexWithinBlock] = mapper.applyAsLong(chunk.get(ii)); - ++ii; - } while 
(ii < keys.size() && (key = keys.get(ii)) >= minKeyInCurrentBlock && key <= maxKeyInCurrentBlock); - } - } -} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/AbstractSparseLongArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/AbstractSparseLongArraySource.java deleted file mode 100644 index 8baacace545..00000000000 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/AbstractSparseLongArraySource.java +++ /dev/null @@ -1,965 +0,0 @@ -/** - * Copyright (c) 2016-2022 Deephaven Data Labs and Patent Pending - */ -/* - * --------------------------------------------------------------------------------------------------------------------- - * AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY - for any changes edit CharacterSparseArraySource and regenerate - * --------------------------------------------------------------------------------------------------------------------- - */ -package io.deephaven.engine.table.impl.sources; - -import io.deephaven.engine.table.impl.DefaultGetContext; -import io.deephaven.chunk.*; -import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; -import io.deephaven.engine.rowset.chunkattributes.RowKeys; -import io.deephaven.chunk.attributes.Values; -import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; -import io.deephaven.engine.rowset.RowSet; -import io.deephaven.engine.table.ColumnSource; -import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; -import io.deephaven.engine.updategraph.UpdateCommitter; -import io.deephaven.engine.table.impl.sources.sparse.LongOneOrN; -import io.deephaven.engine.rowset.RowSequence; -import io.deephaven.util.SoftRecycler; -import gnu.trove.list.array.TLongArrayList; -import org.apache.commons.lang3.mutable.MutableObject; -import org.jetbrains.annotations.NotNull; - -import java.util.Arrays; - -// region boxing imports -import static 
io.deephaven.util.QueryConstants.NULL_LONG; -import static io.deephaven.util.type.TypeUtils.box; -import static io.deephaven.util.type.TypeUtils.unbox; -// endregion boxing imports - -import static io.deephaven.engine.table.impl.sources.sparse.SparseConstants.*; - -/** - * Sparse array source for Long. - *

- * The C-haracterSparseArraySource is replicated to all other types with - * io.deephaven.engine.table.impl.sources.Replicate. - * - * (C-haracter is deliberately spelled that way in order to prevent Replicate from altering this very comment). - */ -abstract public class AbstractSparseLongArraySource extends SparseArrayColumnSource implements MutableColumnSourceGetDefaults.LongBacked { - // region recyclers - private static final SoftRecycler recycler = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, - () -> new long[BLOCK_SIZE], null); - private static final SoftRecycler recycler2 = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, - () -> new long[BLOCK2_SIZE][], null); - private static final SoftRecycler recycler1 = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, - () -> new LongOneOrN.Block2[BLOCK1_SIZE], null); - private static final SoftRecycler recycler0 = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, - () -> new LongOneOrN.Block1[BLOCK0_SIZE], null); - // endregion recyclers - - /** - * The presence of a prevFlusher means that this ArraySource wants to track previous values. If prevFlusher is null, - * the ArraySource does not want (or does not yet want) to track previous values. Deserialized ArraySources never - * track previous values. - */ - protected transient UpdateCommitter prevFlusher = null; - - /** - * If prepareForParallelPopulation has been called, we need not check previous values when filling. - */ - private transient long prepareForParallelPopulationClockCycle = -1; - - /** - * Our previous page table could be very sparse, and we do not want to read through millions of nulls to find out - * what blocks to recycle. Instead we maintain a list of blocks that we have allocated (as the key shifted by - * BLOCK0_SHIFT). We recycle those blocks in the PrevFlusher; and accumulate the set of blocks that must be - * recycled from the next level array, and so on until we recycle the top-level prevBlocks and prevInUse arrays. 
- */ - private transient final TLongArrayList blocksToFlush = new TLongArrayList(); - - protected LongOneOrN.Block0 blocks; - protected transient LongOneOrN.Block0 prevBlocks; - - // region constructor - AbstractSparseLongArraySource(Class type) { - super(type); - blocks = new LongOneOrN.Block0(); - } - // endregion constructor - - @Override - public void ensureCapacity(long capacity, boolean nullFill) { - // Nothing to do here. Sparse array sources allocate on-demand and always null-fill. - } - - // region setNull - @Override - public void setNull(long key) { - final long [] blocks2 = blocks.getInnermostBlockByKeyOrNull(key); - if (blocks2 == null) { - return; - } - final int indexWithinBlock = (int) (key & INDEX_MASK); - if (blocks2[indexWithinBlock] == NULL_LONG) { - return; - } - - final long [] prevBlocksInner = shouldRecordPrevious(key); - if (prevBlocksInner != null) { - prevBlocksInner[indexWithinBlock] = blocks2[indexWithinBlock]; - } - blocks2[indexWithinBlock] = NULL_LONG; - } - // endregion setNull - - @Override - public final void set(long key, long value) { - final int block0 = (int) (key >> BLOCK0_SHIFT) & BLOCK0_MASK; - final int block1 = (int) (key >> BLOCK1_SHIFT) & BLOCK1_MASK; - final int block2 = (int) (key >> BLOCK2_SHIFT) & BLOCK2_MASK; - final int indexWithinBlock = (int) (key & INDEX_MASK); - - final long [] blocksInner = ensureBlock(block0, block1, block2); - final long [] prevBlocksInner = shouldRecordPrevious(key); - if (prevBlocksInner != null) { - prevBlocksInner[indexWithinBlock] = blocksInner[indexWithinBlock]; - } - blocksInner[indexWithinBlock] = value; - } - - @Override - public void shift(final RowSet keysToShift, final long shiftDelta) { - final RowSet.SearchIterator it = (shiftDelta > 0) ? 
keysToShift.reverseIterator() : keysToShift.searchIterator(); - it.forEachLong((i) -> { - set(i + shiftDelta, getLong(i)); - setNull(i); - return true; - }); - } - - // region boxed methods - // endregion boxed methods - - // region primitive get - @Override - public final long getLong(long rowKey) { - if (rowKey < 0) { - return NULL_LONG; - } - return getLongFromBlock(blocks, rowKey); - } - - - @Override - public final long getPrevLong(long rowKey) { - if (rowKey < 0) { - return NULL_LONG; - } - if (shouldUsePrevious(rowKey)) { - return getLongFromBlock(prevBlocks, rowKey); - } - - return getLongFromBlock(blocks, rowKey); - } - - private long getLongFromBlock(LongOneOrN.Block0 blocks, long key) { - final long [] blocks2 = blocks.getInnermostBlockByKeyOrNull(key); - if (blocks2 == null) { - return NULL_LONG; - } - return blocks2[(int)(key & INDEX_MASK)]; - } - // endregion primitive get - - // region allocateNullFilledBlock - @SuppressWarnings("SameParameterValue") - final long [] allocateNullFilledBlock(int size) { - final long [] newBlock = new long[size]; - Arrays.fill(newBlock, NULL_LONG); - return newBlock; - } - // endregion allocateNullFilledBlock - - /** - * Make sure that we have an allocated block at the given point, allocating all of the required parents. - * @return {@code blocks.get(block0).get(block1).get(block2)}, which is non-null. 
- */ - long [] ensureBlock(final int block0, final int block1, final int block2) { - blocks.ensureIndex(block0, null); - LongOneOrN.Block1 blocks0 = blocks.get(block0); - if (blocks0 == null) { - blocks.set(block0, blocks0 = new LongOneOrN.Block1()); - } - LongOneOrN.Block2 blocks1 = blocks0.get(block1); - if (blocks1 == null) { - blocks0.ensureIndex(block1, null); - blocks0.set(block1, blocks1 = new LongOneOrN.Block2()); - } - - long [] result = blocks1.get(block2); - if (result == null) { - blocks1.ensureIndex(block2, null); - // we do not use the recycler here, because the recycler need not sanitize the block (the inUse recycling - // does that); yet we would like squeaky clean null filled blocks here. - result = allocateNullFilledBlock(BLOCK_SIZE); - blocks1.set(block2, result); - } - return result; - } - - /** - * Make sure that we have an allocated previous and inuse block at the given point, allocating all of the required - * parents. - * @return {@code prevBlocks.get(block0).get(block1).get(block2)}, which is non-null. 
- */ - private long [] ensurePrevBlock(final long key, final int block0, final int block1, final int block2) { - if (prevBlocks == null) { - prevBlocks = new LongOneOrN.Block0(); - prevInUse = new LongOneOrN.Block0(); - } - prevBlocks.ensureIndex(block0, recycler0); - prevInUse.ensureIndex(block0, inUse0Recycler); - LongOneOrN.Block1 blocks0 = prevBlocks.get(block0); - final LongOneOrN.Block1 inUse0; - if (blocks0 == null) { - prevBlocks.set(block0, blocks0 = new LongOneOrN.Block1()); - prevInUse.set(block0, inUse0 = new LongOneOrN.Block1()); - } else { - inUse0 = prevInUse.get(block0); - } - LongOneOrN.Block2 blocks1 = blocks0.get(block1); - final LongOneOrN.Block2 inUse1; - if (blocks1 == null) { - blocks0.ensureIndex(block1, recycler1); - inUse0.ensureIndex(block1, inUse1Recycler); - blocks0.set(block1, blocks1 = new LongOneOrN.Block2()); - inUse0.set(block1, inUse1 = new LongOneOrN.Block2()); - } else { - inUse1 = inUse0.get(block1); - } - long[] result = blocks1.get(block2); - if (result == null) { - blocks1.ensureIndex(block2, recycler2); - inUse1.ensureIndex(block2, inUse2Recycler); - - blocks1.set(block2, result = recycler.borrowItem()); - inUse1.set(block2, inUseRecycler.borrowItem()); - - blocksToFlush.add(key >> BLOCK2_SHIFT); - } - return result; - } - - @Override - public void startTrackingPrevValues() { - if (prevFlusher != null) { - throw new IllegalStateException("Can't call startTrackingPrevValues() twice: " + - this.getClass().getCanonicalName()); - } - prevFlusher = new UpdateCommitter<>(this, AbstractSparseLongArraySource::commitUpdates); - } - - private void commitUpdates() { - blocksToFlush.sort(); - - int destinationOffset = 0; - long lastBlock2Key = -1; - - final LongOneOrN.Block0 localPrevBlocks = prevBlocks; - final LongOneOrN.Block0 localPrevInUse = prevInUse; - - if (localPrevBlocks == null) { - assert prevInUse == null; - return; - } - - // there is no reason to allow these to be used anymore; instead we just null them out so that any - 
// getPrev calls will immediately return get(). - prevInUse = null; - prevBlocks = null; - - // we are clearing out values from block0, block1, block2, block - // we are accumulating values of block0, block1, block2 - for (int ii = 0; ii < blocksToFlush.size(); ii++) { - // blockKey = block0 | block1 | block2 - final long blockKey = blocksToFlush.getQuick(ii); - final long key = blockKey << LOG_BLOCK_SIZE; - final long block2key = key >> BLOCK1_SHIFT; - if (block2key != lastBlock2Key) { - blocksToFlush.set(destinationOffset++, block2key); - lastBlock2Key = block2key; - } - - final int block0 = (int) (key >> BLOCK0_SHIFT) & BLOCK0_MASK; - final int block1 = (int) (key >> BLOCK1_SHIFT) & BLOCK1_MASK; - final int block2 = (int) (key >> BLOCK2_SHIFT) & BLOCK2_MASK; - - final LongOneOrN.Block2 blocks1 = localPrevBlocks.get(block0).get(block1); - final LongOneOrN.Block2 inUse1 = localPrevInUse.get(block0).get(block1); - final long [] pb = blocks1.get(block2); - final long[] inuse = inUse1.get(block2); - - inUse1.set(block2, null); - blocks1.set(block2, null); - - recycler.returnItem(pb); - inUseRecycler.returnItem(inuse); - } - - blocksToFlush.remove(destinationOffset, blocksToFlush.size() - destinationOffset); - destinationOffset = 0; - long lastBlock1key = -1; - - // we are clearing out values from block0, block1, block2 - // we are accumulating values of block0, block1 - for (int ii = 0; ii < blocksToFlush.size(); ii++) { - final long blockKey = blocksToFlush.getQuick(ii); - // blockKey = block0 | block1 - final long key = blockKey << BLOCK1_SHIFT; - final long block1Key = key >> BLOCK0_SHIFT; - - if (block1Key != lastBlock1key) { - blocksToFlush.set(destinationOffset++, block1Key); - lastBlock1key = block1Key; - } - - final int block0 = (int) (key >> BLOCK0_SHIFT) & BLOCK0_MASK; - final int block1 = (int) (key >> BLOCK1_SHIFT) & BLOCK1_MASK; - - final LongOneOrN.Block1 blocks0 = localPrevBlocks.get(block0); - final LongOneOrN.Block1 prevs0 = 
localPrevInUse.get(block0); - final LongOneOrN.Block2 pb2 = blocks0.get(block1); - final LongOneOrN.Block2 inuse = prevs0.get(block1); - - prevs0.set(block1, null); - blocks0.set(block1, null); - - pb2.maybeRecycle(recycler2); - inuse.maybeRecycle(inUse2Recycler); - } - - blocksToFlush.remove(destinationOffset, blocksToFlush.size() - destinationOffset); - - // we are clearing out values from block0, block1 - for (int ii = 0; ii < blocksToFlush.size(); ii++) { - final int block0 = (int) (blocksToFlush.getQuick(ii)) & BLOCK0_MASK; - final LongOneOrN.Block1 pb1 = localPrevBlocks.get(block0); - final LongOneOrN.Block1 inuse = localPrevInUse.get(block0); - - pb1.maybeRecycle(recycler1); - inuse.maybeRecycle(inUse1Recycler); - - localPrevInUse.set(block0, null); - localPrevBlocks.set(block0, null); - } - - blocksToFlush.clear(); - - // and finally recycle the top level block of blocks of blocks of blocks - localPrevBlocks.maybeRecycle(recycler0); - localPrevInUse.maybeRecycle(inUse0Recycler); - } - - /** - * Decides whether to record the previous value. - * @param key the row key to record - * @return If the caller should record the previous value, returns prev inner block, the value - * {@code prevBlocks.get(block0).get(block1).get(block2)}, which is non-null. Otherwise (if the caller should not - * record values), returns null. - */ - final long [] shouldRecordPrevious(final long key) { - if (!shouldTrackPrevious()) { - return null; - } - // If we want to track previous values, we make sure we are registered with the UpdateGraphProcessor. 
- prevFlusher.maybeActivate(); - - final int block0 = (int) (key >> BLOCK0_SHIFT) & BLOCK0_MASK; - final int block1 = (int) (key >> BLOCK1_SHIFT) & BLOCK1_MASK; - final int block2 = (int) (key >> BLOCK2_SHIFT) & BLOCK2_MASK; - - final int indexWithinBlock = (int) (key & INDEX_MASK); - final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; - final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); - - final long[] prevBlockInner = ensurePrevBlock(key, block0, block1, block2); - final long[] inUse = prevInUse.get(block0).get(block1).get(block2); - - // Set value only if not already in use - if ((inUse[indexWithinInUse] & maskWithinInUse) == 0) { - inUse[indexWithinInUse] |= maskWithinInUse; - return prevBlockInner; - } - return null; - } - - @Override - public void prepareForParallelPopulation(RowSet changedRows) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); - if (prepareForParallelPopulationClockCycle == currentStep) { - throw new IllegalStateException("May not call prepareForParallelPopulation twice on one clock cycle!"); - } - prepareForParallelPopulationClockCycle = currentStep; - - if (changedRows.isEmpty()) { - return; - } - - if (prevFlusher != null) { - prevFlusher.maybeActivate(); - } - - try (final RowSequence.Iterator it = changedRows.getRowSequenceIterator()) { - do { - final long firstKey = it.peekNextKey(); - final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; - - final int block0 = (int) (firstKey >> BLOCK0_SHIFT) & BLOCK0_MASK; - final int block1 = (int) (firstKey >> BLOCK1_SHIFT) & BLOCK1_MASK; - final int block2 = (int) (firstKey >> BLOCK2_SHIFT) & BLOCK2_MASK; - final long[] block = ensureBlock(block0, block1, block2); - - if (prevFlusher == null) { - it.advance(maxKeyInCurrentBlock + 1); - continue; - } - - final long[] prevBlock = ensurePrevBlock(firstKey, block0, block1, block2); - final long[] inUse = prevInUse.get(block0).get(block1).get(block2); - assert inUse != null; - - 
it.getNextRowSequenceThrough(maxKeyInCurrentBlock).forAllRowKeys(key -> { - final int indexWithinBlock = (int) (key & INDEX_MASK); - final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; - final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); - - prevBlock[indexWithinBlock] = block[indexWithinBlock]; - inUse[indexWithinInUse] |= maskWithinInUse; - }); - } while (it.hasMore()); - } - } - - /** - * This method supports the 'getPrev' method for its inheritors, doing some of the 'inUse' housekeeping that is - * common to all inheritors. - * @return true if the inheritor should return a value from its "prev" data structure; false if it should return a - * value from its "current" data structure. - */ - private boolean shouldUsePrevious(final long index) { - if (prevFlusher == null) { - return false; - } - - if (prevInUse == null) { - return false; - } - - final long [] inUse = prevInUse.getInnermostBlockByKeyOrNull(index); - if (inUse == null) { - return false; - } - - final int indexWithinBlock = (int) (index & INDEX_MASK); - final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; - final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); - - return (inUse[indexWithinInUse] & maskWithinInUse) != 0; - } - - // region fillByRanges - @Override - void fillByRanges(@NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { - final WritableLongChunk chunk = dest.asWritableLongChunk(); - final FillByContext ctx = new FillByContext<>(); - rowSequence.forAllRowKeyRanges((long firstKey, final long lastKey) -> { - if (firstKey > ctx.maxKeyInCurrentBlock) { - ctx.block = blocks.getInnermostBlockByKeyOrNull(firstKey); - ctx.maxKeyInCurrentBlock = firstKey | INDEX_MASK; - } - while (true) { - final long rightKeyForThisBlock = Math.min(lastKey, ctx.maxKeyInCurrentBlock); - final int length = (int) (rightKeyForThisBlock - firstKey + 1); - if (ctx.block == null) { - chunk.fillWithNullValue(ctx.offset, length); - } else 
{ - final int sIndexWithinBlock = (int)(firstKey & INDEX_MASK); - // for the benefit of code generation. - final int offset = ctx.offset; - final long[] block = ctx.block; - // region copyFromTypedArray - chunk.copyFromTypedArray(block, sIndexWithinBlock, offset, length); - // endregion copyFromTypedArray - } - ctx.offset += length; - firstKey += length; - if (firstKey > lastKey) { - break; - } - ctx.block = blocks.getInnermostBlockByKeyOrNull(firstKey); - ctx.maxKeyInCurrentBlock = firstKey | INDEX_MASK; - } - }); - dest.setSize(ctx.offset); - } - // endregion fillByRanges - - // region fillByKeys - @Override - void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { - final WritableLongChunk chunk = dest.asWritableLongChunk(); - final FillByContext ctx = new FillByContext<>(); - rowSequence.forEachRowKey((final long v) -> { - if (v > ctx.maxKeyInCurrentBlock) { - ctx.block = blocks.getInnermostBlockByKeyOrNull(v); - ctx.maxKeyInCurrentBlock = v | INDEX_MASK; - } - if (ctx.block == null) { - chunk.fillWithNullValue(ctx.offset, 1); - } else { - chunk.set(ctx.offset, ctx.block[(int) (v & INDEX_MASK)]); - } - ++ctx.offset; - return true; - }); - dest.setSize(ctx.offset); - } - // endregion fillByKeys - - // region fillByUnRowSequence - @Override - void fillByUnRowSequence(@NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableLongChunk longChunk = dest.asWritableLongChunk(); - for (int ii = 0; ii < keys.size(); ) { - final long firstKey = keys.get(ii); - if (firstKey == RowSequence.NULL_ROW_KEY) { - longChunk.set(ii++, NULL_LONG); - continue; - } - final long masked = firstKey & ~INDEX_MASK; - int lastII = ii; - while (lastII + 1 < keys.size()) { - final int nextII = lastII + 1; - final long nextKey = keys.get(nextII); - final long nextMasked = nextKey & ~INDEX_MASK; - if (nextMasked != masked) { - break; - } - lastII = nextII; - } - final long [] block = blocks.getInnermostBlockByKeyOrNull(firstKey); - if (block == null) { - 
longChunk.fillWithNullValue(ii, lastII - ii + 1); - ii = lastII + 1; - continue; - } - while (ii <= lastII) { - final int indexWithinBlock = (int) (keys.get(ii) & INDEX_MASK); - longChunk.set(ii++, block[indexWithinBlock]); - } - } - dest.setSize(keys.size()); - } - - @Override - void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableLongChunk longChunk = dest.asWritableLongChunk(); - for (int ii = 0; ii < keys.size(); ) { - final long firstKey = keys.get(ii); - if (firstKey == RowSequence.NULL_ROW_KEY) { - longChunk.set(ii++, NULL_LONG); - continue; - } - final long masked = firstKey & ~INDEX_MASK; - int lastII = ii; - while (lastII + 1 < keys.size()) { - final int nextII = lastII + 1; - final long nextKey = keys.get(nextII); - final long nextMasked = nextKey & ~INDEX_MASK; - if (nextMasked != masked) { - break; - } - lastII = nextII; - } - - final long [] block = blocks.getInnermostBlockByKeyOrNull(firstKey); - if (block == null) { - longChunk.fillWithNullValue(ii, lastII - ii + 1); - ii = lastII + 1; - continue; - } - - final long [] prevInUse = (prevFlusher == null || this.prevInUse == null) ? null : this.prevInUse.getInnermostBlockByKeyOrNull(firstKey); - final long [] prevBlock = prevInUse == null ? null : prevBlocks.getInnermostBlockByKeyOrNull(firstKey); - while (ii <= lastII) { - final int indexWithinBlock = (int) (keys.get(ii) & INDEX_MASK); - final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; - final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); - - final long[] blockToUse = (prevInUse != null && (prevInUse[indexWithinInUse] & maskWithinInUse) != 0) ? prevBlock : block; - longChunk.set(ii++, blockToUse == null ? 
NULL_LONG : blockToUse[indexWithinBlock]); - } - } - dest.setSize(keys.size()); - } - // endregion fillByUnRowSequence - - // region fillFromChunkByRanges - @Override - void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk src) { - if (rowSequence.isEmpty()) { - return; - } - final LongChunk chunk = src.asLongChunk(); - final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); - - final boolean trackPrevious = shouldTrackPrevious(); - - if (trackPrevious) { - prevFlusher.maybeActivate(); - } - - int offset = 0; - // This helps us reduce the number of calls to Chunk.isAlias - long[] knownUnaliasedBlock = null; - for (int ii = 0; ii < ranges.size(); ii += 2) { - long firstKey = ranges.get(ii); - final long lastKey = ranges.get(ii + 1); - - while (firstKey <= lastKey) { - final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; - final long lastKeyToUse = Math.min(maxKeyInCurrentBlock, lastKey); - final int length = (int) (lastKeyToUse - firstKey + 1); - - final int block0 = (int) (firstKey >> BLOCK0_SHIFT) & BLOCK0_MASK; - final int block1 = (int) (firstKey >> BLOCK1_SHIFT) & BLOCK1_MASK; - final int block2 = (int) (firstKey >> BLOCK2_SHIFT) & BLOCK2_MASK; - final long [] block = ensureBlock(block0, block1, block2); - - if (block != knownUnaliasedBlock && chunk.isAlias(block)) { - throw new UnsupportedOperationException("Source chunk is an alias for target data"); - } - knownUnaliasedBlock = block; - - final int sIndexWithinBlock = (int) (firstKey & INDEX_MASK); - // This 'if' with its constant condition should be very friendly to the branch predictor. 
- if (trackPrevious) { - final long[] prevBlock = ensurePrevBlock(firstKey, block0, block1, block2); - final long[] inUse = prevInUse.get(block0).get(block1).get(block2); - - assert inUse != null; - assert prevBlock != null; - - for (int jj = 0; jj < length; ++jj) { - final int indexWithinBlock = sIndexWithinBlock + jj; - final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; - final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); - - if ((inUse[indexWithinInUse] & maskWithinInUse) == 0) { - prevBlock[indexWithinBlock] = block[indexWithinBlock]; - inUse[indexWithinInUse] |= maskWithinInUse; - } - } - } - - // region copyToTypedArray - chunk.copyToTypedArray(offset, block, sIndexWithinBlock, length); - // endregion copyToTypedArray - - firstKey += length; - offset += length; - } - } - } - - private boolean shouldTrackPrevious() { - // prevFlusher == null means we are not tracking previous values yet (or maybe ever). - // If prepareForParallelPopulation was called on this cycle, it's assumed that all previous values have already - // been recorded. 
- return prevFlusher != null && prepareForParallelPopulationClockCycle != LogicalClock.DEFAULT.currentStep(); - } - // endregion fillFromChunkByRanges - - // region fillFromChunkByKeys - @Override - void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src) { - if (rowSequence.isEmpty()) { - return; - } - final LongChunk chunk = src.asLongChunk(); - final LongChunk keys = rowSequence.asRowKeyChunk(); - - final boolean trackPrevious = shouldTrackPrevious();; - - if (trackPrevious) { - prevFlusher.maybeActivate(); - } - - for (int ii = 0; ii < keys.size(); ) { - final long firstKey = keys.get(ii); - final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; - int lastII = ii; - while (lastII + 1 < keys.size() && keys.get(lastII + 1) <= maxKeyInCurrentBlock) { - ++lastII; - } - - final int block0 = (int) (firstKey >> BLOCK0_SHIFT) & BLOCK0_MASK; - final int block1 = (int) (firstKey >> BLOCK1_SHIFT) & BLOCK1_MASK; - final int block2 = (int) (firstKey >> BLOCK2_SHIFT) & BLOCK2_MASK; - final long [] block = ensureBlock(block0, block1, block2); - - if (chunk.isAlias(block)) { - throw new UnsupportedOperationException("Source chunk is an alias for target data"); - } - - // This conditional with its constant condition should be very friendly to the branch predictor. - final long[] prevBlock = trackPrevious ? ensurePrevBlock(firstKey, block0, block1, block2) : null; - final long[] inUse = trackPrevious ? prevInUse.get(block0).get(block1).get(block2) : null; - - while (ii <= lastII) { - final int indexWithinBlock = (int) (keys.get(ii) & INDEX_MASK); - // This 'if' with its constant condition should be very friendly to the branch predictor. 
- if (trackPrevious) { - assert inUse != null; - assert prevBlock != null; - - final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; - final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); - - if ((inUse[indexWithinInUse] & maskWithinInUse) == 0) { - prevBlock[indexWithinBlock] = block[indexWithinBlock]; - inUse[indexWithinInUse] |= maskWithinInUse; - } - } - block[indexWithinBlock] = chunk.get(ii); - ++ii; - } - } - } - // endregion fillFromChunkByKeys - - // region nullByRanges - @Override - void nullByRanges(@NotNull RowSequence rowSequence) { - if (rowSequence.isEmpty()) { - return; - } - - final boolean hasPrev = prevFlusher != null; - - if (hasPrev) { - prevFlusher.maybeActivate(); - } - - try (RowSequence.Iterator okIt = rowSequence.getRowSequenceIterator()) { - while (okIt.hasMore()) { - final long firstKey = okIt.peekNextKey(); - final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; - final RowSequence blockOk = okIt.getNextRowSequenceThrough(maxKeyInCurrentBlock); - - final int block0 = (int) (firstKey >> BLOCK0_SHIFT) & BLOCK0_MASK; - final int block1 = (int) (firstKey >> BLOCK1_SHIFT) & BLOCK1_MASK; - final int block2 = (int) (firstKey >> BLOCK2_SHIFT) & BLOCK2_MASK; - final long [] block = blocks.getInnermostBlockByKeyOrNull(firstKey); - - if (block == null) { - continue; - } - - blockOk.forAllRowKeyRanges((s, e) -> { - final int length = (int)((e - s) + 1); - - final int sIndexWithinBlock = (int) (s & INDEX_MASK); - // This 'if' with its constant condition should be very friendly to the branch predictor. 
- if (hasPrev) { - boolean prevRequired = false; - for (int jj = 0; jj < length; ++jj) { - final int indexWithinBlock = sIndexWithinBlock + jj; - if (block[indexWithinBlock] != NULL_LONG) { - prevRequired = true; - break; - } - } - - if (prevRequired) { - final long[] prevBlock = ensurePrevBlock(firstKey, block0, block1, block2); - final long[] inUse = prevInUse.get(block0).get(block1).get(block2); - - assert inUse != null; - assert prevBlock != null; - - for (int jj = 0; jj < length; ++jj) { - final int indexWithinBlock = sIndexWithinBlock + jj; - final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; - final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); - - if ((inUse[indexWithinInUse] & maskWithinInUse) == 0) { - prevBlock[indexWithinBlock] = block[indexWithinBlock]; - inUse[indexWithinInUse] |= maskWithinInUse; - } - } - - Arrays.fill(block, sIndexWithinBlock, sIndexWithinBlock + length, NULL_LONG); - } - } else { - Arrays.fill(block, sIndexWithinBlock, sIndexWithinBlock + length, NULL_LONG); - } - }); - } - } - } - // endregion nullByRanges - - // region nullByKeys - @Override - void nullByKeys(@NotNull RowSequence rowSequence) { - if (rowSequence.isEmpty()) { - return; - } - - final boolean hasPrev = prevFlusher != null; - - if (hasPrev) { - prevFlusher.maybeActivate(); - } - - try (RowSequence.Iterator okIt = rowSequence.getRowSequenceIterator()) { - while (okIt.hasMore()) { - final long firstKey = okIt.peekNextKey(); - final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; - final RowSequence blockOk = okIt.getNextRowSequenceThrough(maxKeyInCurrentBlock); - - final int block0 = (int) (firstKey >> BLOCK0_SHIFT) & BLOCK0_MASK; - final int block1 = (int) (firstKey >> BLOCK1_SHIFT) & BLOCK1_MASK; - final int block2 = (int) (firstKey >> BLOCK2_SHIFT) & BLOCK2_MASK; - final long[] block = blocks.getInnermostBlockByKeyOrNull(firstKey); - if (block == null) { - continue; - } - - MutableObject prevBlock = new MutableObject<>(); - 
MutableObject inUse = new MutableObject<>(); - - blockOk.forAllRowKeys(key -> { - - final int indexWithinBlock = (int) (key & INDEX_MASK); - // This 'if' with its constant condition should be very friendly to the branch predictor. - if (hasPrev) { - - final long oldValue = block[indexWithinBlock]; - if (oldValue != NULL_LONG) { - if (prevBlock.getValue() == null) { - prevBlock.setValue(ensurePrevBlock(firstKey, block0, block1, block2)); - inUse.setValue(prevInUse.get(block0).get(block1).get(block2)); - } - - final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; - final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); - - if ((inUse.getValue()[indexWithinInUse] & maskWithinInUse) == 0) { - prevBlock.getValue()[indexWithinBlock] = oldValue; - inUse.getValue()[indexWithinInUse] |= maskWithinInUse; - } - } - } - block[indexWithinBlock] = NULL_LONG; - }); - } - } - } - // endregion nullByKeys - - // region fillFromChunkUnordered - @Override - public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, @NotNull LongChunk keys) { - if (keys.size() == 0) { - return; - } - final LongChunk chunk = src.asLongChunk(); - - final boolean trackPrevious = shouldTrackPrevious();; - - if (trackPrevious) { - prevFlusher.maybeActivate(); - } - - for (int ii = 0; ii < keys.size(); ) { - final long firstKey = keys.get(ii); - final long minKeyInCurrentBlock = firstKey & ~INDEX_MASK; - final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; - - final int block0 = (int) (firstKey >> BLOCK0_SHIFT) & BLOCK0_MASK; - final int block1 = (int) (firstKey >> BLOCK1_SHIFT) & BLOCK1_MASK; - final int block2 = (int) (firstKey >> BLOCK2_SHIFT) & BLOCK2_MASK; - final long [] block = ensureBlock(block0, block1, block2); - - if (chunk.isAlias(block)) { - throw new UnsupportedOperationException("Source chunk is an alias for target data"); - } - - // This conditional with its constant condition should be very friendly to the branch predictor. 
- final long[] prevBlock = trackPrevious ? ensurePrevBlock(firstKey, block0, block1, block2) : null; - final long[] inUse = trackPrevious ? prevInUse.get(block0).get(block1).get(block2) : null; - - long key = keys.get(ii); - do { - final int indexWithinBlock = (int) (key & INDEX_MASK); - - if (trackPrevious) { - assert inUse != null; - - final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; - final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); - - if ((inUse[indexWithinInUse] & maskWithinInUse) == 0) { - prevBlock[indexWithinBlock] = block[indexWithinBlock]; - inUse[indexWithinInUse] |= maskWithinInUse; - } - } - block[indexWithinBlock] = chunk.get(ii); - ++ii; - } while (ii < keys.size() && (key = keys.get(ii)) >= minKeyInCurrentBlock && key <= maxKeyInCurrentBlock); - } - } - // endregion fillFromChunkUnordered - - @Override - public void fillPrevChunk(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { - if (prevFlusher == null) { - fillChunk(context, dest, rowSequence); - return; - } - defaultFillPrevChunk(context, dest, rowSequence); - } - - // region getChunk - @Override - public Chunk getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { - if (rowSequence.isEmpty()) { - return LongChunk.getEmptyChunk(); - } - final long firstKey = rowSequence.firstRowKey(); - final long lastKey = rowSequence.lastRowKey(); - if ((lastKey - firstKey + 1) == rowSequence.size() && (firstKey >> BLOCK2_SHIFT == lastKey >> BLOCK2_SHIFT)) { - // it's a contiguous range, in a single block - return DefaultGetContext.resetChunkFromArray(context, - blocks.getInnermostBlockByKeyOrNull(firstKey), - (int) (firstKey & INDEX_MASK), - (int) rowSequence.size()); - } - return getChunkByFilling(context, rowSequence).asLongChunk(); - } - // endregion getChunk - - // region getPrevChunk - @Override - public Chunk getPrevChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { - if 
(prevFlusher == null) { - return getChunk(context, rowSequence); - } - return getPrevChunkByFilling(context, rowSequence).asLongChunk(); - } - // endregion getPrevChunk - - // region reinterpretation - // endregion reinterpretation -} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ArrayBackedColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ArrayBackedColumnSource.java index 5d2aa170e19..1c3c8343ae7 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ArrayBackedColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ArrayBackedColumnSource.java @@ -415,6 +415,8 @@ public static ArrayBackedColumnSource getMemoryColumnSource(final long si result = new BooleanArraySource(); } else if (dataType == DateTime.class) { result = new DateTimeArraySource(); + } else if (dataType == Instant.class) { + result = new InstantArraySource(); } else { if (componentType != null) { result = new ObjectArraySource<>(dataType, componentType); @@ -524,38 +526,6 @@ static int getBlockNo(final long from) { abstract Object getPrevBlock(int blockIndex); - @Override - public void fillChunk(@NotNull final FillContext context, @NotNull final WritableChunk destination, - @NotNull final RowSequence rowSequence) { - if (rowSequence.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { - fillSparseChunk(destination, rowSequence); - return; - } - MutableInt destOffset = new MutableInt(0); - rowSequence.forAllRowKeyRanges((final long from, final long to) -> { - final int fromBlock = getBlockNo(from); - final int toBlock = getBlockNo(to); - final int fromOffsetInBlock = (int) (from & INDEX_MASK); - if (fromBlock == toBlock) { - final int sz = LongSizedDataStructure.intSize("int cast", to - from + 1); - destination.copyFromArray(getBlock(fromBlock), fromOffsetInBlock, destOffset.intValue(), sz); - destOffset.add(sz); - } else { - final int sz = BLOCK_SIZE - 
fromOffsetInBlock; - destination.copyFromArray(getBlock(fromBlock), fromOffsetInBlock, destOffset.intValue(), sz); - destOffset.add(sz); - for (int blockNo = fromBlock + 1; blockNo < toBlock; ++blockNo) { - destination.copyFromArray(getBlock(blockNo), 0, destOffset.intValue(), BLOCK_SIZE); - destOffset.add(BLOCK_SIZE); - } - int restSz = (int) (to & INDEX_MASK) + 1; - destination.copyFromArray(getBlock(toBlock), 0, destOffset.intValue(), restSz); - destOffset.add(restSz); - } - }); - destination.setSize(destOffset.intValue()); - } - @Override public void fillChunkUnordered(@NotNull final FillContext context, @NotNull final WritableChunk destination, diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ArraySourceHelper.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ArraySourceHelper.java index 2d4e1cb6e1c..1e0a065fcbd 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ArraySourceHelper.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ArraySourceHelper.java @@ -7,9 +7,9 @@ import io.deephaven.base.verify.Assert; import io.deephaven.chunk.Chunk; import io.deephaven.chunk.ChunkType; -import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.rowset.RowSequence; +import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.SharedContext; import io.deephaven.engine.table.WritableSourceWithPrepareForParallelPopulation; @@ -17,7 +17,6 @@ import io.deephaven.engine.updategraph.UpdateCommitter; import io.deephaven.util.SoftRecycler; import io.deephaven.util.datastructures.LongSizedDataStructure; -import org.apache.commons.lang3.mutable.MutableInt; import org.jetbrains.annotations.NotNull; import java.util.Arrays; @@ -45,7 +44,7 @@ abstract class ArraySourceHelper extends ArrayBackedColumnSource super(type, componentType); } - private static class 
FillContext implements ColumnSource.FillContext { + static class FillContext implements ColumnSource.FillContext { final CopyKernel copyKernel; FillContext(ChunkType chunkType) { @@ -59,7 +58,7 @@ public boolean supportsUnboundedFill() { } @Override - public FillContext makeFillContext(final int chunkCapacity, final SharedContext sharedContext) { + public ChunkSource.FillContext makeFillContext(final int chunkCapacity, final SharedContext sharedContext) { return makeFillContext(getChunkType()); } @@ -68,61 +67,6 @@ FillContext makeFillContext(ChunkType chunkType) { return new FillContext(chunkType); } - private interface CopyFromBlockFunctor { - void copy(int blockNo, int srcOffset, int length); - } - - @Override - public void fillPrevChunk( - @NotNull final ColumnSource.FillContext context, - @NotNull final WritableChunk destination, - @NotNull final RowSequence rowSequence) { - if (prevFlusher == null) { - fillChunk(context, destination, rowSequence); - return; - } - - if (rowSequence.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { - fillSparsePrevChunk(destination, rowSequence); - return; - } - - final FillContext effectiveContext = (FillContext) context; - final MutableInt destOffset = new MutableInt(0); - - CopyFromBlockFunctor lambda = (blockNo, srcOffset, length) -> { - final long[] inUse = prevInUse[blockNo]; - if (inUse != null) { - effectiveContext.copyKernel.conditionalCopy(destination, getBlock(blockNo), getPrevBlock(blockNo), - inUse, srcOffset, destOffset.intValue(), length); - } else { - destination.copyFromArray(getBlock(blockNo), srcOffset, destOffset.intValue(), length); - } - destOffset.add(length); - }; - - rowSequence.forAllRowKeyRanges((final long from, final long to) -> { - final int fromBlock = getBlockNo(from); - final int toBlock = getBlockNo(to); - final int fromOffsetInBlock = (int) (from & INDEX_MASK); - if (fromBlock == toBlock) { - final int sz = LongSizedDataStructure.intSize("int cast", to - from + 1); - 
lambda.copy(fromBlock, fromOffsetInBlock, sz); - } else { - final int sz = BLOCK_SIZE - fromOffsetInBlock; - lambda.copy(fromBlock, fromOffsetInBlock, sz); - - for (int blockNo = fromBlock + 1; blockNo < toBlock; ++blockNo) { - lambda.copy(blockNo, 0, BLOCK_SIZE); - } - - int restSz = (int) (to & INDEX_MASK) + 1; - lambda.copy(toBlock, 0, restSz); - } - }); - destination.setSize(destOffset.intValue()); - } - /** * Get the capacity of this column source. This number is one higher than the highest key that may be accessed (read * or written). diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BooleanSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BooleanSparseArraySource.java index 05925677685..38c931caf4d 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BooleanSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BooleanSparseArraySource.java @@ -14,6 +14,7 @@ import static io.deephaven.util.BooleanUtils.NULL_BOOLEAN_AS_BYTE; import io.deephaven.engine.table.WritableSourceWithPrepareForParallelPopulation; +import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; @@ -51,7 +52,8 @@ * * (C-haracter is deliberately spelled that way in order to prevent Replicate from altering this very comment). 
*/ -public class BooleanSparseArraySource extends SparseArrayColumnSource implements MutableColumnSourceGetDefaults.ForBoolean { +public class BooleanSparseArraySource extends SparseArrayColumnSource + implements MutableColumnSourceGetDefaults.ForBoolean /* MIXIN_IMPLS */ { // region recyclers private static final SoftRecycler recycler = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, () -> new byte[BLOCK_SIZE], null); @@ -414,7 +416,7 @@ private void commitUpdates() { } @Override - public void prepareForParallelPopulation(RowSet changedRows) { + public void prepareForParallelPopulation(final RowSet changedRows) { final long currentStep = LogicalClock.DEFAULT.currentStep(); if (prepareForParallelPopulationClockCycle == currentStep) { throw new IllegalStateException("May not call prepareForParallelPopulation twice on one clock cycle!"); @@ -489,8 +491,13 @@ private boolean shouldUsePrevious(final long index) { // region fillByRanges @Override - void fillByRanges(@NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { + /* TYPE_MIXIN */ void fillByRanges( + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl final WritableObjectChunk chunk = dest.asWritableObjectChunk(); + // endregion chunkDecl final FillByContext ctx = new FillByContext<>(); rowSequence.forAllRowKeyRanges((long firstKey, final long lastKey) -> { if (firstKey > ctx.maxKeyInCurrentBlock) { @@ -528,8 +535,13 @@ void fillByRanges(@NotNull WritableChunk dest, @NotNull RowSeque // region fillByKeys @Override - void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { + /* TYPE_MIXIN */ void fillByKeys( + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl final WritableObjectChunk chunk = dest.asWritableObjectChunk(); + // endregion chunkDecl final FillByContext ctx = new FillByContext<>(); rowSequence.forEachRowKey((final long v) -> { if (v > 
ctx.maxKeyInCurrentBlock) { @@ -539,7 +551,9 @@ void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequenc if (ctx.block == null) { chunk.fillWithNullValue(ctx.offset, 1); } else { + // region conversion chunk.set(ctx.offset, BooleanUtils.byteAsBoolean(ctx.block[(int) (v & INDEX_MASK)])); + // endregion conversion } ++ctx.offset; return true; @@ -550,12 +564,17 @@ void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequenc // region fillByUnRowSequence @Override - void fillByUnRowSequence(@NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableObjectChunk booleanObjectChunk = dest.asWritableObjectChunk(); + /* TYPE_MIXIN */ void fillByUnRowSequence( + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableObjectChunk chunk = dest.asWritableObjectChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ) { final long firstKey = keys.get(ii); if (firstKey == RowSequence.NULL_ROW_KEY) { - booleanObjectChunk.set(ii++, NULL_BOOLEAN); + chunk.set(ii++, NULL_BOOLEAN); continue; } final long masked = firstKey & ~INDEX_MASK; @@ -571,25 +590,32 @@ void fillByUnRowSequence(@NotNull WritableChunk dest, @NotNull L } final byte [] block = blocks.getInnermostBlockByKeyOrNull(firstKey); if (block == null) { - booleanObjectChunk.fillWithNullValue(ii, lastII - ii + 1); + chunk.fillWithNullValue(ii, lastII - ii + 1); ii = lastII + 1; continue; } while (ii <= lastII) { final int indexWithinBlock = (int) (keys.get(ii) & INDEX_MASK); - booleanObjectChunk.set(ii++, BooleanUtils.byteAsBoolean(block[indexWithinBlock])); + // region conversion + chunk.set(ii++, BooleanUtils.byteAsBoolean(block[indexWithinBlock])); + // endregion conversion } } dest.setSize(keys.size()); } @Override - void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableObjectChunk booleanObjectChunk = dest.asWritableObjectChunk(); + /* TYPE_MIXIN */ void 
fillPrevByUnRowSequence( + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableObjectChunk chunk = dest.asWritableObjectChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ) { final long firstKey = keys.get(ii); if (firstKey == RowSequence.NULL_ROW_KEY) { - booleanObjectChunk.set(ii++, NULL_BOOLEAN); + chunk.set(ii++, NULL_BOOLEAN); continue; } final long masked = firstKey & ~INDEX_MASK; @@ -606,7 +632,7 @@ void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNu final byte [] block = blocks.getInnermostBlockByKeyOrNull(firstKey); if (block == null) { - booleanObjectChunk.fillWithNullValue(ii, lastII - ii + 1); + chunk.fillWithNullValue(ii, lastII - ii + 1); ii = lastII + 1; continue; } @@ -619,7 +645,9 @@ void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNu final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); final byte[] blockToUse = (prevInUse != null && (prevInUse[indexWithinInUse] & maskWithinInUse) != 0) ? prevBlock : block; - booleanObjectChunk.set(ii++, blockToUse == null ? NULL_BOOLEAN : BooleanUtils.byteAsBoolean(blockToUse[indexWithinBlock])); + // region conversion + chunk.set(ii++, blockToUse == null ? 
NULL_BOOLEAN : BooleanUtils.byteAsBoolean(blockToUse[indexWithinBlock])); + // endregion conversion } } dest.setSize(keys.size()); @@ -628,11 +656,16 @@ void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNu // region fillFromChunkByRanges @Override - void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final RowSequence rowSequence, + @NotNull final Chunk src + /* CONVERTER */) { if (rowSequence.isEmpty()) { return; } + // region chunkDecl final ObjectChunk chunk = src.asObjectChunk(); + // endregion chunkDecl final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); final boolean trackPrevious = shouldTrackPrevious(); @@ -706,11 +739,16 @@ private boolean shouldTrackPrevious() { // region fillFromChunkByKeys @Override - void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final RowSequence rowSequence, + @NotNull final Chunk src + /* CONVERTER */) { if (rowSequence.isEmpty()) { return; } + // region chunkDecl final ObjectChunk chunk = src.asObjectChunk(); + // endregion chunkDecl final LongChunk keys = rowSequence.asRowKeyChunk(); final boolean trackPrevious = shouldTrackPrevious();; @@ -755,7 +793,9 @@ void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src, @NotNull LongChunk keys) { + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { if (keys.size() == 0) { return; } + // region chunkDecl final ObjectChunk chunk = src.asObjectChunk(); + // endregion chunkDecl final boolean trackPrevious = shouldTrackPrevious();; @@ -940,7 +986,9 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch inUse[indexWithinInUse] |= maskWithinInUse; } } + // region conversion block[indexWithinBlock] = BooleanUtils.booleanAsByte(chunk.get(ii)); + // 
endregion conversion ++ii; } while (ii < keys.size() && (key = keys.get(ii)) >= minKeyInCurrentBlock && key <= maxKeyInCurrentBlock); } @@ -948,7 +996,10 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch // endregion fillFromChunkUnordered @Override - public void fillPrevChunk(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { + public void fillPrevChunk( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence) { if (prevFlusher == null) { fillChunk(context, dest, rowSequence); return; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BoxedColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BoxedColumnSource.java index 2e0a60d5f56..53b429f47a3 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BoxedColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BoxedColumnSource.java @@ -16,6 +16,9 @@ import io.deephaven.engine.rowset.RowSequence; import org.jetbrains.annotations.NotNull; +import java.time.Instant; +import java.time.ZonedDateTime; + /** * {@link ColumnSource} implementation for explicitly boxing a primitive into a more complex type, e.g. {@code byte} as * {@link Boolean} or {@code long} as {@link DateTime}. 
@@ -49,7 +52,7 @@ private BoxedFillContext(@NotNull final ColumnSource originalSource, final in } @Override - public final void close() { + public void close() { originalGetContext.close(); } } @@ -100,17 +103,17 @@ static final class OfBoolean extends BoxedColumnSource { } @Override - public final Boolean get(final long rowKey) { + public Boolean get(final long rowKey) { return BooleanUtils.byteAsBoolean(originalSource.getByte(rowKey)); } @Override - public final Boolean getPrev(final long rowKey) { + public Boolean getPrev(final long rowKey) { return BooleanUtils.byteAsBoolean(originalSource.getPrevByte(rowKey)); } @Override - final void transformChunk(@NotNull final Chunk source, + void transformChunk(@NotNull final Chunk source, @NotNull final WritableChunk destination) { final ByteChunk typedSource = source.asByteChunk(); final WritableObjectChunk typedDestination = destination.asWritableObjectChunk(); @@ -131,17 +134,17 @@ public OfDateTime(@NotNull final ColumnSource originalSource) { } @Override - public final DateTime get(final long rowKey) { + public DateTime get(final long rowKey) { return DateTimeUtils.nanosToTime(originalSource.getLong(rowKey)); } @Override - public final DateTime getPrev(final long rowKey) { + public DateTime getPrev(final long rowKey) { return DateTimeUtils.nanosToTime(originalSource.getPrevLong(rowKey)); } @Override - final void transformChunk(@NotNull final Chunk source, + void transformChunk(@NotNull final Chunk source, @NotNull final WritableChunk destination) { final LongChunk typedSource = source.asLongChunk(); final WritableObjectChunk typedDestination = @@ -155,6 +158,38 @@ final void transformChunk(@NotNull final Chunk source, } } + public static final class OfInstant extends BoxedColumnSource { + + public OfInstant(@NotNull final ColumnSource originalSource) { + super(Instant.class, originalSource); + Assert.eq(originalSource.getType(), "originalSource.getType()", long.class); + } + + @Override + public Instant get(final 
long rowKey) { + return DateTimeUtils.makeInstant(originalSource.getLong(rowKey)); + } + + @Override + public Instant getPrev(final long rowKey) { + return DateTimeUtils.makeInstant(originalSource.getPrevLong(rowKey)); + } + + @Override + void transformChunk(@NotNull final Chunk source, + @NotNull final WritableChunk destination) { + final LongChunk typedSource = source.asLongChunk(); + final WritableObjectChunk typedDestination = + destination.asWritableObjectChunk(); + + final int sourceSize = typedSource.size(); + for (int pi = 0; pi < sourceSize; ++pi) { + typedDestination.set(pi, DateTimeUtils.makeInstant(typedSource.get(pi))); + } + typedDestination.setSize(sourceSize); + } + } + @Override public boolean isStateless() { return originalSource.isStateless(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BoxedLongAsTimeSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BoxedLongAsTimeSource.java new file mode 100644 index 00000000000..b4699a043c3 --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BoxedLongAsTimeSource.java @@ -0,0 +1,108 @@ +package io.deephaven.engine.table.impl.sources;/* + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ + +import io.deephaven.chunk.LongChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.rowset.RowSequence; +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.SharedContext; +import io.deephaven.engine.table.impl.AbstractColumnSource; +import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; +import org.jetbrains.annotations.NotNull; + +/** + * Reinterpret result {@link ColumnSource} implementations that translates {@code long} values to various Time types. 
+ */ +public abstract class BoxedLongAsTimeSource extends AbstractColumnSource + implements MutableColumnSourceGetDefaults.ForObject { + private final ColumnSource alternateColumnSource; + + private class BoxingFillContext implements FillContext { + final FillContext alternateFillContext; + final WritableLongChunk innerChunk; + + private BoxingFillContext(final int chunkCapacity, final SharedContext sharedContext) { + alternateFillContext = alternateColumnSource.makeFillContext(chunkCapacity, sharedContext); + innerChunk = WritableLongChunk.makeWritableChunk(chunkCapacity); + } + + @Override + public void close() { + alternateFillContext.close(); + innerChunk.close(); + } + } + + public BoxedLongAsTimeSource(final Class type, ColumnSource alternateColumnSource) { + super(type); + this.alternateColumnSource = alternateColumnSource; + } + + protected abstract TIME_TYPE makeValue(long val); + + @Override + public TIME_TYPE get(long index) { + return makeValue(alternateColumnSource.getLong(index)); + } + + @Override + public TIME_TYPE getPrev(long index) { + return makeValue(alternateColumnSource.getPrevLong(index)); + } + + @Override + public boolean isImmutable() { + return alternateColumnSource.isImmutable(); + } + + @Override + public boolean allowsReinterpret( + @NotNull final Class alternateDataType) { + return alternateColumnSource.allowsReinterpret(alternateDataType) + || alternateDataType == alternateColumnSource.getType(); + } + + @Override + public ColumnSource doReinterpret( + @NotNull final Class alternateDataType) throws IllegalArgumentException { + //noinspection unchecked + return alternateDataType == alternateColumnSource.getType() + ? 
(ColumnSource) alternateColumnSource + : alternateColumnSource.reinterpret(alternateDataType); + } + + @Override + public FillContext makeFillContext(final int chunkCapacity, final SharedContext sharedContext) { + return new BoxingFillContext(chunkCapacity, sharedContext); + } + + @Override + public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk destination, + @NotNull RowSequence rowSequence) { + final BoxingFillContext fillContext = (BoxingFillContext) context; + final WritableLongChunk innerChunk = fillContext.innerChunk; + alternateColumnSource.fillChunk(fillContext.alternateFillContext, innerChunk, rowSequence); + convertToType(destination, innerChunk); + } + + @Override + public void fillPrevChunk(@NotNull FillContext context, @NotNull WritableChunk destination, + @NotNull RowSequence rowSequence) { + final BoxingFillContext fillContext = (BoxingFillContext) context; + final WritableLongChunk innerChunk = fillContext.innerChunk; + alternateColumnSource.fillPrevChunk(fillContext.alternateFillContext, innerChunk, rowSequence); + convertToType(destination, innerChunk); + } + + private void convertToType(@NotNull WritableChunk destination, LongChunk innerChunk) { + final WritableObjectChunk dest = destination.asWritableObjectChunk(); + for (int ii = 0; ii < innerChunk.size(); ++ii) { + dest.set(ii, makeValue(innerChunk.get(ii))); + } + dest.setSize(innerChunk.size()); + } +} \ No newline at end of file diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteArraySource.java index f141c7ae4fd..b66b3563264 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteArraySource.java @@ -17,10 +17,14 @@ import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; import 
io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.engine.table.ChunkSource; +import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.util.SoftRecycler; import io.deephaven.util.compare.ByteComparisons; +import io.deephaven.util.datastructures.LongSizedDataStructure; +import org.apache.commons.lang3.mutable.MutableInt; import org.jetbrains.annotations.NotNull; import java.util.Arrays; @@ -37,7 +41,8 @@ * * (C-haracter is deliberately spelled that way in order to prevent Replicate from altering this very comment). */ -public class ByteArraySource extends ArraySourceHelper implements MutableColumnSourceGetDefaults.ForByte { +public class ByteArraySource extends ArraySourceHelper + implements MutableColumnSourceGetDefaults.ForByte /* MIXIN_IMPLS */ { private static final SoftRecycler recycler = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, () -> new byte[BLOCK_SIZE], null); @@ -79,7 +84,7 @@ public void prepareForParallelPopulation(RowSet changedRows) { return; } - // ensure that this source will have sufficient capacity to store these indices, does not need to be + // ensure that this source will have sufficient capacity to store these rows, does not need to be // null-filled as the values will be immediately written ensureCapacity(changedRows.lastRowKey() + 1, false); @@ -168,6 +173,9 @@ public final byte getAndSetUnsafe(long index, byte newValue) { return oldValue; } + // region getAndAddUnsafe + // endregion getAndAddUnsafe + @Override public Byte getPrev(long rowKey) { return box(getPrevByte(rowKey)); @@ -208,7 +216,7 @@ public void move(long source, long dest, long length) { return; } if (((source - dest) & INDEX_MASK) == 0 && (source & INDEX_MASK) == 0) { - // TODO: we can move full blocks! + // TODO (#3359): we can move full blocks! 
} if (source < dest && source + length >= dest) { for (long ii = length - 1; ii >= 0; ) { @@ -304,13 +312,133 @@ public long resetWritableChunkToBackingStoreSlice(@NotNull ResettableWritableChu return capacity; } + // region fillChunk + @Override + public /* TYPE_MIXIN */ void fillChunk( + @NotNull final ChunkSource.FillContext context, + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + if (rowSequence.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { + fillSparseChunk(destination, rowSequence /* CONVERTER_ARG */); + return; + } + // region chunkDecl + final WritableByteChunk chunk = destination.asWritableByteChunk(); + // endregion chunkDecl + MutableInt destOffset = new MutableInt(0); + rowSequence.forAllRowKeyRanges((final long from, final long to) -> { + final int fromBlock = getBlockNo(from); + final int toBlock = getBlockNo(to); + final int fromOffsetInBlock = (int) (from & INDEX_MASK); + if (fromBlock == toBlock) { + final int sz = LongSizedDataStructure.intSize("int cast", to - from + 1); + // region copyFromArray + destination.copyFromArray(getBlock(fromBlock), fromOffsetInBlock, destOffset.intValue(), sz); + // endregion copyFromArray + destOffset.add(sz); + } else { + final int sz = BLOCK_SIZE - fromOffsetInBlock; + // region copyFromArray + destination.copyFromArray(getBlock(fromBlock), fromOffsetInBlock, destOffset.intValue(), sz); + // endregion copyFromArray + destOffset.add(sz); + for (int blockNo = fromBlock + 1; blockNo < toBlock; ++blockNo) { + // region copyFromArray + destination.copyFromArray(getBlock(blockNo), 0, destOffset.intValue(), BLOCK_SIZE); + // endregion copyFromArray + destOffset.add(BLOCK_SIZE); + } + int restSz = (int) (to & INDEX_MASK) + 1; + // region copyFromArray + destination.copyFromArray(getBlock(toBlock), 0, destOffset.intValue(), restSz); + // endregion copyFromArray + destOffset.add(restSz); + } + }); + destination.setSize(destOffset.intValue()); + } 
+ // endregion fillChunk + + private interface CopyFromBlockFunctor { + void copy(int blockNo, int srcOffset, int length); + } + + // region fillPrevChunk + @Override + public /* TYPE_MIXIN */ void fillPrevChunk( + @NotNull final ColumnSource.FillContext context, + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + if (prevFlusher == null) { + fillChunk(context, destination, rowSequence /* CONVERTER_ARG */); + return; + } + + if (rowSequence.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { + fillSparsePrevChunk(destination, rowSequence /* CONVERTER_ARG */); + return; + } + + final ArraySourceHelper.FillContext effectiveContext = (ArraySourceHelper.FillContext) context; + final MutableInt destOffset = new MutableInt(0); + + // region chunkDecl + final WritableByteChunk chunk = destination.asWritableByteChunk(); + // endregion chunkDecl + + CopyFromBlockFunctor lambda = (blockNo, srcOffset, length) -> { + final long[] inUse = prevInUse[blockNo]; + if (inUse != null) { + // region conditionalCopy + effectiveContext.copyKernel.conditionalCopy(destination, getBlock(blockNo), getPrevBlock(blockNo), + inUse, srcOffset, destOffset.intValue(), length); + // endregion conditionalCopy + } else { + // region copyFromArray + destination.copyFromArray(getBlock(blockNo), srcOffset, destOffset.intValue(), length); + // endregion copyFromArray + } + destOffset.add(length); + }; + + rowSequence.forAllRowKeyRanges((final long from, final long to) -> { + final int fromBlock = getBlockNo(from); + final int toBlock = getBlockNo(to); + final int fromOffsetInBlock = (int) (from & INDEX_MASK); + if (fromBlock == toBlock) { + final int sz = LongSizedDataStructure.intSize("int cast", to - from + 1); + lambda.copy(fromBlock, fromOffsetInBlock, sz); + } else { + final int sz = BLOCK_SIZE - fromOffsetInBlock; + lambda.copy(fromBlock, fromOffsetInBlock, sz); + + for (int blockNo = fromBlock + 1; blockNo < toBlock; ++blockNo) 
{ + lambda.copy(blockNo, 0, BLOCK_SIZE); + } + + int restSz = (int) (to & INDEX_MASK) + 1; + lambda.copy(toBlock, 0, restSz); + } + }); + destination.setSize(destOffset.intValue()); + } + // endregion fillPrevChunk + + // region fillSparseChunk @Override - protected void fillSparseChunk(@NotNull final WritableChunk destGeneric, @NotNull final RowSequence rows) { + protected /* TYPE_MIXIN */ void fillSparseChunk( + @NotNull final WritableChunk destGeneric, + @NotNull final RowSequence rows + /* CONVERTER */) { if (rows.size() == 0) { destGeneric.setSize(0); return; } - final WritableByteChunk dest = destGeneric.asWritableByteChunk(); + // region chunkDecl + final WritableByteChunk chunk = destGeneric.asWritableByteChunk(); + // endregion chunkDecl final FillSparseChunkContext ctx = new FillSparseChunkContext<>(); rows.forAllRowKeys((final long v) -> { if (v >= ctx.capForCurrentBlock) { @@ -318,13 +446,20 @@ protected void fillSparseChunk(@NotNull final WritableChunk dest ctx.capForCurrentBlock = (ctx.currentBlockNo + 1L) << LOG_BLOCK_SIZE; ctx.currentBlock = blocks[ctx.currentBlockNo]; } - dest.set(ctx.offset++, ctx.currentBlock[(int) (v & INDEX_MASK)]); + // region conversion + chunk.set(ctx.offset++, ctx.currentBlock[(int) (v & INDEX_MASK)]); + // endregion conversion }); - dest.setSize(ctx.offset); + chunk.setSize(ctx.offset); } + // endregion fillSparseChunk + // region fillSparsePrevChunk @Override - protected void fillSparsePrevChunk(@NotNull final WritableChunk destGeneric, @NotNull final RowSequence rows) { + protected /* TYPE_MIXIN */ void fillSparsePrevChunk( + @NotNull final WritableChunk destGeneric, + @NotNull final RowSequence rows + /* CONVERTER */) { final long sz = rows.size(); if (sz == 0) { destGeneric.setSize(0); @@ -332,11 +467,13 @@ protected void fillSparsePrevChunk(@NotNull final WritableChunk } if (prevFlusher == null) { - fillSparseChunk(destGeneric, rows); + fillSparseChunk(destGeneric, rows /* CONVERTER_ARG */); return; } - final 
WritableByteChunk dest = destGeneric.asWritableByteChunk(); + // region chunkDecl + final WritableByteChunk chunk = destGeneric.asWritableByteChunk(); + // endregion chunkDecl final FillSparseChunkContext ctx = new FillSparseChunkContext<>(); rows.forAllRowKeys((final long v) -> { if (v >= ctx.capForCurrentBlock) { @@ -351,59 +488,86 @@ protected void fillSparsePrevChunk(@NotNull final WritableChunk final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); final boolean usePrev = ctx.prevInUseBlock != null && (ctx.prevInUseBlock[indexWithinInUse] & maskWithinInUse) != 0; - dest.set(ctx.offset++, usePrev ? ctx.currentPrevBlock[indexWithinBlock] : ctx.currentBlock[indexWithinBlock]); + // region conversion + chunk.set(ctx.offset++, usePrev ? ctx.currentPrevBlock[indexWithinBlock] : ctx.currentBlock[indexWithinBlock]); + // endregion conversion }); - dest.setSize(ctx.offset); + chunk.setSize(ctx.offset); } + // endregion fillSparsePrevChunk + // region fillSparseChunkUnordered @Override - protected void fillSparseChunkUnordered(@NotNull final WritableChunk destGeneric, @NotNull final LongChunk rows) { - final WritableByteChunk dest = destGeneric.asWritableByteChunk(); + protected /* TYPE_MIXIN */ void fillSparseChunkUnordered( + @NotNull final WritableChunk destGeneric, + @NotNull final LongChunk rows + /* CONVERTER */) { + // region chunkDecl + final WritableByteChunk chunk = destGeneric.asWritableByteChunk(); + // endregion chunkDecl final int sz = rows.size(); for (int ii = 0; ii < sz; ++ii) { final long fromIndex = rows.get(ii); if (fromIndex == RowSequence.NULL_ROW_KEY) { - dest.set(ii, NULL_BYTE); + chunk.set(ii, NULL_BYTE); continue; } final int blockNo = getBlockNo(fromIndex); if (blockNo >= blocks.length) { - dest.set(ii, NULL_BYTE); + chunk.set(ii, NULL_BYTE); } else { final byte[] currentBlock = blocks[blockNo]; - dest.set(ii, currentBlock[(int) (fromIndex & INDEX_MASK)]); + 
// region conversion + chunk.set(ii, currentBlock[(int) (fromIndex & INDEX_MASK)]); + // endregion conversion } } - dest.setSize(sz); + chunk.setSize(sz); } + // endregion fillSparseChunkUnordered + // region fillSparsePrevChunkUnordered @Override - protected void fillSparsePrevChunkUnordered(@NotNull final WritableChunk destGeneric, @NotNull final LongChunk rows) { - final WritableByteChunk dest = destGeneric.asWritableByteChunk(); + protected /* TYPE_MIXIN */ void fillSparsePrevChunkUnordered( + @NotNull final WritableChunk destGeneric, + @NotNull final LongChunk rows + /* CONVERTER */) { + // region chunkDecl + final WritableByteChunk chunk = destGeneric.asWritableByteChunk(); + // endregion chunkDecl final int sz = rows.size(); for (int ii = 0; ii < sz; ++ii) { final long fromIndex = rows.get(ii); if (fromIndex == RowSequence.NULL_ROW_KEY) { - dest.set(ii, NULL_BYTE); + chunk.set(ii, NULL_BYTE); continue; } final int blockNo = getBlockNo(fromIndex); if (blockNo >= blocks.length) { - dest.set(ii, NULL_BYTE); + chunk.set(ii, NULL_BYTE); continue; } final byte[] currentBlock = shouldUsePrevious(fromIndex) ? 
prevBlocks[blockNo] : blocks[blockNo]; - dest.set(ii, currentBlock[(int) (fromIndex & INDEX_MASK)]); + // region conversion + chunk.set(ii, currentBlock[(int) (fromIndex & INDEX_MASK)]); + // endregion conversion } - dest.setSize(sz); + chunk.setSize(sz); } + // endregion fillSparsePrevChunkUnordered + // region fillFromChunkByRanges @Override - void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final RowSequence rowSequence, + final Chunk src + /* CONVERTER */) { if (rowSequence.size() == 0) { return; } + // region chunkDecl final ByteChunk chunk = src.asByteChunk(); + // endregion chunkDecl final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); @@ -424,33 +588,36 @@ void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk> LOG_BLOCK_SIZE); + final int block0 = (int) (firstKey >> LOG_BLOCK_SIZE); final int sIndexWithinBlock = (int) (firstKey & INDEX_MASK); - final byte[] inner = blocks[block]; + final byte[] block = blocks[block0]; - if (inner != knownUnaliasedBlock && chunk.isAlias(inner)) { + if (block != knownUnaliasedBlock && chunk.isAlias(block)) { throw new UnsupportedOperationException("Source chunk is an alias for target data"); } - knownUnaliasedBlock = inner; + knownUnaliasedBlock = block; // This 'if' with its constant condition should be very friendly to the branch predictor. 
if (trackPrevious) { // this should be vectorized for (int jj = 0; jj < length; ++jj) { if (shouldRecordPrevious(firstKey + jj, prevBlocks, recycler)) { - prevBlocks[block][sIndexWithinBlock + jj] = inner[sIndexWithinBlock + jj]; + prevBlocks[block0][sIndexWithinBlock + jj] = block[sIndexWithinBlock + jj]; } } } - chunk.copyToTypedArray(offset, inner, sIndexWithinBlock, length); + // region copyToTypedArray + chunk.copyToTypedArray(offset, block, sIndexWithinBlock, length); + // endregion copyToTypedArray firstKey += length; offset += length; } } } + // endregion fillFromChunkByRanges - public void copyFromChunk(long firstKey, long totalLength, Chunk src, int offset) { + public void copyFromChunk(long firstKey, final long totalLength, final Chunk src, int offset) { if (totalLength == 0) { return; } @@ -463,22 +630,28 @@ public void copyFromChunk(long firstKey, long totalLength, Chunk> LOG_BLOCK_SIZE); + final int block0 = (int) (firstKey >> LOG_BLOCK_SIZE); final int sIndexWithinBlock = (int) (firstKey & INDEX_MASK); - final byte[] inner = blocks[block]; + final byte[] block = blocks[block0]; - chunk.copyToTypedArray(offset, inner, sIndexWithinBlock, length); + chunk.copyToTypedArray(offset, block, sIndexWithinBlock, length); firstKey += length; offset += length; } } + // region fillFromChunkByKeys @Override - void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final RowSequence rowSequence, + final Chunk src + /* CONVERTER */) { if (rowSequence.size() == 0) { return; } + // region chunkDecl final ByteChunk chunk = src.asByteChunk(); + // endregion chunkDecl final LongChunk keys = rowSequence.asRowKeyChunk(); final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); @@ -495,10 +668,10 @@ void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk> LOG_BLOCK_SIZE); - final byte[] inner = blocks[block]; + final int block0 = (int) 
(firstKey >> LOG_BLOCK_SIZE); + final byte[] block = blocks[block0]; - if (chunk.isAlias(inner)) { + if (chunk.isAlias(block)) { throw new UnsupportedOperationException("Source chunk is an alias for target data"); } @@ -508,21 +681,31 @@ void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src, @NotNull LongChunk keys) { + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { if (keys.size() == 0) { return; } + // region chunkDecl final ByteChunk chunk = src.asByteChunk(); + // endregion chunkDecl final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); @@ -535,10 +718,10 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch final long minKeyInCurrentBlock = firstKey & ~INDEX_MASK; final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; - final int block = (int) (firstKey >> LOG_BLOCK_SIZE); - final byte[] inner = blocks[block]; + final int block0 = (int) (firstKey >> LOG_BLOCK_SIZE); + final byte[] block = blocks[block0]; - if (chunk.isAlias(inner)) { + if (chunk.isAlias(block)) { throw new UnsupportedOperationException("Source chunk is an alias for target data"); } @@ -548,12 +731,18 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch if (trackPrevious) { if (shouldRecordPrevious(key, prevBlocks, recycler)) { - prevBlocks[block][indexWithinBlock] = inner[indexWithinBlock]; + prevBlocks[block0][indexWithinBlock] = block[indexWithinBlock]; } } - inner[indexWithinBlock] = chunk.get(ii); + // region conversion + block[indexWithinBlock] = chunk.get(ii); + // endregion conversion ++ii; } while (ii < keys.size() && (key = keys.get(ii)) >= minKeyInCurrentBlock && key <= maxKeyInCurrentBlock); } } + // endregion fillFromChunkUnordered + + // region reinterpretation + // endregion reinterpretation } diff --git 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteSparseArraySource.java index 9c016546f00..cb509bdb0aa 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteSparseArraySource.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.sources; +import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; @@ -45,7 +46,8 @@ * * (C-haracter is deliberately spelled that way in order to prevent Replicate from altering this very comment). */ -public class ByteSparseArraySource extends SparseArrayColumnSource implements MutableColumnSourceGetDefaults.ForByte { +public class ByteSparseArraySource extends SparseArrayColumnSource + implements MutableColumnSourceGetDefaults.ForByte /* MIXIN_IMPLS */ { // region recyclers private static final SoftRecycler recycler = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, () -> new byte[BLOCK_SIZE], null); @@ -408,7 +410,7 @@ private void commitUpdates() { } @Override - public void prepareForParallelPopulation(RowSet changedRows) { + public void prepareForParallelPopulation(final RowSet changedRows) { final long currentStep = LogicalClock.DEFAULT.currentStep(); if (prepareForParallelPopulationClockCycle == currentStep) { throw new IllegalStateException("May not call prepareForParallelPopulation twice on one clock cycle!"); @@ -483,8 +485,13 @@ private boolean shouldUsePrevious(final long index) { // region fillByRanges @Override - void fillByRanges(@NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { + /* TYPE_MIXIN */ void fillByRanges( + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // 
region chunkDecl final WritableByteChunk chunk = dest.asWritableByteChunk(); + // endregion chunkDecl final FillByContext ctx = new FillByContext<>(); rowSequence.forAllRowKeyRanges((long firstKey, final long lastKey) -> { if (firstKey > ctx.maxKeyInCurrentBlock) { @@ -520,8 +527,13 @@ void fillByRanges(@NotNull WritableChunk dest, @NotNull RowSeque // region fillByKeys @Override - void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { + /* TYPE_MIXIN */ void fillByKeys( + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl final WritableByteChunk chunk = dest.asWritableByteChunk(); + // endregion chunkDecl final FillByContext ctx = new FillByContext<>(); rowSequence.forEachRowKey((final long v) -> { if (v > ctx.maxKeyInCurrentBlock) { @@ -531,7 +543,9 @@ void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequenc if (ctx.block == null) { chunk.fillWithNullValue(ctx.offset, 1); } else { + // region conversion chunk.set(ctx.offset, ctx.block[(int) (v & INDEX_MASK)]); + // endregion conversion } ++ctx.offset; return true; @@ -542,12 +556,17 @@ void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequenc // region fillByUnRowSequence @Override - void fillByUnRowSequence(@NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableByteChunk byteChunk = dest.asWritableByteChunk(); + /* TYPE_MIXIN */ void fillByUnRowSequence( + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableByteChunk chunk = dest.asWritableByteChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ) { final long firstKey = keys.get(ii); if (firstKey == RowSequence.NULL_ROW_KEY) { - byteChunk.set(ii++, NULL_BYTE); + chunk.set(ii++, NULL_BYTE); continue; } final long masked = firstKey & ~INDEX_MASK; @@ -563,25 +582,32 @@ void fillByUnRowSequence(@NotNull WritableChunk dest, @NotNull L } final byte [] 
block = blocks.getInnermostBlockByKeyOrNull(firstKey); if (block == null) { - byteChunk.fillWithNullValue(ii, lastII - ii + 1); + chunk.fillWithNullValue(ii, lastII - ii + 1); ii = lastII + 1; continue; } while (ii <= lastII) { final int indexWithinBlock = (int) (keys.get(ii) & INDEX_MASK); - byteChunk.set(ii++, block[indexWithinBlock]); + // region conversion + chunk.set(ii++, block[indexWithinBlock]); + // endregion conversion } } dest.setSize(keys.size()); } @Override - void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableByteChunk byteChunk = dest.asWritableByteChunk(); + /* TYPE_MIXIN */ void fillPrevByUnRowSequence( + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableByteChunk chunk = dest.asWritableByteChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ) { final long firstKey = keys.get(ii); if (firstKey == RowSequence.NULL_ROW_KEY) { - byteChunk.set(ii++, NULL_BYTE); + chunk.set(ii++, NULL_BYTE); continue; } final long masked = firstKey & ~INDEX_MASK; @@ -598,7 +624,7 @@ void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNu final byte [] block = blocks.getInnermostBlockByKeyOrNull(firstKey); if (block == null) { - byteChunk.fillWithNullValue(ii, lastII - ii + 1); + chunk.fillWithNullValue(ii, lastII - ii + 1); ii = lastII + 1; continue; } @@ -611,7 +637,9 @@ void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNu final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); final byte[] blockToUse = (prevInUse != null && (prevInUse[indexWithinInUse] & maskWithinInUse) != 0) ? prevBlock : block; - byteChunk.set(ii++, blockToUse == null ? NULL_BYTE : blockToUse[indexWithinBlock]); + // region conversion + chunk.set(ii++, blockToUse == null ? 
NULL_BYTE : blockToUse[indexWithinBlock]); + // endregion conversion } } dest.setSize(keys.size()); @@ -620,11 +648,16 @@ void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNu // region fillFromChunkByRanges @Override - void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final RowSequence rowSequence, + @NotNull final Chunk src + /* CONVERTER */) { if (rowSequence.isEmpty()) { return; } + // region chunkDecl final ByteChunk chunk = src.asByteChunk(); + // endregion chunkDecl final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); final boolean trackPrevious = shouldTrackPrevious(); @@ -696,11 +729,16 @@ private boolean shouldTrackPrevious() { // region fillFromChunkByKeys @Override - void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final RowSequence rowSequence, + @NotNull final Chunk src + /* CONVERTER */) { if (rowSequence.isEmpty()) { return; } + // region chunkDecl final ByteChunk chunk = src.asByteChunk(); + // endregion chunkDecl final LongChunk keys = rowSequence.asRowKeyChunk(); final boolean trackPrevious = shouldTrackPrevious();; @@ -745,7 +783,9 @@ void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src, @NotNull LongChunk keys) { + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { if (keys.size() == 0) { return; } + // region chunkDecl final ByteChunk chunk = src.asByteChunk(); + // endregion chunkDecl final boolean trackPrevious = shouldTrackPrevious();; @@ -930,7 +976,9 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch inUse[indexWithinInUse] |= maskWithinInUse; } } + // region conversion block[indexWithinBlock] = chunk.get(ii); + // endregion conversion ++ii; } while (ii < keys.size() && (key = keys.get(ii)) 
>= minKeyInCurrentBlock && key <= maxKeyInCurrentBlock); } @@ -938,7 +986,10 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch // endregion fillFromChunkUnordered @Override - public void fillPrevChunk(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { + public void fillPrevChunk( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence) { if (prevFlusher == null) { fillChunk(context, dest, rowSequence); return; @@ -948,7 +999,7 @@ public void fillPrevChunk(@NotNull FillContext context, @NotNull WritableChunk getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { + public ByteChunk getChunk(@NotNull final GetContext context, @NotNull final RowSequence rowSequence) { if (rowSequence.isEmpty()) { return ByteChunk.getEmptyChunk(); } @@ -967,7 +1018,7 @@ public ByteChunk getChunk(@NotNull GetContext context, @NotNull RowSeque // region getPrevChunk @Override - public ByteChunk getPrevChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { + public ByteChunk getPrevChunk(@NotNull final GetContext context, @NotNull final RowSequence rowSequence) { if (prevFlusher == null) { return getChunk(context, rowSequence); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterArraySource.java index dfde46a223f..ffd1c32e29c 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterArraySource.java @@ -12,10 +12,14 @@ import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.engine.table.ChunkSource; +import 
io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.util.SoftRecycler; import io.deephaven.util.compare.CharComparisons; +import io.deephaven.util.datastructures.LongSizedDataStructure; +import org.apache.commons.lang3.mutable.MutableInt; import org.jetbrains.annotations.NotNull; import java.util.Arrays; @@ -32,7 +36,8 @@ * * (C-haracter is deliberately spelled that way in order to prevent Replicate from altering this very comment). */ -public class CharacterArraySource extends ArraySourceHelper implements MutableColumnSourceGetDefaults.ForChar { +public class CharacterArraySource extends ArraySourceHelper + implements MutableColumnSourceGetDefaults.ForChar /* MIXIN_IMPLS */ { private static final SoftRecycler recycler = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, () -> new char[BLOCK_SIZE], null); @@ -74,7 +79,7 @@ public void prepareForParallelPopulation(RowSet changedRows) { return; } - // ensure that this source will have sufficient capacity to store these indices, does not need to be + // ensure that this source will have sufficient capacity to store these rows, does not need to be // null-filled as the values will be immediately written ensureCapacity(changedRows.lastRowKey() + 1, false); @@ -163,6 +168,9 @@ public final char getAndSetUnsafe(long index, char newValue) { return oldValue; } + // region getAndAddUnsafe + // endregion getAndAddUnsafe + @Override public Character getPrev(long rowKey) { return box(getPrevChar(rowKey)); @@ -203,7 +211,7 @@ public void move(long source, long dest, long length) { return; } if (((source - dest) & INDEX_MASK) == 0 && (source & INDEX_MASK) == 0) { - // TODO: we can move full blocks! + // TODO (#3359): we can move full blocks! 
} if (source < dest && source + length >= dest) { for (long ii = length - 1; ii >= 0; ) { @@ -299,13 +307,133 @@ public long resetWritableChunkToBackingStoreSlice(@NotNull ResettableWritableChu return capacity; } + // region fillChunk + @Override + public /* TYPE_MIXIN */ void fillChunk( + @NotNull final ChunkSource.FillContext context, + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + if (rowSequence.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { + fillSparseChunk(destination, rowSequence /* CONVERTER_ARG */); + return; + } + // region chunkDecl + final WritableCharChunk chunk = destination.asWritableCharChunk(); + // endregion chunkDecl + MutableInt destOffset = new MutableInt(0); + rowSequence.forAllRowKeyRanges((final long from, final long to) -> { + final int fromBlock = getBlockNo(from); + final int toBlock = getBlockNo(to); + final int fromOffsetInBlock = (int) (from & INDEX_MASK); + if (fromBlock == toBlock) { + final int sz = LongSizedDataStructure.intSize("int cast", to - from + 1); + // region copyFromArray + destination.copyFromArray(getBlock(fromBlock), fromOffsetInBlock, destOffset.intValue(), sz); + // endregion copyFromArray + destOffset.add(sz); + } else { + final int sz = BLOCK_SIZE - fromOffsetInBlock; + // region copyFromArray + destination.copyFromArray(getBlock(fromBlock), fromOffsetInBlock, destOffset.intValue(), sz); + // endregion copyFromArray + destOffset.add(sz); + for (int blockNo = fromBlock + 1; blockNo < toBlock; ++blockNo) { + // region copyFromArray + destination.copyFromArray(getBlock(blockNo), 0, destOffset.intValue(), BLOCK_SIZE); + // endregion copyFromArray + destOffset.add(BLOCK_SIZE); + } + int restSz = (int) (to & INDEX_MASK) + 1; + // region copyFromArray + destination.copyFromArray(getBlock(toBlock), 0, destOffset.intValue(), restSz); + // endregion copyFromArray + destOffset.add(restSz); + } + }); + destination.setSize(destOffset.intValue()); + } 
+ // endregion fillChunk + + private interface CopyFromBlockFunctor { + void copy(int blockNo, int srcOffset, int length); + } + + // region fillPrevChunk + @Override + public /* TYPE_MIXIN */ void fillPrevChunk( + @NotNull final ColumnSource.FillContext context, + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + if (prevFlusher == null) { + fillChunk(context, destination, rowSequence /* CONVERTER_ARG */); + return; + } + + if (rowSequence.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { + fillSparsePrevChunk(destination, rowSequence /* CONVERTER_ARG */); + return; + } + + final ArraySourceHelper.FillContext effectiveContext = (ArraySourceHelper.FillContext) context; + final MutableInt destOffset = new MutableInt(0); + + // region chunkDecl + final WritableCharChunk chunk = destination.asWritableCharChunk(); + // endregion chunkDecl + + CopyFromBlockFunctor lambda = (blockNo, srcOffset, length) -> { + final long[] inUse = prevInUse[blockNo]; + if (inUse != null) { + // region conditionalCopy + effectiveContext.copyKernel.conditionalCopy(destination, getBlock(blockNo), getPrevBlock(blockNo), + inUse, srcOffset, destOffset.intValue(), length); + // endregion conditionalCopy + } else { + // region copyFromArray + destination.copyFromArray(getBlock(blockNo), srcOffset, destOffset.intValue(), length); + // endregion copyFromArray + } + destOffset.add(length); + }; + + rowSequence.forAllRowKeyRanges((final long from, final long to) -> { + final int fromBlock = getBlockNo(from); + final int toBlock = getBlockNo(to); + final int fromOffsetInBlock = (int) (from & INDEX_MASK); + if (fromBlock == toBlock) { + final int sz = LongSizedDataStructure.intSize("int cast", to - from + 1); + lambda.copy(fromBlock, fromOffsetInBlock, sz); + } else { + final int sz = BLOCK_SIZE - fromOffsetInBlock; + lambda.copy(fromBlock, fromOffsetInBlock, sz); + + for (int blockNo = fromBlock + 1; blockNo < toBlock; ++blockNo) 
{ + lambda.copy(blockNo, 0, BLOCK_SIZE); + } + + int restSz = (int) (to & INDEX_MASK) + 1; + lambda.copy(toBlock, 0, restSz); + } + }); + destination.setSize(destOffset.intValue()); + } + // endregion fillPrevChunk + + // region fillSparseChunk @Override - protected void fillSparseChunk(@NotNull final WritableChunk destGeneric, @NotNull final RowSequence rows) { + protected /* TYPE_MIXIN */ void fillSparseChunk( + @NotNull final WritableChunk destGeneric, + @NotNull final RowSequence rows + /* CONVERTER */) { if (rows.size() == 0) { destGeneric.setSize(0); return; } - final WritableCharChunk dest = destGeneric.asWritableCharChunk(); + // region chunkDecl + final WritableCharChunk chunk = destGeneric.asWritableCharChunk(); + // endregion chunkDecl final FillSparseChunkContext ctx = new FillSparseChunkContext<>(); rows.forAllRowKeys((final long v) -> { if (v >= ctx.capForCurrentBlock) { @@ -313,13 +441,20 @@ protected void fillSparseChunk(@NotNull final WritableChunk dest ctx.capForCurrentBlock = (ctx.currentBlockNo + 1L) << LOG_BLOCK_SIZE; ctx.currentBlock = blocks[ctx.currentBlockNo]; } - dest.set(ctx.offset++, ctx.currentBlock[(int) (v & INDEX_MASK)]); + // region conversion + chunk.set(ctx.offset++, ctx.currentBlock[(int) (v & INDEX_MASK)]); + // endregion conversion }); - dest.setSize(ctx.offset); + chunk.setSize(ctx.offset); } + // endregion fillSparseChunk + // region fillSparsePrevChunk @Override - protected void fillSparsePrevChunk(@NotNull final WritableChunk destGeneric, @NotNull final RowSequence rows) { + protected /* TYPE_MIXIN */ void fillSparsePrevChunk( + @NotNull final WritableChunk destGeneric, + @NotNull final RowSequence rows + /* CONVERTER */) { final long sz = rows.size(); if (sz == 0) { destGeneric.setSize(0); @@ -327,11 +462,13 @@ protected void fillSparsePrevChunk(@NotNull final WritableChunk } if (prevFlusher == null) { - fillSparseChunk(destGeneric, rows); + fillSparseChunk(destGeneric, rows /* CONVERTER_ARG */); return; } - final 
WritableCharChunk dest = destGeneric.asWritableCharChunk(); + // region chunkDecl + final WritableCharChunk chunk = destGeneric.asWritableCharChunk(); + // endregion chunkDecl final FillSparseChunkContext ctx = new FillSparseChunkContext<>(); rows.forAllRowKeys((final long v) -> { if (v >= ctx.capForCurrentBlock) { @@ -346,59 +483,86 @@ protected void fillSparsePrevChunk(@NotNull final WritableChunk final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); final boolean usePrev = ctx.prevInUseBlock != null && (ctx.prevInUseBlock[indexWithinInUse] & maskWithinInUse) != 0; - dest.set(ctx.offset++, usePrev ? ctx.currentPrevBlock[indexWithinBlock] : ctx.currentBlock[indexWithinBlock]); + // region conversion + chunk.set(ctx.offset++, usePrev ? ctx.currentPrevBlock[indexWithinBlock] : ctx.currentBlock[indexWithinBlock]); + // endregion conversion }); - dest.setSize(ctx.offset); + chunk.setSize(ctx.offset); } + // endregion fillSparsePrevChunk + // region fillSparseChunkUnordered @Override - protected void fillSparseChunkUnordered(@NotNull final WritableChunk destGeneric, @NotNull final LongChunk rows) { - final WritableCharChunk dest = destGeneric.asWritableCharChunk(); + protected /* TYPE_MIXIN */ void fillSparseChunkUnordered( + @NotNull final WritableChunk destGeneric, + @NotNull final LongChunk rows + /* CONVERTER */) { + // region chunkDecl + final WritableCharChunk chunk = destGeneric.asWritableCharChunk(); + // endregion chunkDecl final int sz = rows.size(); for (int ii = 0; ii < sz; ++ii) { final long fromIndex = rows.get(ii); if (fromIndex == RowSequence.NULL_ROW_KEY) { - dest.set(ii, NULL_CHAR); + chunk.set(ii, NULL_CHAR); continue; } final int blockNo = getBlockNo(fromIndex); if (blockNo >= blocks.length) { - dest.set(ii, NULL_CHAR); + chunk.set(ii, NULL_CHAR); } else { final char[] currentBlock = blocks[blockNo]; - dest.set(ii, currentBlock[(int) (fromIndex & INDEX_MASK)]); + 
// region conversion + chunk.set(ii, currentBlock[(int) (fromIndex & INDEX_MASK)]); + // endregion conversion } } - dest.setSize(sz); + chunk.setSize(sz); } + // endregion fillSparseChunkUnordered + // region fillSparsePrevChunkUnordered @Override - protected void fillSparsePrevChunkUnordered(@NotNull final WritableChunk destGeneric, @NotNull final LongChunk rows) { - final WritableCharChunk dest = destGeneric.asWritableCharChunk(); + protected /* TYPE_MIXIN */ void fillSparsePrevChunkUnordered( + @NotNull final WritableChunk destGeneric, + @NotNull final LongChunk rows + /* CONVERTER */) { + // region chunkDecl + final WritableCharChunk chunk = destGeneric.asWritableCharChunk(); + // endregion chunkDecl final int sz = rows.size(); for (int ii = 0; ii < sz; ++ii) { final long fromIndex = rows.get(ii); if (fromIndex == RowSequence.NULL_ROW_KEY) { - dest.set(ii, NULL_CHAR); + chunk.set(ii, NULL_CHAR); continue; } final int blockNo = getBlockNo(fromIndex); if (blockNo >= blocks.length) { - dest.set(ii, NULL_CHAR); + chunk.set(ii, NULL_CHAR); continue; } final char[] currentBlock = shouldUsePrevious(fromIndex) ? 
prevBlocks[blockNo] : blocks[blockNo]; - dest.set(ii, currentBlock[(int) (fromIndex & INDEX_MASK)]); + // region conversion + chunk.set(ii, currentBlock[(int) (fromIndex & INDEX_MASK)]); + // endregion conversion } - dest.setSize(sz); + chunk.setSize(sz); } + // endregion fillSparsePrevChunkUnordered + // region fillFromChunkByRanges @Override - void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final RowSequence rowSequence, + final Chunk src + /* CONVERTER */) { if (rowSequence.size() == 0) { return; } + // region chunkDecl final CharChunk chunk = src.asCharChunk(); + // endregion chunkDecl final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); @@ -419,33 +583,36 @@ void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk> LOG_BLOCK_SIZE); + final int block0 = (int) (firstKey >> LOG_BLOCK_SIZE); final int sIndexWithinBlock = (int) (firstKey & INDEX_MASK); - final char[] inner = blocks[block]; + final char[] block = blocks[block0]; - if (inner != knownUnaliasedBlock && chunk.isAlias(inner)) { + if (block != knownUnaliasedBlock && chunk.isAlias(block)) { throw new UnsupportedOperationException("Source chunk is an alias for target data"); } - knownUnaliasedBlock = inner; + knownUnaliasedBlock = block; // This 'if' with its constant condition should be very friendly to the branch predictor. 
if (trackPrevious) { // this should be vectorized for (int jj = 0; jj < length; ++jj) { if (shouldRecordPrevious(firstKey + jj, prevBlocks, recycler)) { - prevBlocks[block][sIndexWithinBlock + jj] = inner[sIndexWithinBlock + jj]; + prevBlocks[block0][sIndexWithinBlock + jj] = block[sIndexWithinBlock + jj]; } } } - chunk.copyToTypedArray(offset, inner, sIndexWithinBlock, length); + // region copyToTypedArray + chunk.copyToTypedArray(offset, block, sIndexWithinBlock, length); + // endregion copyToTypedArray firstKey += length; offset += length; } } } + // endregion fillFromChunkByRanges - public void copyFromChunk(long firstKey, long totalLength, Chunk src, int offset) { + public void copyFromChunk(long firstKey, final long totalLength, final Chunk src, int offset) { if (totalLength == 0) { return; } @@ -458,22 +625,28 @@ public void copyFromChunk(long firstKey, long totalLength, Chunk> LOG_BLOCK_SIZE); + final int block0 = (int) (firstKey >> LOG_BLOCK_SIZE); final int sIndexWithinBlock = (int) (firstKey & INDEX_MASK); - final char[] inner = blocks[block]; + final char[] block = blocks[block0]; - chunk.copyToTypedArray(offset, inner, sIndexWithinBlock, length); + chunk.copyToTypedArray(offset, block, sIndexWithinBlock, length); firstKey += length; offset += length; } } + // region fillFromChunkByKeys @Override - void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final RowSequence rowSequence, + final Chunk src + /* CONVERTER */) { if (rowSequence.size() == 0) { return; } + // region chunkDecl final CharChunk chunk = src.asCharChunk(); + // endregion chunkDecl final LongChunk keys = rowSequence.asRowKeyChunk(); final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); @@ -490,10 +663,10 @@ void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk> LOG_BLOCK_SIZE); - final char[] inner = blocks[block]; + final int block0 = (int) 
(firstKey >> LOG_BLOCK_SIZE); + final char[] block = blocks[block0]; - if (chunk.isAlias(inner)) { + if (chunk.isAlias(block)) { throw new UnsupportedOperationException("Source chunk is an alias for target data"); } @@ -503,21 +676,31 @@ void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src, @NotNull LongChunk keys) { + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { if (keys.size() == 0) { return; } + // region chunkDecl final CharChunk chunk = src.asCharChunk(); + // endregion chunkDecl final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); @@ -530,10 +713,10 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch final long minKeyInCurrentBlock = firstKey & ~INDEX_MASK; final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; - final int block = (int) (firstKey >> LOG_BLOCK_SIZE); - final char[] inner = blocks[block]; + final int block0 = (int) (firstKey >> LOG_BLOCK_SIZE); + final char[] block = blocks[block0]; - if (chunk.isAlias(inner)) { + if (chunk.isAlias(block)) { throw new UnsupportedOperationException("Source chunk is an alias for target data"); } @@ -543,12 +726,18 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch if (trackPrevious) { if (shouldRecordPrevious(key, prevBlocks, recycler)) { - prevBlocks[block][indexWithinBlock] = inner[indexWithinBlock]; + prevBlocks[block0][indexWithinBlock] = block[indexWithinBlock]; } } - inner[indexWithinBlock] = chunk.get(ii); + // region conversion + block[indexWithinBlock] = chunk.get(ii); + // endregion conversion ++ii; } while (ii < keys.size() && (key = keys.get(ii)) >= minKeyInCurrentBlock && key <= maxKeyInCurrentBlock); } } + // endregion fillFromChunkUnordered + + // region reinterpretation + // endregion reinterpretation } diff --git 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterSparseArraySource.java index 238b5ad5262..56e51f39de5 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterSparseArraySource.java @@ -3,6 +3,7 @@ */ package io.deephaven.engine.table.impl.sources; +import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; @@ -40,7 +41,8 @@ * * (C-haracter is deliberately spelled that way in order to prevent Replicate from altering this very comment). */ -public class CharacterSparseArraySource extends SparseArrayColumnSource implements MutableColumnSourceGetDefaults.ForChar { +public class CharacterSparseArraySource extends SparseArrayColumnSource + implements MutableColumnSourceGetDefaults.ForChar /* MIXIN_IMPLS */ { // region recyclers private static final SoftRecycler recycler = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, () -> new char[BLOCK_SIZE], null); @@ -403,7 +405,7 @@ private void commitUpdates() { } @Override - public void prepareForParallelPopulation(RowSet changedRows) { + public void prepareForParallelPopulation(final RowSet changedRows) { final long currentStep = LogicalClock.DEFAULT.currentStep(); if (prepareForParallelPopulationClockCycle == currentStep) { throw new IllegalStateException("May not call prepareForParallelPopulation twice on one clock cycle!"); @@ -478,8 +480,13 @@ private boolean shouldUsePrevious(final long index) { // region fillByRanges @Override - void fillByRanges(@NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { + /* TYPE_MIXIN */ void fillByRanges( + @NotNull final WritableChunk dest, + @NotNull final RowSequence 
rowSequence + /* CONVERTER */) { + // region chunkDecl final WritableCharChunk chunk = dest.asWritableCharChunk(); + // endregion chunkDecl final FillByContext ctx = new FillByContext<>(); rowSequence.forAllRowKeyRanges((long firstKey, final long lastKey) -> { if (firstKey > ctx.maxKeyInCurrentBlock) { @@ -515,8 +522,13 @@ void fillByRanges(@NotNull WritableChunk dest, @NotNull RowSeque // region fillByKeys @Override - void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { + /* TYPE_MIXIN */ void fillByKeys( + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl final WritableCharChunk chunk = dest.asWritableCharChunk(); + // endregion chunkDecl final FillByContext ctx = new FillByContext<>(); rowSequence.forEachRowKey((final long v) -> { if (v > ctx.maxKeyInCurrentBlock) { @@ -526,7 +538,9 @@ void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequenc if (ctx.block == null) { chunk.fillWithNullValue(ctx.offset, 1); } else { + // region conversion chunk.set(ctx.offset, ctx.block[(int) (v & INDEX_MASK)]); + // endregion conversion } ++ctx.offset; return true; @@ -537,12 +551,17 @@ void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequenc // region fillByUnRowSequence @Override - void fillByUnRowSequence(@NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableCharChunk charChunk = dest.asWritableCharChunk(); + /* TYPE_MIXIN */ void fillByUnRowSequence( + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableCharChunk chunk = dest.asWritableCharChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ) { final long firstKey = keys.get(ii); if (firstKey == RowSequence.NULL_ROW_KEY) { - charChunk.set(ii++, NULL_CHAR); + chunk.set(ii++, NULL_CHAR); continue; } final long masked = firstKey & ~INDEX_MASK; @@ -558,25 +577,32 @@ void fillByUnRowSequence(@NotNull 
WritableChunk dest, @NotNull L } final char [] block = blocks.getInnermostBlockByKeyOrNull(firstKey); if (block == null) { - charChunk.fillWithNullValue(ii, lastII - ii + 1); + chunk.fillWithNullValue(ii, lastII - ii + 1); ii = lastII + 1; continue; } while (ii <= lastII) { final int indexWithinBlock = (int) (keys.get(ii) & INDEX_MASK); - charChunk.set(ii++, block[indexWithinBlock]); + // region conversion + chunk.set(ii++, block[indexWithinBlock]); + // endregion conversion } } dest.setSize(keys.size()); } @Override - void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableCharChunk charChunk = dest.asWritableCharChunk(); + /* TYPE_MIXIN */ void fillPrevByUnRowSequence( + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableCharChunk chunk = dest.asWritableCharChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ) { final long firstKey = keys.get(ii); if (firstKey == RowSequence.NULL_ROW_KEY) { - charChunk.set(ii++, NULL_CHAR); + chunk.set(ii++, NULL_CHAR); continue; } final long masked = firstKey & ~INDEX_MASK; @@ -593,7 +619,7 @@ void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNu final char [] block = blocks.getInnermostBlockByKeyOrNull(firstKey); if (block == null) { - charChunk.fillWithNullValue(ii, lastII - ii + 1); + chunk.fillWithNullValue(ii, lastII - ii + 1); ii = lastII + 1; continue; } @@ -606,7 +632,9 @@ void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNu final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); final char[] blockToUse = (prevInUse != null && (prevInUse[indexWithinInUse] & maskWithinInUse) != 0) ? prevBlock : block; - charChunk.set(ii++, blockToUse == null ? NULL_CHAR : blockToUse[indexWithinBlock]); + // region conversion + chunk.set(ii++, blockToUse == null ? 
NULL_CHAR : blockToUse[indexWithinBlock]); + // endregion conversion } } dest.setSize(keys.size()); @@ -615,11 +643,16 @@ void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNu // region fillFromChunkByRanges @Override - void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final RowSequence rowSequence, + @NotNull final Chunk src + /* CONVERTER */) { if (rowSequence.isEmpty()) { return; } + // region chunkDecl final CharChunk chunk = src.asCharChunk(); + // endregion chunkDecl final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); final boolean trackPrevious = shouldTrackPrevious(); @@ -691,11 +724,16 @@ private boolean shouldTrackPrevious() { // region fillFromChunkByKeys @Override - void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final RowSequence rowSequence, + @NotNull final Chunk src + /* CONVERTER */) { if (rowSequence.isEmpty()) { return; } + // region chunkDecl final CharChunk chunk = src.asCharChunk(); + // endregion chunkDecl final LongChunk keys = rowSequence.asRowKeyChunk(); final boolean trackPrevious = shouldTrackPrevious();; @@ -740,7 +778,9 @@ void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src, @NotNull LongChunk keys) { + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { if (keys.size() == 0) { return; } + // region chunkDecl final CharChunk chunk = src.asCharChunk(); + // endregion chunkDecl final boolean trackPrevious = shouldTrackPrevious();; @@ -925,7 +971,9 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch inUse[indexWithinInUse] |= maskWithinInUse; } } + // region conversion block[indexWithinBlock] = chunk.get(ii); + // endregion conversion ++ii; } while (ii < keys.size() && (key = keys.get(ii)) 
>= minKeyInCurrentBlock && key <= maxKeyInCurrentBlock); } @@ -933,7 +981,10 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch // endregion fillFromChunkUnordered @Override - public void fillPrevChunk(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { + public void fillPrevChunk( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence) { if (prevFlusher == null) { fillChunk(context, dest, rowSequence); return; @@ -943,7 +994,7 @@ public void fillPrevChunk(@NotNull FillContext context, @NotNull WritableChunk getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { + public CharChunk getChunk(@NotNull final GetContext context, @NotNull final RowSequence rowSequence) { if (rowSequence.isEmpty()) { return CharChunk.getEmptyChunk(); } @@ -962,7 +1013,7 @@ public CharChunk getChunk(@NotNull GetContext context, @NotNull RowSeque // region getPrevChunk @Override - public CharChunk getPrevChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { + public CharChunk getPrevChunk(@NotNull final GetContext context, @NotNull final RowSequence rowSequence) { if (prevFlusher == null) { return getChunk(context, rowSequence); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ConvertableTimeSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ConvertableTimeSource.java new file mode 100644 index 00000000000..63bec1eb8a3 --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ConvertableTimeSource.java @@ -0,0 +1,71 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.sources; + +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.time.DateTime; + +import java.time.*; + +/** + * An interface for {@link ColumnSource}s that indicate that it both 
represents a time value, and may be converted
+ * between other common time values efficiently.
+ */
+public interface ConvertableTimeSource {
+    /**
+     * Convert this source to a {@link ZonedDateTime} source at the specified {@link ZoneId zone}.
+     *
+     * @param zone the time zone
+     * @return a view of this source as a {@link ZonedDateTime}
+     */
+    ColumnSource<ZonedDateTime> toZonedDateTime(ZoneId zone);
+
+    /**
+     * Convert this source to a {@link LocalDate} source at the specified {@link ZoneId zone}.
+     *
+     * @param zone the time zone
+     * @return a view of this source as a {@link LocalDate}
+     */
+    ColumnSource<LocalDate> toLocalDate(ZoneId zone);
+
+    /**
+     * Convert this source to a {@link LocalTime} source at the specified {@link ZoneId zone}.
+     *
+     * @param zone the time zone
+     * @return a view of this source as a {@link LocalTime}
+     */
+    ColumnSource<LocalTime> toLocalTime(ZoneId zone);
+
+    /**
+     * Convert this source to an {@link Instant} source.
+     *
+     * @return a view of this source as an {@link Instant}
+     */
+    ColumnSource<Instant> toInstant();
+
+    /**
+     * Convert this source to a {@link DateTime} source.
+     *
+     * @return a view of this source as a {@link DateTime}
+     */
+    ColumnSource<DateTime> toDateTime();
+
+    /**
+     * Convert this source to a {@code long} source of nanoseconds of epoch.
+     *
+     * @return a view of this source as a {@code long} of nanoseconds since the epoch
+     */
+    ColumnSource<Long> toEpochNano();
+
+    /**
+     * Check if this class supports time conversion. If false, all other methods will fail.
+     *
+     * @return true if time conversion is supported.
+     */
+    boolean supportsTimeConversion();
+
+    interface Zoned {
+        ZoneId getZone();
+    }
+}
diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DateTimeArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DateTimeArraySource.java
index f0d9c4382e1..de4d7ea07b1 100644
--- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DateTimeArraySource.java
+++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DateTimeArraySource.java
@@ -3,268 +3,37 @@
  */
 package io.deephaven.engine.table.impl.sources;
 
-import io.deephaven.engine.table.SharedContext;
 import io.deephaven.engine.table.ColumnSource;
-import io.deephaven.engine.table.WritableColumnSource;
-import io.deephaven.engine.table.impl.AbstractColumnSource;
 import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults;
-import io.deephaven.engine.rowset.chunkattributes.RowKeys;
 import io.deephaven.time.DateTime;
 import io.deephaven.time.DateTimeUtils;
-import io.deephaven.chunk.*;
-import io.deephaven.chunk.attributes.Values;
-import io.deephaven.engine.table.impl.chunkfillers.ChunkFiller;
-import io.deephaven.engine.rowset.RowSequence;
 import org.jetbrains.annotations.NotNull;
 
-import static io.deephaven.util.QueryConstants.NULL_LONG;
-
 /**
- * Array-backed ColumnSource for DateTimes. Allows reinterpret as long.
+ * Array-backed {@link ColumnSource} for DateTimes. Allows reinterpretation to long and {@link java.time.Instant}.
*/ -public class DateTimeArraySource extends AbstractLongArraySource { - +public class DateTimeArraySource extends NanosBasedTimeArraySource + implements MutableColumnSourceGetDefaults.ForLongAsDateTime, ConvertableTimeSource { public DateTimeArraySource() { super(DateTime.class); } - @Override - public void setNull(long key) { - set(key, NULL_LONG); + public DateTimeArraySource(final @NotNull LongArraySource nanoSource) { + super(DateTime.class, nanoSource); } @Override - public void set(long key, DateTime value) { - set(key, value == null ? NULL_LONG : value.getNanos()); - } - - @Override - public DateTime get(long rowKey) { - final long nanos = getLong(rowKey); + protected DateTime makeValue(long nanos) { return DateTimeUtils.nanosToTime(nanos); } @Override - public DateTime getPrev(long rowKey) { - final long nanos = getPrevLong(rowKey); - return DateTimeUtils.nanosToTime(nanos); - } - - - @Override - public boolean allowsReinterpret( - @NotNull final Class alternateDataType) { - return alternateDataType == long.class; - } - - // the ArrayBackedColumnSource fillChunk can't handle changing the type - @Override - public void fillChunk(@NotNull ColumnSource.FillContext context, @NotNull WritableChunk dest, - @NotNull RowSequence rowSequence) { - final ChunkFiller filler = ChunkFiller.forChunkType(dest.getChunkType()); - if (rowSequence.getAverageRunLengthEstimate() > USE_RANGES_AVERAGE_RUN_LENGTH) { - filler.fillByRanges(this, rowSequence, dest); - } else { - filler.fillByIndices(this, rowSequence, dest); - } - } - - @Override - public void fillPrevChunk(@NotNull ColumnSource.FillContext context, @NotNull WritableChunk dest, - @NotNull RowSequence rowSequence) { - final ChunkFiller filler = ChunkFiller.forChunkType(dest.getChunkType()); - if (rowSequence.getAverageRunLengthEstimate() > USE_RANGES_AVERAGE_RUN_LENGTH) { - filler.fillPrevByRanges(this, rowSequence, dest); - } else { - filler.fillPrevByIndices(this, rowSequence, dest); - } - } - - @Override - public 
Chunk getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { - return getChunkByFilling(context, rowSequence); - } - - @Override - public Chunk getPrevChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { - return getPrevChunkByFilling(context, rowSequence); - } - - @Override - protected void fillSparseChunk(@NotNull final WritableChunk destGeneric, - @NotNull final RowSequence indices) { - super.fillSparseChunk(destGeneric, indices, DateTimeUtils::nanosToTime); - } - - @Override - protected void fillSparsePrevChunk(@NotNull final WritableChunk destGeneric, - @NotNull final RowSequence indices) { - super.fillSparsePrevChunk(destGeneric, indices, DateTimeUtils::nanosToTime); - } - - @Override - protected void fillSparseChunkUnordered(@NotNull final WritableChunk destGeneric, - @NotNull final LongChunk indices) { - super.fillSparseChunkUnordered(destGeneric, indices, DateTimeUtils::nanosToTime); - } - - @Override - protected void fillSparsePrevChunkUnordered(@NotNull final WritableChunk destGeneric, - @NotNull final LongChunk indices) { - super.fillSparsePrevChunkUnordered(destGeneric, indices, DateTimeUtils::nanosToTime); - } - - @Override - public void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk src) { - super.fillFromChunkByRanges(rowSequence, src, DateTimeUtils::nanos); - } - - @Override - void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src) { - super.fillFromChunkByKeys(rowSequence, src, DateTimeUtils::nanos); - } - - @Override - public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, - @NotNull LongChunk keys) { - super.fillFromChunkUnordered(src, keys, DateTimeUtils::nanos); - } - - @Override - protected ColumnSource doReinterpret( - @NotNull Class alternateDataType) { - // noinspection unchecked - return (ColumnSource) new ReinterpretedAsLong(); - } - - private class ReinterpretedAsLong extends AbstractColumnSource - implements 
MutableColumnSourceGetDefaults.ForLong, FillUnordered, WritableColumnSource { - private ReinterpretedAsLong() { - super(long.class); - } - - @Override - public void startTrackingPrevValues() { - DateTimeArraySource.this.startTrackingPrevValues(); - } - - @Override - public long getLong(long rowKey) { - return DateTimeArraySource.this.getLong(rowKey); - } - - @Override - public long getPrevLong(long rowKey) { - return DateTimeArraySource.this.getPrevLong(rowKey); - } - - @Override - public boolean allowsReinterpret(@NotNull Class alternateDataType) { - return alternateDataType == DateTime.class; - } - - @Override - protected ColumnSource doReinterpret( - @NotNull Class alternateDataType) { - return (ColumnSource) DateTimeArraySource.this; - } - - @Override - public FillContext makeFillContext(int chunkCapacity, SharedContext sharedContext) { - return DateTimeArraySource.super.makeFillContext(getChunkType()); - } - - @Override - public void fillChunk(@NotNull final ColumnSource.FillContext context, - @NotNull final WritableChunk destination, @NotNull final RowSequence rowSequence) { - // can't defer this case to super as they will ultimately call a method on DateTimeArraySource instead of - // AbstractLongArraySource - if (rowSequence.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { - fillSparseLongChunk(destination, rowSequence); - } else { - DateTimeArraySource.super.fillChunk(context, destination, rowSequence); - } - } - - @Override - public void fillPrevChunk(@NotNull final ColumnSource.FillContext context, - @NotNull final WritableChunk destination, @NotNull final RowSequence rowSequence) { - // can't defer these two cases to super as they will ultimately call a method on DateTimeArraySource instead - // of AbstractLongArraySource - if (prevFlusher == null) { - fillChunk(context, destination, rowSequence); - return; - } - - if (rowSequence.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { - fillSparsePrevLongChunk(destination, 
rowSequence); - return; - } - - DateTimeArraySource.super.fillPrevChunk(context, destination, rowSequence); - } - - @Override - public void fillChunkUnordered(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final LongChunk keyIndices) { - fillSparseLongChunkUnordered(destination, keyIndices); - } - - @Override - public void fillPrevChunkUnordered(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final LongChunk keyIndices) { - fillSparsePrevLongChunkUnordered(destination, keyIndices); - } - - @Override - public void setNull(long key) { - DateTimeArraySource.super.setNull(key); - } - - @Override - public void set(long key, long value) { - DateTimeArraySource.super.set(key, value); - } - - @Override - public void ensureCapacity(long capacity, boolean nullFill) { - DateTimeArraySource.this.ensureCapacity(capacity, nullFill); - } - - @Override - public FillFromContext makeFillFromContext(int chunkCapacity) { - return DateTimeArraySource.super.makeFillFromContext(chunkCapacity); - } - - @Override - public void fillFromChunk(@NotNull FillFromContext context, @NotNull Chunk src, - @NotNull RowSequence rowSequence) { - // Note: we cannot call super.fillFromChunk here as that method will call the override versions that expect - // ObjectChunks. 
- if (rowSequence.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { - DateTimeArraySource.super.fillFromChunkByKeys(rowSequence, src); - } else { - DateTimeArraySource.super.fillFromChunkByRanges(rowSequence, src); - } - } - - @Override - public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, - @NotNull LongChunk keys) { - DateTimeArraySource.super.fillFromChunkUnordered(context, src, keys); - } - - @Override - public boolean providesFillUnordered() { - return true; - } + protected long toNanos(DateTime value) { + return DateTimeUtils.nanos(value); } @Override - public boolean exposesChunkedBackingStore() { - // our backing store is not a DateTime chunk - return false; + public ColumnSource toDateTime() { + return this; } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DateTimeAsLongColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DateTimeAsLongColumnSource.java index 5784ccc3e2c..fe24e0ec4a7 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DateTimeAsLongColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DateTimeAsLongColumnSource.java @@ -3,98 +3,20 @@ */ package io.deephaven.engine.table.impl.sources; -import io.deephaven.engine.table.SharedContext; import io.deephaven.engine.table.ColumnSource; -import io.deephaven.engine.table.impl.AbstractColumnSource; -import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; import io.deephaven.time.DateTime; import io.deephaven.time.DateTimeUtils; -import io.deephaven.chunk.*; -import io.deephaven.chunk.attributes.Values; -import io.deephaven.engine.rowset.RowSequence; -import org.jetbrains.annotations.NotNull; /** * Reinterpret result {@link ColumnSource} implementations that translates {@link DateTime} to {@code long} values. 
*/ -public class DateTimeAsLongColumnSource extends AbstractColumnSource implements MutableColumnSourceGetDefaults.ForLong { - - private final ColumnSource alternateColumnSource; - - public DateTimeAsLongColumnSource(@NotNull final ColumnSource alternateColumnSource) { - super(long.class); - this.alternateColumnSource = alternateColumnSource; - } - - @Override - public long getLong(final long rowKey) { - return DateTimeUtils.nanos(alternateColumnSource.get(rowKey)); - } - - @Override - public long getPrevLong(final long rowKey) { - return DateTimeUtils.nanos(alternateColumnSource.getPrev(rowKey)); - } - - @Override - public boolean isImmutable() { - return alternateColumnSource.isImmutable(); - } - - @Override - public boolean allowsReinterpret(@NotNull final Class alternateDataType) { - return alternateDataType == DateTime.class; - } - - @Override - public ColumnSource doReinterpret(@NotNull final Class alternateDataType) throws IllegalArgumentException { - //noinspection unchecked - return (ColumnSource) alternateColumnSource; - } - - private class UnboxingFillContext implements FillContext { - final GetContext alternateGetContext; - - private UnboxingFillContext(final int chunkCapacity, final SharedContext sharedContext) { - alternateGetContext = alternateColumnSource.makeGetContext(chunkCapacity, sharedContext); - } - - @Override - public void close() { - alternateGetContext.close(); - } - } - - @Override - public FillContext makeFillContext(final int chunkCapacity, final SharedContext sharedContext) { - return new UnboxingFillContext(chunkCapacity, sharedContext); - } - - @Override - public void fillChunk(@NotNull final FillContext context, @NotNull final WritableChunk destination, @NotNull final RowSequence rowSequence) { - final UnboxingFillContext unboxingFillContext = (UnboxingFillContext) context; - final ObjectChunk dateTimeChunk = alternateColumnSource.getChunk(unboxingFillContext.alternateGetContext, rowSequence).asObjectChunk(); - 
convertToLong(destination, dateTimeChunk); - } - - @Override - public void fillPrevChunk(@NotNull final FillContext context, @NotNull final WritableChunk destination, @NotNull final RowSequence rowSequence) { - final UnboxingFillContext unboxingFillContext = (UnboxingFillContext) context; - final ObjectChunk dateTimeChunk = alternateColumnSource.getPrevChunk(unboxingFillContext.alternateGetContext, rowSequence).asObjectChunk(); - convertToLong(destination, dateTimeChunk); - } - - private static void convertToLong(@NotNull final WritableChunk destination, @NotNull final ObjectChunk dateTimeChunk) { - final WritableLongChunk longDestination = destination.asWritableLongChunk(); - for (int ii = 0; ii < dateTimeChunk.size(); ++ii) { - final DateTime dateTime = dateTimeChunk.get(ii); - longDestination.set(ii, DateTimeUtils.nanos(dateTime)); - } - longDestination.setSize(dateTimeChunk.size()); +public class DateTimeAsLongColumnSource extends UnboxedTimeBackedColumnSource { + public DateTimeAsLongColumnSource(ColumnSource alternateColumnSource) { + super(alternateColumnSource); } @Override - public boolean isStateless() { - return alternateColumnSource.isStateless(); + protected long toEpochNano(DateTime val) { + return DateTimeUtils.nanos(val); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DateTimeSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DateTimeSparseArraySource.java index f4b8a83b4ca..8665cf0cf14 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DateTimeSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DateTimeSparseArraySource.java @@ -4,267 +4,33 @@ package io.deephaven.engine.table.impl.sources; import io.deephaven.engine.table.impl.DefaultChunkSource; -import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import 
io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; -import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; -import io.deephaven.engine.rowset.chunkattributes.RowKeys; import io.deephaven.time.DateTime; -import io.deephaven.chunk.*; import io.deephaven.chunk.attributes.Values; -import io.deephaven.engine.table.impl.chunkfillers.ChunkFiller; -import io.deephaven.engine.rowset.RowSequence; +import io.deephaven.time.DateTimeUtils; import org.jetbrains.annotations.NotNull; -import static io.deephaven.util.QueryConstants.NULL_LONG; -import static io.deephaven.engine.table.impl.sources.sparse.SparseConstants.*; -import static io.deephaven.engine.table.impl.sources.sparse.SparseConstants.IN_USE_MASK; - /** - * Array-backed ColumnSource for DateTimes. Allows reinterpret as long. + * Array-backed ColumnSource for DateTimes. Allows reinterpret as long. */ -public class DateTimeSparseArraySource extends AbstractSparseLongArraySource - implements MutableColumnSourceGetDefaults.ForLongAsDateTime, DefaultChunkSource { +public class DateTimeSparseArraySource extends NanosBasedTimeSparseArraySource + implements MutableColumnSourceGetDefaults.ForLongAsDateTime, DefaultChunkSource, ConvertableTimeSource { public DateTimeSparseArraySource() { super(DateTime.class); } - @Override - public void set(long key, DateTime value) { - set(key, value == null ? 
NULL_LONG : value.getNanos()); - } - - @Override - public boolean allowsReinterpret( - @NotNull final Class alternateDataType) { - return alternateDataType == long.class; - } - - - // the ArrayBackedColumnSource fillChunk can't handle changing the type - @Override - public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk dest, - @NotNull RowSequence rowSequence) { - final ChunkFiller filler = ChunkFiller.forChunkType(dest.getChunkType()); - if (rowSequence.getAverageRunLengthEstimate() > USE_RANGES_AVERAGE_RUN_LENGTH) { - filler.fillByRanges(this, rowSequence, dest); - } else { - filler.fillByIndices(this, rowSequence, dest); - } - } - - @Override - public void fillPrevChunk(@NotNull FillContext context, @NotNull WritableChunk dest, - @NotNull RowSequence rowSequence) { - final ChunkFiller filler = ChunkFiller.forChunkType(dest.getChunkType()); - if (rowSequence.getAverageRunLengthEstimate() > USE_RANGES_AVERAGE_RUN_LENGTH) { - filler.fillPrevByRanges(this, rowSequence, dest); - } else { - filler.fillPrevByIndices(this, rowSequence, dest); - } + public DateTimeSparseArraySource(final @NotNull LongSparseArraySource nanoSource) { + super(DateTime.class, nanoSource); } @Override - public Chunk getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { - return getChunkByFilling(context, rowSequence); - } - - @Override - public Chunk getPrevChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { - return getPrevChunkByFilling(context, rowSequence); - } - - @Override - void fillByUnRowSequence(@NotNull WritableChunk dest, - @NotNull LongChunk keys) { - final WritableObjectChunk objectChunk = dest.asWritableObjectChunk(); - for (int ii = 0; ii < keys.size();) { - final long firstKey = keys.get(ii); - if (firstKey == RowSequence.NULL_ROW_KEY) { - objectChunk.set(ii++, null); - continue; - } - final long masked = firstKey & ~INDEX_MASK; - int lastII = ii; - while (lastII + 1 < keys.size()) { - final int nextII = lastII + 1; 
- final long nextKey = keys.get(nextII); - final long nextMasked = nextKey & ~INDEX_MASK; - if (nextMasked != masked) { - break; - } - lastII = nextII; - } - final long[] block = blocks.getInnermostBlockByKeyOrNull(firstKey); - if (block == null) { - objectChunk.fillWithNullValue(ii, lastII - ii + 1); - ii = lastII + 1; - continue; - } - while (ii <= lastII) { - final int indexWithinBlock = (int) (keys.get(ii) & INDEX_MASK); - final long nanos = block[indexWithinBlock]; - objectChunk.set(ii++, nanos == NULL_LONG ? null : new DateTime(nanos)); - } - } - dest.setSize(keys.size()); - } - - void fillPrevByUnRowSequence(@NotNull WritableChunk dest, - @NotNull LongChunk keys) { - final WritableObjectChunk objectChunk = dest.asWritableObjectChunk(); - for (int ii = 0; ii < keys.size();) { - final long firstKey = keys.get(ii); - if (firstKey == RowSequence.NULL_ROW_KEY) { - objectChunk.set(ii++, null); - continue; - } - final long masked = firstKey & ~INDEX_MASK; - int lastII = ii; - while (lastII + 1 < keys.size()) { - final int nextII = lastII + 1; - final long nextKey = keys.get(nextII); - final long nextMasked = nextKey & ~INDEX_MASK; - if (nextMasked != masked) { - break; - } - lastII = nextII; - } - - final long[] block = blocks.getInnermostBlockByKeyOrNull(firstKey); - if (block == null) { - objectChunk.fillWithNullValue(ii, lastII - ii + 1); - ii = lastII + 1; - continue; - } - - final long[] prevInUse = (prevFlusher == null || this.prevInUse == null) ? null - : this.prevInUse.getInnermostBlockByKeyOrNull(firstKey); - final long[] prevBlock = prevInUse == null ? null : prevBlocks.getInnermostBlockByKeyOrNull(firstKey); - while (ii <= lastII) { - final int indexWithinBlock = (int) (keys.get(ii) & INDEX_MASK); - final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; - final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); - - final long[] blockToUse = - (prevInUse != null && (prevInUse[indexWithinInUse] & maskWithinInUse) != 0) ? 
prevBlock : block; - final long nanos = blockToUse == null ? NULL_LONG : blockToUse[indexWithinBlock]; - objectChunk.set(ii++, nanos == NULL_LONG ? null : new DateTime(nanos)); - } - } - dest.setSize(keys.size()); + protected DateTime makeValue(long nanos) { + return DateTimeUtils.nanosToTime(nanos); } @Override - public void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk src) { - final ObjectChunk chunk = src.asObjectChunk(); - final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); - int offset = 0; - for (int ii = 0; ii < ranges.size(); ii += 2) { - long firstKey = ranges.get(ii); - final long lastKey = ranges.get(ii + 1); - - while (firstKey <= lastKey) { - final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; - final long lastKeyToUse = Math.min(maxKeyInCurrentBlock, lastKey); - final int length = (int) (lastKeyToUse - firstKey + 1); - - final int block0 = (int) (firstKey >> BLOCK0_SHIFT) & BLOCK0_MASK; - final int block1 = (int) (firstKey >> BLOCK1_SHIFT) & BLOCK1_MASK; - final int block2 = (int) (firstKey >> BLOCK2_SHIFT) & BLOCK2_MASK; - final long[] block = ensureBlock(block0, block1, block2); - - final int sIndexWithinBlock = (int) (firstKey & INDEX_MASK); - for (int jj = length - 1; jj >= 0; --jj) { - final long[] prevBlockInner = shouldRecordPrevious(firstKey + jj); - if (prevBlockInner != null) { - prevBlockInner[sIndexWithinBlock + jj] = block[sIndexWithinBlock + jj]; - } - - final DateTime time = chunk.get(offset + jj); - block[sIndexWithinBlock + jj] = (time == null) ? 
NULL_LONG : time.getNanos(); - } - - firstKey += length; - offset += length; - } - } - } - - @Override - public void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src) { - final ObjectChunk chunk = src.asObjectChunk(); - final LongChunk keys = rowSequence.asRowKeyChunk(); - - for (int ii = 0; ii < keys.size();) { - final long firstKey = keys.get(ii); - final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; - int lastII = ii; - while (lastII + 1 < keys.size() && keys.get(lastII + 1) <= maxKeyInCurrentBlock) { - ++lastII; - } - - final int block0 = (int) (firstKey >> BLOCK0_SHIFT) & BLOCK0_MASK; - final int block1 = (int) (firstKey >> BLOCK1_SHIFT) & BLOCK1_MASK; - final int block2 = (int) (firstKey >> BLOCK2_SHIFT) & BLOCK2_MASK; - final long[] block = ensureBlock(block0, block1, block2); - while (ii <= lastII) { - final int indexWithinBlock = (int) (keys.get(ii) & INDEX_MASK); - final long[] prevBlockInner = shouldRecordPrevious(keys.get(ii)); - if (prevBlockInner != null) { - prevBlockInner[indexWithinBlock] = block[indexWithinBlock]; - } - - final DateTime time = chunk.get(ii++); - block[indexWithinBlock] = (time == null) ? 
NULL_LONG : time.getNanos(); - } - } - } - - @Override - public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, - @NotNull LongChunk keys) { - if (keys.size() == 0) { - return; - } - final ObjectChunk chunk = src.asObjectChunk(); - - final boolean hasPrev = prevFlusher != null; - - if (hasPrev) { - prevFlusher.maybeActivate(); - } - - for (int ii = 0; ii < keys.size();) { - final long firstKey = keys.get(ii); - final long minKeyInCurrentBlock = firstKey & ~INDEX_MASK; - final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; - - final int block0 = (int) (firstKey >> BLOCK0_SHIFT) & BLOCK0_MASK; - final int block1 = (int) (firstKey >> BLOCK1_SHIFT) & BLOCK1_MASK; - final int block2 = (int) (firstKey >> BLOCK2_SHIFT) & BLOCK2_MASK; - final long[] block = ensureBlock(block0, block1, block2); - - if (chunk.isAlias(block)) { - throw new UnsupportedOperationException("Source chunk is an alias for target data"); - } - - // This conditional with its constant condition should be very friendly to the branch predictor. - - long key = keys.get(ii); - do { - final int indexWithinBlock = (int) (key & INDEX_MASK); - - if (hasPrev) { - final long[] prevBlockInner = shouldRecordPrevious(keys.get(ii)); - if (prevBlockInner != null) { - prevBlockInner[indexWithinBlock] = block[indexWithinBlock]; - } - } - final DateTime time = chunk.get(ii++); - block[indexWithinBlock] = (time == null) ? 
NULL_LONG : time.getNanos(); - } while (ii < keys.size() && (key = keys.get(ii)) >= minKeyInCurrentBlock && key <= maxKeyInCurrentBlock); - } + protected long toNanos(DateTime value) { + return DateTimeUtils.nanos(value); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleArraySource.java index 48d530959df..1b74775f1e9 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleArraySource.java @@ -17,10 +17,14 @@ import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.engine.table.ChunkSource; +import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.util.SoftRecycler; import io.deephaven.util.compare.DoubleComparisons; +import io.deephaven.util.datastructures.LongSizedDataStructure; +import org.apache.commons.lang3.mutable.MutableInt; import org.jetbrains.annotations.NotNull; import java.util.Arrays; @@ -37,7 +41,8 @@ * * (C-haracter is deliberately spelled that way in order to prevent Replicate from altering this very comment). 
*/ -public class DoubleArraySource extends ArraySourceHelper implements MutableColumnSourceGetDefaults.ForDouble { +public class DoubleArraySource extends ArraySourceHelper + implements MutableColumnSourceGetDefaults.ForDouble /* MIXIN_IMPLS */ { private static final SoftRecycler recycler = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, () -> new double[BLOCK_SIZE], null); @@ -79,7 +84,7 @@ public void prepareForParallelPopulation(RowSet changedRows) { return; } - // ensure that this source will have sufficient capacity to store these indices, does not need to be + // ensure that this source will have sufficient capacity to store these rows, does not need to be // null-filled as the values will be immediately written ensureCapacity(changedRows.lastRowKey() + 1, false); @@ -168,6 +173,9 @@ public final double getAndSetUnsafe(long index, double newValue) { return oldValue; } + // region getAndAddUnsafe + // endregion getAndAddUnsafe + @Override public Double getPrev(long rowKey) { return box(getPrevDouble(rowKey)); @@ -208,7 +216,7 @@ public void move(long source, long dest, long length) { return; } if (((source - dest) & INDEX_MASK) == 0 && (source & INDEX_MASK) == 0) { - // TODO: we can move full blocks! + // TODO (#3359): we can move full blocks! 
} if (source < dest && source + length >= dest) { for (long ii = length - 1; ii >= 0; ) { @@ -304,13 +312,133 @@ public long resetWritableChunkToBackingStoreSlice(@NotNull ResettableWritableChu return capacity; } + // region fillChunk + @Override + public /* TYPE_MIXIN */ void fillChunk( + @NotNull final ChunkSource.FillContext context, + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + if (rowSequence.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { + fillSparseChunk(destination, rowSequence /* CONVERTER_ARG */); + return; + } + // region chunkDecl + final WritableDoubleChunk chunk = destination.asWritableDoubleChunk(); + // endregion chunkDecl + MutableInt destOffset = new MutableInt(0); + rowSequence.forAllRowKeyRanges((final long from, final long to) -> { + final int fromBlock = getBlockNo(from); + final int toBlock = getBlockNo(to); + final int fromOffsetInBlock = (int) (from & INDEX_MASK); + if (fromBlock == toBlock) { + final int sz = LongSizedDataStructure.intSize("int cast", to - from + 1); + // region copyFromArray + destination.copyFromArray(getBlock(fromBlock), fromOffsetInBlock, destOffset.intValue(), sz); + // endregion copyFromArray + destOffset.add(sz); + } else { + final int sz = BLOCK_SIZE - fromOffsetInBlock; + // region copyFromArray + destination.copyFromArray(getBlock(fromBlock), fromOffsetInBlock, destOffset.intValue(), sz); + // endregion copyFromArray + destOffset.add(sz); + for (int blockNo = fromBlock + 1; blockNo < toBlock; ++blockNo) { + // region copyFromArray + destination.copyFromArray(getBlock(blockNo), 0, destOffset.intValue(), BLOCK_SIZE); + // endregion copyFromArray + destOffset.add(BLOCK_SIZE); + } + int restSz = (int) (to & INDEX_MASK) + 1; + // region copyFromArray + destination.copyFromArray(getBlock(toBlock), 0, destOffset.intValue(), restSz); + // endregion copyFromArray + destOffset.add(restSz); + } + }); + destination.setSize(destOffset.intValue()); 
+ } + // endregion fillChunk + + private interface CopyFromBlockFunctor { + void copy(int blockNo, int srcOffset, int length); + } + + // region fillPrevChunk + @Override + public /* TYPE_MIXIN */ void fillPrevChunk( + @NotNull final ColumnSource.FillContext context, + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + if (prevFlusher == null) { + fillChunk(context, destination, rowSequence /* CONVERTER_ARG */); + return; + } + + if (rowSequence.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { + fillSparsePrevChunk(destination, rowSequence /* CONVERTER_ARG */); + return; + } + + final ArraySourceHelper.FillContext effectiveContext = (ArraySourceHelper.FillContext) context; + final MutableInt destOffset = new MutableInt(0); + + // region chunkDecl + final WritableDoubleChunk chunk = destination.asWritableDoubleChunk(); + // endregion chunkDecl + + CopyFromBlockFunctor lambda = (blockNo, srcOffset, length) -> { + final long[] inUse = prevInUse[blockNo]; + if (inUse != null) { + // region conditionalCopy + effectiveContext.copyKernel.conditionalCopy(destination, getBlock(blockNo), getPrevBlock(blockNo), + inUse, srcOffset, destOffset.intValue(), length); + // endregion conditionalCopy + } else { + // region copyFromArray + destination.copyFromArray(getBlock(blockNo), srcOffset, destOffset.intValue(), length); + // endregion copyFromArray + } + destOffset.add(length); + }; + + rowSequence.forAllRowKeyRanges((final long from, final long to) -> { + final int fromBlock = getBlockNo(from); + final int toBlock = getBlockNo(to); + final int fromOffsetInBlock = (int) (from & INDEX_MASK); + if (fromBlock == toBlock) { + final int sz = LongSizedDataStructure.intSize("int cast", to - from + 1); + lambda.copy(fromBlock, fromOffsetInBlock, sz); + } else { + final int sz = BLOCK_SIZE - fromOffsetInBlock; + lambda.copy(fromBlock, fromOffsetInBlock, sz); + + for (int blockNo = fromBlock + 1; blockNo < toBlock; 
++blockNo) { + lambda.copy(blockNo, 0, BLOCK_SIZE); + } + + int restSz = (int) (to & INDEX_MASK) + 1; + lambda.copy(toBlock, 0, restSz); + } + }); + destination.setSize(destOffset.intValue()); + } + // endregion fillPrevChunk + + // region fillSparseChunk @Override - protected void fillSparseChunk(@NotNull final WritableChunk destGeneric, @NotNull final RowSequence rows) { + protected /* TYPE_MIXIN */ void fillSparseChunk( + @NotNull final WritableChunk destGeneric, + @NotNull final RowSequence rows + /* CONVERTER */) { if (rows.size() == 0) { destGeneric.setSize(0); return; } - final WritableDoubleChunk dest = destGeneric.asWritableDoubleChunk(); + // region chunkDecl + final WritableDoubleChunk chunk = destGeneric.asWritableDoubleChunk(); + // endregion chunkDecl final FillSparseChunkContext ctx = new FillSparseChunkContext<>(); rows.forAllRowKeys((final long v) -> { if (v >= ctx.capForCurrentBlock) { @@ -318,13 +446,20 @@ protected void fillSparseChunk(@NotNull final WritableChunk dest ctx.capForCurrentBlock = (ctx.currentBlockNo + 1L) << LOG_BLOCK_SIZE; ctx.currentBlock = blocks[ctx.currentBlockNo]; } - dest.set(ctx.offset++, ctx.currentBlock[(int) (v & INDEX_MASK)]); + // region conversion + chunk.set(ctx.offset++, ctx.currentBlock[(int) (v & INDEX_MASK)]); + // endregion conversion }); - dest.setSize(ctx.offset); + chunk.setSize(ctx.offset); } + // endregion fillSparseChunk + // region fillSparsePrevChunk @Override - protected void fillSparsePrevChunk(@NotNull final WritableChunk destGeneric, @NotNull final RowSequence rows) { + protected /* TYPE_MIXIN */ void fillSparsePrevChunk( + @NotNull final WritableChunk destGeneric, + @NotNull final RowSequence rows + /* CONVERTER */) { final long sz = rows.size(); if (sz == 0) { destGeneric.setSize(0); @@ -332,11 +467,13 @@ protected void fillSparsePrevChunk(@NotNull final WritableChunk } if (prevFlusher == null) { - fillSparseChunk(destGeneric, rows); + fillSparseChunk(destGeneric, rows /* CONVERTER_ARG */); return; 
} - final WritableDoubleChunk dest = destGeneric.asWritableDoubleChunk(); + // region chunkDecl + final WritableDoubleChunk chunk = destGeneric.asWritableDoubleChunk(); + // endregion chunkDecl final FillSparseChunkContext ctx = new FillSparseChunkContext<>(); rows.forAllRowKeys((final long v) -> { if (v >= ctx.capForCurrentBlock) { @@ -351,59 +488,86 @@ protected void fillSparsePrevChunk(@NotNull final WritableChunk final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); final boolean usePrev = ctx.prevInUseBlock != null && (ctx.prevInUseBlock[indexWithinInUse] & maskWithinInUse) != 0; - dest.set(ctx.offset++, usePrev ? ctx.currentPrevBlock[indexWithinBlock] : ctx.currentBlock[indexWithinBlock]); + // region conversion + chunk.set(ctx.offset++, usePrev ? ctx.currentPrevBlock[indexWithinBlock] : ctx.currentBlock[indexWithinBlock]); + // endregion conversion }); - dest.setSize(ctx.offset); + chunk.setSize(ctx.offset); } + // endregion fillSparsePrevChunk + // region fillSparseChunkUnordered @Override - protected void fillSparseChunkUnordered(@NotNull final WritableChunk destGeneric, @NotNull final LongChunk rows) { - final WritableDoubleChunk dest = destGeneric.asWritableDoubleChunk(); + protected /* TYPE_MIXIN */ void fillSparseChunkUnordered( + @NotNull final WritableChunk destGeneric, + @NotNull final LongChunk rows + /* CONVERTER */) { + // region chunkDecl + final WritableDoubleChunk chunk = destGeneric.asWritableDoubleChunk(); + // endregion chunkDecl final int sz = rows.size(); for (int ii = 0; ii < sz; ++ii) { final long fromIndex = rows.get(ii); if (fromIndex == RowSequence.NULL_ROW_KEY) { - dest.set(ii, NULL_DOUBLE); + chunk.set(ii, NULL_DOUBLE); continue; } final int blockNo = getBlockNo(fromIndex); if (blockNo >= blocks.length) { - dest.set(ii, NULL_DOUBLE); + chunk.set(ii, NULL_DOUBLE); } else { final double[] currentBlock = blocks[blockNo]; - dest.set(ii, 
currentBlock[(int) (fromIndex & INDEX_MASK)]); + // region conversion + chunk.set(ii, currentBlock[(int) (fromIndex & INDEX_MASK)]); + // endregion conversion } } - dest.setSize(sz); + chunk.setSize(sz); } + // endregion fillSparseChunkUnordered + // region fillSparsePrevChunkUnordered @Override - protected void fillSparsePrevChunkUnordered(@NotNull final WritableChunk destGeneric, @NotNull final LongChunk rows) { - final WritableDoubleChunk dest = destGeneric.asWritableDoubleChunk(); + protected /* TYPE_MIXIN */ void fillSparsePrevChunkUnordered( + @NotNull final WritableChunk destGeneric, + @NotNull final LongChunk rows + /* CONVERTER */) { + // region chunkDecl + final WritableDoubleChunk chunk = destGeneric.asWritableDoubleChunk(); + // endregion chunkDecl final int sz = rows.size(); for (int ii = 0; ii < sz; ++ii) { final long fromIndex = rows.get(ii); if (fromIndex == RowSequence.NULL_ROW_KEY) { - dest.set(ii, NULL_DOUBLE); + chunk.set(ii, NULL_DOUBLE); continue; } final int blockNo = getBlockNo(fromIndex); if (blockNo >= blocks.length) { - dest.set(ii, NULL_DOUBLE); + chunk.set(ii, NULL_DOUBLE); continue; } final double[] currentBlock = shouldUsePrevious(fromIndex) ? 
prevBlocks[blockNo] : blocks[blockNo]; - dest.set(ii, currentBlock[(int) (fromIndex & INDEX_MASK)]); + // region conversion + chunk.set(ii, currentBlock[(int) (fromIndex & INDEX_MASK)]); + // endregion conversion } - dest.setSize(sz); + chunk.setSize(sz); } + // endregion fillSparsePrevChunkUnordered + // region fillFromChunkByRanges @Override - void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final RowSequence rowSequence, + final Chunk src + /* CONVERTER */) { if (rowSequence.size() == 0) { return; } + // region chunkDecl final DoubleChunk chunk = src.asDoubleChunk(); + // endregion chunkDecl final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); @@ -424,33 +588,36 @@ void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk> LOG_BLOCK_SIZE); + final int block0 = (int) (firstKey >> LOG_BLOCK_SIZE); final int sIndexWithinBlock = (int) (firstKey & INDEX_MASK); - final double[] inner = blocks[block]; + final double[] block = blocks[block0]; - if (inner != knownUnaliasedBlock && chunk.isAlias(inner)) { + if (block != knownUnaliasedBlock && chunk.isAlias(block)) { throw new UnsupportedOperationException("Source chunk is an alias for target data"); } - knownUnaliasedBlock = inner; + knownUnaliasedBlock = block; // This 'if' with its constant condition should be very friendly to the branch predictor. 
if (trackPrevious) { // this should be vectorized for (int jj = 0; jj < length; ++jj) { if (shouldRecordPrevious(firstKey + jj, prevBlocks, recycler)) { - prevBlocks[block][sIndexWithinBlock + jj] = inner[sIndexWithinBlock + jj]; + prevBlocks[block0][sIndexWithinBlock + jj] = block[sIndexWithinBlock + jj]; } } } - chunk.copyToTypedArray(offset, inner, sIndexWithinBlock, length); + // region copyToTypedArray + chunk.copyToTypedArray(offset, block, sIndexWithinBlock, length); + // endregion copyToTypedArray firstKey += length; offset += length; } } } + // endregion fillFromChunkByRanges - public void copyFromChunk(long firstKey, long totalLength, Chunk src, int offset) { + public void copyFromChunk(long firstKey, final long totalLength, final Chunk src, int offset) { if (totalLength == 0) { return; } @@ -463,22 +630,28 @@ public void copyFromChunk(long firstKey, long totalLength, Chunk> LOG_BLOCK_SIZE); + final int block0 = (int) (firstKey >> LOG_BLOCK_SIZE); final int sIndexWithinBlock = (int) (firstKey & INDEX_MASK); - final double[] inner = blocks[block]; + final double[] block = blocks[block0]; - chunk.copyToTypedArray(offset, inner, sIndexWithinBlock, length); + chunk.copyToTypedArray(offset, block, sIndexWithinBlock, length); firstKey += length; offset += length; } } + // region fillFromChunkByKeys @Override - void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final RowSequence rowSequence, + final Chunk src + /* CONVERTER */) { if (rowSequence.size() == 0) { return; } + // region chunkDecl final DoubleChunk chunk = src.asDoubleChunk(); + // endregion chunkDecl final LongChunk keys = rowSequence.asRowKeyChunk(); final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); @@ -495,10 +668,10 @@ void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk> LOG_BLOCK_SIZE); - final double[] inner = blocks[block]; + final int 
block0 = (int) (firstKey >> LOG_BLOCK_SIZE); + final double[] block = blocks[block0]; - if (chunk.isAlias(inner)) { + if (chunk.isAlias(block)) { throw new UnsupportedOperationException("Source chunk is an alias for target data"); } @@ -508,21 +681,31 @@ void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src, @NotNull LongChunk keys) { + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { if (keys.size() == 0) { return; } + // region chunkDecl final DoubleChunk chunk = src.asDoubleChunk(); + // endregion chunkDecl final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); @@ -535,10 +718,10 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch final long minKeyInCurrentBlock = firstKey & ~INDEX_MASK; final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; - final int block = (int) (firstKey >> LOG_BLOCK_SIZE); - final double[] inner = blocks[block]; + final int block0 = (int) (firstKey >> LOG_BLOCK_SIZE); + final double[] block = blocks[block0]; - if (chunk.isAlias(inner)) { + if (chunk.isAlias(block)) { throw new UnsupportedOperationException("Source chunk is an alias for target data"); } @@ -548,12 +731,18 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch if (trackPrevious) { if (shouldRecordPrevious(key, prevBlocks, recycler)) { - prevBlocks[block][indexWithinBlock] = inner[indexWithinBlock]; + prevBlocks[block0][indexWithinBlock] = block[indexWithinBlock]; } } - inner[indexWithinBlock] = chunk.get(ii); + // region conversion + block[indexWithinBlock] = chunk.get(ii); + // endregion conversion ++ii; } while (ii < keys.size() && (key = keys.get(ii)) >= minKeyInCurrentBlock && key <= maxKeyInCurrentBlock); } } + // endregion fillFromChunkUnordered + + // region reinterpretation + // endregion 
reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleSparseArraySource.java index c61dddf306f..29a8d4f0656 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleSparseArraySource.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.sources; +import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; @@ -45,7 +46,8 @@ * * (C-haracter is deliberately spelled that way in order to prevent Replicate from altering this very comment). */ -public class DoubleSparseArraySource extends SparseArrayColumnSource implements MutableColumnSourceGetDefaults.ForDouble { +public class DoubleSparseArraySource extends SparseArrayColumnSource + implements MutableColumnSourceGetDefaults.ForDouble /* MIXIN_IMPLS */ { // region recyclers private static final SoftRecycler recycler = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, () -> new double[BLOCK_SIZE], null); @@ -408,7 +410,7 @@ private void commitUpdates() { } @Override - public void prepareForParallelPopulation(RowSet changedRows) { + public void prepareForParallelPopulation(final RowSet changedRows) { final long currentStep = LogicalClock.DEFAULT.currentStep(); if (prepareForParallelPopulationClockCycle == currentStep) { throw new IllegalStateException("May not call prepareForParallelPopulation twice on one clock cycle!"); @@ -483,8 +485,13 @@ private boolean shouldUsePrevious(final long index) { // region fillByRanges @Override - void fillByRanges(@NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { + /* TYPE_MIXIN */ void fillByRanges( + @NotNull final WritableChunk dest, + @NotNull final 
RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl final WritableDoubleChunk chunk = dest.asWritableDoubleChunk(); + // endregion chunkDecl final FillByContext ctx = new FillByContext<>(); rowSequence.forAllRowKeyRanges((long firstKey, final long lastKey) -> { if (firstKey > ctx.maxKeyInCurrentBlock) { @@ -520,8 +527,13 @@ void fillByRanges(@NotNull WritableChunk dest, @NotNull RowSeque // region fillByKeys @Override - void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { + /* TYPE_MIXIN */ void fillByKeys( + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl final WritableDoubleChunk chunk = dest.asWritableDoubleChunk(); + // endregion chunkDecl final FillByContext ctx = new FillByContext<>(); rowSequence.forEachRowKey((final long v) -> { if (v > ctx.maxKeyInCurrentBlock) { @@ -531,7 +543,9 @@ void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequenc if (ctx.block == null) { chunk.fillWithNullValue(ctx.offset, 1); } else { + // region conversion chunk.set(ctx.offset, ctx.block[(int) (v & INDEX_MASK)]); + // endregion conversion } ++ctx.offset; return true; @@ -542,12 +556,17 @@ void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequenc // region fillByUnRowSequence @Override - void fillByUnRowSequence(@NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableDoubleChunk doubleChunk = dest.asWritableDoubleChunk(); + /* TYPE_MIXIN */ void fillByUnRowSequence( + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableDoubleChunk chunk = dest.asWritableDoubleChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ) { final long firstKey = keys.get(ii); if (firstKey == RowSequence.NULL_ROW_KEY) { - doubleChunk.set(ii++, NULL_DOUBLE); + chunk.set(ii++, NULL_DOUBLE); continue; } final long masked = firstKey & ~INDEX_MASK; @@ -563,25 +582,32 @@ void 
fillByUnRowSequence(@NotNull WritableChunk dest, @NotNull L } final double [] block = blocks.getInnermostBlockByKeyOrNull(firstKey); if (block == null) { - doubleChunk.fillWithNullValue(ii, lastII - ii + 1); + chunk.fillWithNullValue(ii, lastII - ii + 1); ii = lastII + 1; continue; } while (ii <= lastII) { final int indexWithinBlock = (int) (keys.get(ii) & INDEX_MASK); - doubleChunk.set(ii++, block[indexWithinBlock]); + // region conversion + chunk.set(ii++, block[indexWithinBlock]); + // endregion conversion } } dest.setSize(keys.size()); } @Override - void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableDoubleChunk doubleChunk = dest.asWritableDoubleChunk(); + /* TYPE_MIXIN */ void fillPrevByUnRowSequence( + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableDoubleChunk chunk = dest.asWritableDoubleChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ) { final long firstKey = keys.get(ii); if (firstKey == RowSequence.NULL_ROW_KEY) { - doubleChunk.set(ii++, NULL_DOUBLE); + chunk.set(ii++, NULL_DOUBLE); continue; } final long masked = firstKey & ~INDEX_MASK; @@ -598,7 +624,7 @@ void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNu final double [] block = blocks.getInnermostBlockByKeyOrNull(firstKey); if (block == null) { - doubleChunk.fillWithNullValue(ii, lastII - ii + 1); + chunk.fillWithNullValue(ii, lastII - ii + 1); ii = lastII + 1; continue; } @@ -611,7 +637,9 @@ void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNu final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); final double[] blockToUse = (prevInUse != null && (prevInUse[indexWithinInUse] & maskWithinInUse) != 0) ? prevBlock : block; - doubleChunk.set(ii++, blockToUse == null ? NULL_DOUBLE : blockToUse[indexWithinBlock]); + // region conversion + chunk.set(ii++, blockToUse == null ? 
NULL_DOUBLE : blockToUse[indexWithinBlock]); + // endregion conversion } } dest.setSize(keys.size()); @@ -620,11 +648,16 @@ void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNu // region fillFromChunkByRanges @Override - void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final RowSequence rowSequence, + @NotNull final Chunk src + /* CONVERTER */) { if (rowSequence.isEmpty()) { return; } + // region chunkDecl final DoubleChunk chunk = src.asDoubleChunk(); + // endregion chunkDecl final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); final boolean trackPrevious = shouldTrackPrevious(); @@ -696,11 +729,16 @@ private boolean shouldTrackPrevious() { // region fillFromChunkByKeys @Override - void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final RowSequence rowSequence, + @NotNull final Chunk src + /* CONVERTER */) { if (rowSequence.isEmpty()) { return; } + // region chunkDecl final DoubleChunk chunk = src.asDoubleChunk(); + // endregion chunkDecl final LongChunk keys = rowSequence.asRowKeyChunk(); final boolean trackPrevious = shouldTrackPrevious();; @@ -745,7 +783,9 @@ void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src, @NotNull LongChunk keys) { + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { if (keys.size() == 0) { return; } + // region chunkDecl final DoubleChunk chunk = src.asDoubleChunk(); + // endregion chunkDecl final boolean trackPrevious = shouldTrackPrevious();; @@ -930,7 +976,9 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch inUse[indexWithinInUse] |= maskWithinInUse; } } + // region conversion block[indexWithinBlock] = chunk.get(ii); + // endregion conversion ++ii; } while (ii < keys.size() && (key = 
keys.get(ii)) >= minKeyInCurrentBlock && key <= maxKeyInCurrentBlock); } @@ -938,7 +986,10 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch // endregion fillFromChunkUnordered @Override - public void fillPrevChunk(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { + public void fillPrevChunk( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence) { if (prevFlusher == null) { fillChunk(context, dest, rowSequence); return; @@ -948,7 +999,7 @@ public void fillPrevChunk(@NotNull FillContext context, @NotNull WritableChunk getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { + public DoubleChunk getChunk(@NotNull final GetContext context, @NotNull final RowSequence rowSequence) { if (rowSequence.isEmpty()) { return DoubleChunk.getEmptyChunk(); } @@ -967,7 +1018,7 @@ public DoubleChunk getChunk(@NotNull GetContext context, @NotNull RowSeq // region getPrevChunk @Override - public DoubleChunk getPrevChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { + public DoubleChunk getPrevChunk(@NotNull final GetContext context, @NotNull final RowSequence rowSequence) { if (prevFlusher == null) { return getChunk(context, rowSequence); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatArraySource.java index 3a5ff8b6b5f..a7b4684819e 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatArraySource.java @@ -17,10 +17,14 @@ import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.engine.table.ChunkSource; 
+import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.util.SoftRecycler; import io.deephaven.util.compare.FloatComparisons; +import io.deephaven.util.datastructures.LongSizedDataStructure; +import org.apache.commons.lang3.mutable.MutableInt; import org.jetbrains.annotations.NotNull; import java.util.Arrays; @@ -37,7 +41,8 @@ * * (C-haracter is deliberately spelled that way in order to prevent Replicate from altering this very comment). */ -public class FloatArraySource extends ArraySourceHelper implements MutableColumnSourceGetDefaults.ForFloat { +public class FloatArraySource extends ArraySourceHelper + implements MutableColumnSourceGetDefaults.ForFloat /* MIXIN_IMPLS */ { private static final SoftRecycler recycler = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, () -> new float[BLOCK_SIZE], null); @@ -79,7 +84,7 @@ public void prepareForParallelPopulation(RowSet changedRows) { return; } - // ensure that this source will have sufficient capacity to store these indices, does not need to be + // ensure that this source will have sufficient capacity to store these rows, does not need to be // null-filled as the values will be immediately written ensureCapacity(changedRows.lastRowKey() + 1, false); @@ -168,6 +173,9 @@ public final float getAndSetUnsafe(long index, float newValue) { return oldValue; } + // region getAndAddUnsafe + // endregion getAndAddUnsafe + @Override public Float getPrev(long rowKey) { return box(getPrevFloat(rowKey)); @@ -208,7 +216,7 @@ public void move(long source, long dest, long length) { return; } if (((source - dest) & INDEX_MASK) == 0 && (source & INDEX_MASK) == 0) { - // TODO: we can move full blocks! + // TODO (#3359): we can move full blocks! 
} if (source < dest && source + length >= dest) { for (long ii = length - 1; ii >= 0; ) { @@ -304,13 +312,133 @@ public long resetWritableChunkToBackingStoreSlice(@NotNull ResettableWritableChu return capacity; } + // region fillChunk + @Override + public /* TYPE_MIXIN */ void fillChunk( + @NotNull final ChunkSource.FillContext context, + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + if (rowSequence.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { + fillSparseChunk(destination, rowSequence /* CONVERTER_ARG */); + return; + } + // region chunkDecl + final WritableFloatChunk chunk = destination.asWritableFloatChunk(); + // endregion chunkDecl + MutableInt destOffset = new MutableInt(0); + rowSequence.forAllRowKeyRanges((final long from, final long to) -> { + final int fromBlock = getBlockNo(from); + final int toBlock = getBlockNo(to); + final int fromOffsetInBlock = (int) (from & INDEX_MASK); + if (fromBlock == toBlock) { + final int sz = LongSizedDataStructure.intSize("int cast", to - from + 1); + // region copyFromArray + destination.copyFromArray(getBlock(fromBlock), fromOffsetInBlock, destOffset.intValue(), sz); + // endregion copyFromArray + destOffset.add(sz); + } else { + final int sz = BLOCK_SIZE - fromOffsetInBlock; + // region copyFromArray + destination.copyFromArray(getBlock(fromBlock), fromOffsetInBlock, destOffset.intValue(), sz); + // endregion copyFromArray + destOffset.add(sz); + for (int blockNo = fromBlock + 1; blockNo < toBlock; ++blockNo) { + // region copyFromArray + destination.copyFromArray(getBlock(blockNo), 0, destOffset.intValue(), BLOCK_SIZE); + // endregion copyFromArray + destOffset.add(BLOCK_SIZE); + } + int restSz = (int) (to & INDEX_MASK) + 1; + // region copyFromArray + destination.copyFromArray(getBlock(toBlock), 0, destOffset.intValue(), restSz); + // endregion copyFromArray + destOffset.add(restSz); + } + }); + destination.setSize(destOffset.intValue()); + 
} + // endregion fillChunk + + private interface CopyFromBlockFunctor { + void copy(int blockNo, int srcOffset, int length); + } + + // region fillPrevChunk + @Override + public /* TYPE_MIXIN */ void fillPrevChunk( + @NotNull final ColumnSource.FillContext context, + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + if (prevFlusher == null) { + fillChunk(context, destination, rowSequence /* CONVERTER_ARG */); + return; + } + + if (rowSequence.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { + fillSparsePrevChunk(destination, rowSequence /* CONVERTER_ARG */); + return; + } + + final ArraySourceHelper.FillContext effectiveContext = (ArraySourceHelper.FillContext) context; + final MutableInt destOffset = new MutableInt(0); + + // region chunkDecl + final WritableFloatChunk chunk = destination.asWritableFloatChunk(); + // endregion chunkDecl + + CopyFromBlockFunctor lambda = (blockNo, srcOffset, length) -> { + final long[] inUse = prevInUse[blockNo]; + if (inUse != null) { + // region conditionalCopy + effectiveContext.copyKernel.conditionalCopy(destination, getBlock(blockNo), getPrevBlock(blockNo), + inUse, srcOffset, destOffset.intValue(), length); + // endregion conditionalCopy + } else { + // region copyFromArray + destination.copyFromArray(getBlock(blockNo), srcOffset, destOffset.intValue(), length); + // endregion copyFromArray + } + destOffset.add(length); + }; + + rowSequence.forAllRowKeyRanges((final long from, final long to) -> { + final int fromBlock = getBlockNo(from); + final int toBlock = getBlockNo(to); + final int fromOffsetInBlock = (int) (from & INDEX_MASK); + if (fromBlock == toBlock) { + final int sz = LongSizedDataStructure.intSize("int cast", to - from + 1); + lambda.copy(fromBlock, fromOffsetInBlock, sz); + } else { + final int sz = BLOCK_SIZE - fromOffsetInBlock; + lambda.copy(fromBlock, fromOffsetInBlock, sz); + + for (int blockNo = fromBlock + 1; blockNo < toBlock; 
++blockNo) { + lambda.copy(blockNo, 0, BLOCK_SIZE); + } + + int restSz = (int) (to & INDEX_MASK) + 1; + lambda.copy(toBlock, 0, restSz); + } + }); + destination.setSize(destOffset.intValue()); + } + // endregion fillPrevChunk + + // region fillSparseChunk @Override - protected void fillSparseChunk(@NotNull final WritableChunk destGeneric, @NotNull final RowSequence rows) { + protected /* TYPE_MIXIN */ void fillSparseChunk( + @NotNull final WritableChunk destGeneric, + @NotNull final RowSequence rows + /* CONVERTER */) { if (rows.size() == 0) { destGeneric.setSize(0); return; } - final WritableFloatChunk dest = destGeneric.asWritableFloatChunk(); + // region chunkDecl + final WritableFloatChunk chunk = destGeneric.asWritableFloatChunk(); + // endregion chunkDecl final FillSparseChunkContext ctx = new FillSparseChunkContext<>(); rows.forAllRowKeys((final long v) -> { if (v >= ctx.capForCurrentBlock) { @@ -318,13 +446,20 @@ protected void fillSparseChunk(@NotNull final WritableChunk dest ctx.capForCurrentBlock = (ctx.currentBlockNo + 1L) << LOG_BLOCK_SIZE; ctx.currentBlock = blocks[ctx.currentBlockNo]; } - dest.set(ctx.offset++, ctx.currentBlock[(int) (v & INDEX_MASK)]); + // region conversion + chunk.set(ctx.offset++, ctx.currentBlock[(int) (v & INDEX_MASK)]); + // endregion conversion }); - dest.setSize(ctx.offset); + chunk.setSize(ctx.offset); } + // endregion fillSparseChunk + // region fillSparsePrevChunk @Override - protected void fillSparsePrevChunk(@NotNull final WritableChunk destGeneric, @NotNull final RowSequence rows) { + protected /* TYPE_MIXIN */ void fillSparsePrevChunk( + @NotNull final WritableChunk destGeneric, + @NotNull final RowSequence rows + /* CONVERTER */) { final long sz = rows.size(); if (sz == 0) { destGeneric.setSize(0); @@ -332,11 +467,13 @@ protected void fillSparsePrevChunk(@NotNull final WritableChunk } if (prevFlusher == null) { - fillSparseChunk(destGeneric, rows); + fillSparseChunk(destGeneric, rows /* CONVERTER_ARG */); return; } - 
final WritableFloatChunk dest = destGeneric.asWritableFloatChunk(); + // region chunkDecl + final WritableFloatChunk chunk = destGeneric.asWritableFloatChunk(); + // endregion chunkDecl final FillSparseChunkContext ctx = new FillSparseChunkContext<>(); rows.forAllRowKeys((final long v) -> { if (v >= ctx.capForCurrentBlock) { @@ -351,59 +488,86 @@ protected void fillSparsePrevChunk(@NotNull final WritableChunk final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); final boolean usePrev = ctx.prevInUseBlock != null && (ctx.prevInUseBlock[indexWithinInUse] & maskWithinInUse) != 0; - dest.set(ctx.offset++, usePrev ? ctx.currentPrevBlock[indexWithinBlock] : ctx.currentBlock[indexWithinBlock]); + // region conversion + chunk.set(ctx.offset++, usePrev ? ctx.currentPrevBlock[indexWithinBlock] : ctx.currentBlock[indexWithinBlock]); + // endregion conversion }); - dest.setSize(ctx.offset); + chunk.setSize(ctx.offset); } + // endregion fillSparsePrevChunk + // region fillSparseChunkUnordered @Override - protected void fillSparseChunkUnordered(@NotNull final WritableChunk destGeneric, @NotNull final LongChunk rows) { - final WritableFloatChunk dest = destGeneric.asWritableFloatChunk(); + protected /* TYPE_MIXIN */ void fillSparseChunkUnordered( + @NotNull final WritableChunk destGeneric, + @NotNull final LongChunk rows + /* CONVERTER */) { + // region chunkDecl + final WritableFloatChunk chunk = destGeneric.asWritableFloatChunk(); + // endregion chunkDecl final int sz = rows.size(); for (int ii = 0; ii < sz; ++ii) { final long fromIndex = rows.get(ii); if (fromIndex == RowSequence.NULL_ROW_KEY) { - dest.set(ii, NULL_FLOAT); + chunk.set(ii, NULL_FLOAT); continue; } final int blockNo = getBlockNo(fromIndex); if (blockNo >= blocks.length) { - dest.set(ii, NULL_FLOAT); + chunk.set(ii, NULL_FLOAT); } else { final float[] currentBlock = blocks[blockNo]; - dest.set(ii, currentBlock[(int) (fromIndex 
& INDEX_MASK)]); + // region conversion + chunk.set(ii, currentBlock[(int) (fromIndex & INDEX_MASK)]); + // endregion conversion } } - dest.setSize(sz); + chunk.setSize(sz); } + // endregion fillSparseChunkUnordered + // region fillSparsePrevChunkUnordered @Override - protected void fillSparsePrevChunkUnordered(@NotNull final WritableChunk destGeneric, @NotNull final LongChunk rows) { - final WritableFloatChunk dest = destGeneric.asWritableFloatChunk(); + protected /* TYPE_MIXIN */ void fillSparsePrevChunkUnordered( + @NotNull final WritableChunk destGeneric, + @NotNull final LongChunk rows + /* CONVERTER */) { + // region chunkDecl + final WritableFloatChunk chunk = destGeneric.asWritableFloatChunk(); + // endregion chunkDecl final int sz = rows.size(); for (int ii = 0; ii < sz; ++ii) { final long fromIndex = rows.get(ii); if (fromIndex == RowSequence.NULL_ROW_KEY) { - dest.set(ii, NULL_FLOAT); + chunk.set(ii, NULL_FLOAT); continue; } final int blockNo = getBlockNo(fromIndex); if (blockNo >= blocks.length) { - dest.set(ii, NULL_FLOAT); + chunk.set(ii, NULL_FLOAT); continue; } final float[] currentBlock = shouldUsePrevious(fromIndex) ? 
prevBlocks[blockNo] : blocks[blockNo]; - dest.set(ii, currentBlock[(int) (fromIndex & INDEX_MASK)]); + // region conversion + chunk.set(ii, currentBlock[(int) (fromIndex & INDEX_MASK)]); + // endregion conversion } - dest.setSize(sz); + chunk.setSize(sz); } + // endregion fillSparsePrevChunkUnordered + // region fillFromChunkByRanges @Override - void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final RowSequence rowSequence, + final Chunk src + /* CONVERTER */) { if (rowSequence.size() == 0) { return; } + // region chunkDecl final FloatChunk chunk = src.asFloatChunk(); + // endregion chunkDecl final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); @@ -424,33 +588,36 @@ void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk> LOG_BLOCK_SIZE); + final int block0 = (int) (firstKey >> LOG_BLOCK_SIZE); final int sIndexWithinBlock = (int) (firstKey & INDEX_MASK); - final float[] inner = blocks[block]; + final float[] block = blocks[block0]; - if (inner != knownUnaliasedBlock && chunk.isAlias(inner)) { + if (block != knownUnaliasedBlock && chunk.isAlias(block)) { throw new UnsupportedOperationException("Source chunk is an alias for target data"); } - knownUnaliasedBlock = inner; + knownUnaliasedBlock = block; // This 'if' with its constant condition should be very friendly to the branch predictor. 
if (trackPrevious) { // this should be vectorized for (int jj = 0; jj < length; ++jj) { if (shouldRecordPrevious(firstKey + jj, prevBlocks, recycler)) { - prevBlocks[block][sIndexWithinBlock + jj] = inner[sIndexWithinBlock + jj]; + prevBlocks[block0][sIndexWithinBlock + jj] = block[sIndexWithinBlock + jj]; } } } - chunk.copyToTypedArray(offset, inner, sIndexWithinBlock, length); + // region copyToTypedArray + chunk.copyToTypedArray(offset, block, sIndexWithinBlock, length); + // endregion copyToTypedArray firstKey += length; offset += length; } } } + // endregion fillFromChunkByRanges - public void copyFromChunk(long firstKey, long totalLength, Chunk src, int offset) { + public void copyFromChunk(long firstKey, final long totalLength, final Chunk src, int offset) { if (totalLength == 0) { return; } @@ -463,22 +630,28 @@ public void copyFromChunk(long firstKey, long totalLength, Chunk> LOG_BLOCK_SIZE); + final int block0 = (int) (firstKey >> LOG_BLOCK_SIZE); final int sIndexWithinBlock = (int) (firstKey & INDEX_MASK); - final float[] inner = blocks[block]; + final float[] block = blocks[block0]; - chunk.copyToTypedArray(offset, inner, sIndexWithinBlock, length); + chunk.copyToTypedArray(offset, block, sIndexWithinBlock, length); firstKey += length; offset += length; } } + // region fillFromChunkByKeys @Override - void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final RowSequence rowSequence, + final Chunk src + /* CONVERTER */) { if (rowSequence.size() == 0) { return; } + // region chunkDecl final FloatChunk chunk = src.asFloatChunk(); + // endregion chunkDecl final LongChunk keys = rowSequence.asRowKeyChunk(); final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); @@ -495,10 +668,10 @@ void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk> LOG_BLOCK_SIZE); - final float[] inner = blocks[block]; + final int block0 = 
(int) (firstKey >> LOG_BLOCK_SIZE); + final float[] block = blocks[block0]; - if (chunk.isAlias(inner)) { + if (chunk.isAlias(block)) { throw new UnsupportedOperationException("Source chunk is an alias for target data"); } @@ -508,21 +681,31 @@ void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src, @NotNull LongChunk keys) { + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { if (keys.size() == 0) { return; } + // region chunkDecl final FloatChunk chunk = src.asFloatChunk(); + // endregion chunkDecl final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); @@ -535,10 +718,10 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch final long minKeyInCurrentBlock = firstKey & ~INDEX_MASK; final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; - final int block = (int) (firstKey >> LOG_BLOCK_SIZE); - final float[] inner = blocks[block]; + final int block0 = (int) (firstKey >> LOG_BLOCK_SIZE); + final float[] block = blocks[block0]; - if (chunk.isAlias(inner)) { + if (chunk.isAlias(block)) { throw new UnsupportedOperationException("Source chunk is an alias for target data"); } @@ -548,12 +731,18 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch if (trackPrevious) { if (shouldRecordPrevious(key, prevBlocks, recycler)) { - prevBlocks[block][indexWithinBlock] = inner[indexWithinBlock]; + prevBlocks[block0][indexWithinBlock] = block[indexWithinBlock]; } } - inner[indexWithinBlock] = chunk.get(ii); + // region conversion + block[indexWithinBlock] = chunk.get(ii); + // endregion conversion ++ii; } while (ii < keys.size() && (key = keys.get(ii)) >= minKeyInCurrentBlock && key <= maxKeyInCurrentBlock); } } + // endregion fillFromChunkUnordered + + // region reinterpretation + // endregion reinterpretation } diff --git 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatSparseArraySource.java index 6690232fdc3..b351de18e7b 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatSparseArraySource.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.sources; +import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; @@ -45,7 +46,8 @@ * * (C-haracter is deliberately spelled that way in order to prevent Replicate from altering this very comment). */ -public class FloatSparseArraySource extends SparseArrayColumnSource implements MutableColumnSourceGetDefaults.ForFloat { +public class FloatSparseArraySource extends SparseArrayColumnSource + implements MutableColumnSourceGetDefaults.ForFloat /* MIXIN_IMPLS */ { // region recyclers private static final SoftRecycler recycler = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, () -> new float[BLOCK_SIZE], null); @@ -408,7 +410,7 @@ private void commitUpdates() { } @Override - public void prepareForParallelPopulation(RowSet changedRows) { + public void prepareForParallelPopulation(final RowSet changedRows) { final long currentStep = LogicalClock.DEFAULT.currentStep(); if (prepareForParallelPopulationClockCycle == currentStep) { throw new IllegalStateException("May not call prepareForParallelPopulation twice on one clock cycle!"); @@ -483,8 +485,13 @@ private boolean shouldUsePrevious(final long index) { // region fillByRanges @Override - void fillByRanges(@NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { + /* TYPE_MIXIN */ void fillByRanges( + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence + /* CONVERTER */) 
{ + // region chunkDecl final WritableFloatChunk chunk = dest.asWritableFloatChunk(); + // endregion chunkDecl final FillByContext ctx = new FillByContext<>(); rowSequence.forAllRowKeyRanges((long firstKey, final long lastKey) -> { if (firstKey > ctx.maxKeyInCurrentBlock) { @@ -520,8 +527,13 @@ void fillByRanges(@NotNull WritableChunk dest, @NotNull RowSeque // region fillByKeys @Override - void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { + /* TYPE_MIXIN */ void fillByKeys( + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl final WritableFloatChunk chunk = dest.asWritableFloatChunk(); + // endregion chunkDecl final FillByContext ctx = new FillByContext<>(); rowSequence.forEachRowKey((final long v) -> { if (v > ctx.maxKeyInCurrentBlock) { @@ -531,7 +543,9 @@ void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequenc if (ctx.block == null) { chunk.fillWithNullValue(ctx.offset, 1); } else { + // region conversion chunk.set(ctx.offset, ctx.block[(int) (v & INDEX_MASK)]); + // endregion conversion } ++ctx.offset; return true; @@ -542,12 +556,17 @@ void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequenc // region fillByUnRowSequence @Override - void fillByUnRowSequence(@NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableFloatChunk floatChunk = dest.asWritableFloatChunk(); + /* TYPE_MIXIN */ void fillByUnRowSequence( + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableFloatChunk chunk = dest.asWritableFloatChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ) { final long firstKey = keys.get(ii); if (firstKey == RowSequence.NULL_ROW_KEY) { - floatChunk.set(ii++, NULL_FLOAT); + chunk.set(ii++, NULL_FLOAT); continue; } final long masked = firstKey & ~INDEX_MASK; @@ -563,25 +582,32 @@ void fillByUnRowSequence(@NotNull WritableChunk dest, @NotNull L 
} final float [] block = blocks.getInnermostBlockByKeyOrNull(firstKey); if (block == null) { - floatChunk.fillWithNullValue(ii, lastII - ii + 1); + chunk.fillWithNullValue(ii, lastII - ii + 1); ii = lastII + 1; continue; } while (ii <= lastII) { final int indexWithinBlock = (int) (keys.get(ii) & INDEX_MASK); - floatChunk.set(ii++, block[indexWithinBlock]); + // region conversion + chunk.set(ii++, block[indexWithinBlock]); + // endregion conversion } } dest.setSize(keys.size()); } @Override - void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableFloatChunk floatChunk = dest.asWritableFloatChunk(); + /* TYPE_MIXIN */ void fillPrevByUnRowSequence( + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableFloatChunk chunk = dest.asWritableFloatChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ) { final long firstKey = keys.get(ii); if (firstKey == RowSequence.NULL_ROW_KEY) { - floatChunk.set(ii++, NULL_FLOAT); + chunk.set(ii++, NULL_FLOAT); continue; } final long masked = firstKey & ~INDEX_MASK; @@ -598,7 +624,7 @@ void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNu final float [] block = blocks.getInnermostBlockByKeyOrNull(firstKey); if (block == null) { - floatChunk.fillWithNullValue(ii, lastII - ii + 1); + chunk.fillWithNullValue(ii, lastII - ii + 1); ii = lastII + 1; continue; } @@ -611,7 +637,9 @@ void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNu final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); final float[] blockToUse = (prevInUse != null && (prevInUse[indexWithinInUse] & maskWithinInUse) != 0) ? prevBlock : block; - floatChunk.set(ii++, blockToUse == null ? NULL_FLOAT : blockToUse[indexWithinBlock]); + // region conversion + chunk.set(ii++, blockToUse == null ? 
NULL_FLOAT : blockToUse[indexWithinBlock]); + // endregion conversion } } dest.setSize(keys.size()); @@ -620,11 +648,16 @@ void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNu // region fillFromChunkByRanges @Override - void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final RowSequence rowSequence, + @NotNull final Chunk src + /* CONVERTER */) { if (rowSequence.isEmpty()) { return; } + // region chunkDecl final FloatChunk chunk = src.asFloatChunk(); + // endregion chunkDecl final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); final boolean trackPrevious = shouldTrackPrevious(); @@ -696,11 +729,16 @@ private boolean shouldTrackPrevious() { // region fillFromChunkByKeys @Override - void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final RowSequence rowSequence, + @NotNull final Chunk src + /* CONVERTER */) { if (rowSequence.isEmpty()) { return; } + // region chunkDecl final FloatChunk chunk = src.asFloatChunk(); + // endregion chunkDecl final LongChunk keys = rowSequence.asRowKeyChunk(); final boolean trackPrevious = shouldTrackPrevious();; @@ -745,7 +783,9 @@ void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src, @NotNull LongChunk keys) { + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { if (keys.size() == 0) { return; } + // region chunkDecl final FloatChunk chunk = src.asFloatChunk(); + // endregion chunkDecl final boolean trackPrevious = shouldTrackPrevious();; @@ -930,7 +976,9 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch inUse[indexWithinInUse] |= maskWithinInUse; } } + // region conversion block[indexWithinBlock] = chunk.get(ii); + // endregion conversion ++ii; } while (ii < keys.size() && (key = 
keys.get(ii)) >= minKeyInCurrentBlock && key <= maxKeyInCurrentBlock); } @@ -938,7 +986,10 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch // endregion fillFromChunkUnordered @Override - public void fillPrevChunk(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { + public void fillPrevChunk( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence) { if (prevFlusher == null) { fillChunk(context, dest, rowSequence); return; @@ -948,7 +999,7 @@ public void fillPrevChunk(@NotNull FillContext context, @NotNull WritableChunk getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { + public FloatChunk getChunk(@NotNull final GetContext context, @NotNull final RowSequence rowSequence) { if (rowSequence.isEmpty()) { return FloatChunk.getEmptyChunk(); } @@ -967,7 +1018,7 @@ public FloatChunk getChunk(@NotNull GetContext context, @NotNull RowSequ // region getPrevChunk @Override - public FloatChunk getPrevChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { + public FloatChunk getPrevChunk(@NotNull final GetContext context, @NotNull final RowSequence rowSequence) { if (prevFlusher == null) { return getChunk(context, rowSequence); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/InMemoryColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/InMemoryColumnSource.java index 2e9ec1694de..5579307d3f5 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/InMemoryColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/InMemoryColumnSource.java @@ -17,6 +17,8 @@ import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; +import java.time.Instant; + /** * This is a marker interface for a column source that is entirely within memory; therefore select operations should 
not * try to copy it into memory a second time. @@ -66,7 +68,9 @@ static WritableColumnSource makeImmutableSource(@NotNull Class dataTyp } else if (dataType == short.class || dataType == Short.class) { result = new ImmutableShortArraySource(); } else if (dataType == DateTime.class) { - result = new WritableLongAsDateTimeColumnSource(new ImmutableLongArraySource()); + result = new ImmutableDateTimeArraySource(); + } else if (dataType == Instant.class) { + result = new ImmutableInstantArraySource(); } else { result = new ImmutableObjectArraySource<>(dataType, componentType); } @@ -95,7 +99,9 @@ static WritableColumnSource makeImmutable2DSource(@NotNull Class dataT } else if (dataType == short.class || dataType == Short.class) { result = new Immutable2DShortArraySource(); } else if (dataType == DateTime.class) { - result = new WritableLongAsDateTimeColumnSource(new Immutable2DLongArraySource()); + result = new Immutable2DDateTimeArraySource(); + } else if (dataType == Instant.class) { + result = new Immutable2DInstantArraySource(); } else { result = new Immutable2DObjectArraySource<>(dataType, componentType); } @@ -127,8 +133,9 @@ static ColumnSource makeImmutableConstantSource( } else if (dataType == short.class || dataType == Short.class) { result = new ImmutableConstantShortSource(TypeUtils.unbox((Short) value)); } else if (dataType == DateTime.class) { - result = new LongAsDateTimeColumnSource( - new ImmutableConstantLongSource(DateTimeUtils.nanos((DateTime) value))); + result = new ImmutableConstantDateTimeSource(DateTimeUtils.nanos((DateTime) value)); + } else if (dataType == Instant.class) { + result = new ImmutableConstantInstantSource(DateTimeUtils.nanos((DateTime) value)); } else { result = new ImmutableConstantObjectSource<>(dataType, componentType, value); } @@ -207,7 +214,9 @@ static ColumnSource getImmutableMemoryColumnSource(@NotNull final Object } else if (dataType == Short.class) { result = new 
ImmutableShortArraySource(ArrayTypeUtils.getUnboxedArray((Short[]) dataArray)); } else if (dataType == DateTime.class && dataArray instanceof long[]) { - result = new LongAsDateTimeColumnSource(new ImmutableLongArraySource((long[]) dataArray)); + result = new ImmutableDateTimeArraySource((long[]) dataArray); + } else if (dataType == Instant.class && dataArray instanceof long[]) { + result = new ImmutableInstantArraySource((long[]) dataArray); } else { // noinspection unchecked result = new ImmutableObjectArraySource<>(dataType, componentType, (T[]) dataArray); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/InstantArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/InstantArraySource.java new file mode 100644 index 00000000000..72e5e7fc385 --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/InstantArraySource.java @@ -0,0 +1,40 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.sources; + +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; +import io.deephaven.time.DateTimeUtils; +import org.jetbrains.annotations.NotNull; + +import java.time.Instant; + +/** + * Array-backed ColumnSource for {@link Instant}s. Allows reinterpretation as long. 
+ */ +public class InstantArraySource extends NanosBasedTimeArraySource + implements MutableColumnSourceGetDefaults.ForLongAsInstant { + public InstantArraySource() { + super(Instant.class); + } + + public InstantArraySource(final @NotNull LongArraySource nanoSource) { + super(Instant.class, nanoSource); + } + + @Override + protected Instant makeValue(long nanos) { + return DateTimeUtils.makeInstant(nanos); + } + + @Override + protected long toNanos(Instant value) { + return DateTimeUtils.toEpochNano(value); + } + + @Override + public ColumnSource toInstant() { + return this; + } +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/InstantAsLongColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/InstantAsLongColumnSource.java new file mode 100644 index 00000000000..f54b6fcba04 --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/InstantAsLongColumnSource.java @@ -0,0 +1,23 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.sources; + +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.time.DateTimeUtils; + +import java.time.Instant; + +/** + * Reinterpret result {@link ColumnSource} implementations that translates {@link Instant} to {@code long} values. 
+ */ +public class InstantAsLongColumnSource extends UnboxedTimeBackedColumnSource { + public InstantAsLongColumnSource(ColumnSource alternateColumnSource) { + super(alternateColumnSource); + } + + @Override + protected long toEpochNano(Instant val) { + return DateTimeUtils.toEpochNano(val); + } +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/InstantSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/InstantSparseArraySource.java new file mode 100644 index 00000000000..f9264225985 --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/InstantSparseArraySource.java @@ -0,0 +1,36 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.sources; + +import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.table.impl.DefaultChunkSource; +import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; +import io.deephaven.time.DateTimeUtils; +import org.jetbrains.annotations.NotNull; + +import java.time.Instant; + +/** + * Sparse Array-backed ColumnSource for {@link Instant}s. Allows reinterpret as long. 
+ */ +public class InstantSparseArraySource extends NanosBasedTimeSparseArraySource + implements MutableColumnSourceGetDefaults.ForLongAsInstant, DefaultChunkSource, ConvertableTimeSource { + public InstantSparseArraySource() { + super(Instant.class); + } + + public InstantSparseArraySource(final @NotNull LongSparseArraySource nanoSource) { + super(Instant.class, nanoSource); + } + + @Override + protected Instant makeValue(long nanos) { + return DateTimeUtils.makeInstant(nanos); + } + + @Override + protected long toNanos(Instant value) { + return DateTimeUtils.toEpochNano(value); + } +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerArraySource.java index 4589ebafd40..3754c9f88e4 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerArraySource.java @@ -17,10 +17,14 @@ import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.engine.table.ChunkSource; +import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.util.SoftRecycler; import io.deephaven.util.compare.IntComparisons; +import io.deephaven.util.datastructures.LongSizedDataStructure; +import org.apache.commons.lang3.mutable.MutableInt; import org.jetbrains.annotations.NotNull; import java.util.Arrays; @@ -37,7 +41,8 @@ * * (C-haracter is deliberately spelled that way in order to prevent Replicate from altering this very comment). 
*/ -public class IntegerArraySource extends ArraySourceHelper implements MutableColumnSourceGetDefaults.ForInt { +public class IntegerArraySource extends ArraySourceHelper + implements MutableColumnSourceGetDefaults.ForInt /* MIXIN_IMPLS */ { private static final SoftRecycler recycler = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, () -> new int[BLOCK_SIZE], null); @@ -79,7 +84,7 @@ public void prepareForParallelPopulation(RowSet changedRows) { return; } - // ensure that this source will have sufficient capacity to store these indices, does not need to be + // ensure that this source will have sufficient capacity to store these rows, does not need to be // null-filled as the values will be immediately written ensureCapacity(changedRows.lastRowKey() + 1, false); @@ -168,6 +173,9 @@ public final int getAndSetUnsafe(long index, int newValue) { return oldValue; } + // region getAndAddUnsafe + // endregion getAndAddUnsafe + @Override public Integer getPrev(long rowKey) { return box(getPrevInt(rowKey)); @@ -208,7 +216,7 @@ public void move(long source, long dest, long length) { return; } if (((source - dest) & INDEX_MASK) == 0 && (source & INDEX_MASK) == 0) { - // TODO: we can move full blocks! + // TODO (#3359): we can move full blocks! 
} if (source < dest && source + length >= dest) { for (long ii = length - 1; ii >= 0; ) { @@ -304,13 +312,133 @@ public long resetWritableChunkToBackingStoreSlice(@NotNull ResettableWritableChu return capacity; } + // region fillChunk + @Override + public /* TYPE_MIXIN */ void fillChunk( + @NotNull final ChunkSource.FillContext context, + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + if (rowSequence.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { + fillSparseChunk(destination, rowSequence /* CONVERTER_ARG */); + return; + } + // region chunkDecl + final WritableIntChunk chunk = destination.asWritableIntChunk(); + // endregion chunkDecl + MutableInt destOffset = new MutableInt(0); + rowSequence.forAllRowKeyRanges((final long from, final long to) -> { + final int fromBlock = getBlockNo(from); + final int toBlock = getBlockNo(to); + final int fromOffsetInBlock = (int) (from & INDEX_MASK); + if (fromBlock == toBlock) { + final int sz = LongSizedDataStructure.intSize("int cast", to - from + 1); + // region copyFromArray + destination.copyFromArray(getBlock(fromBlock), fromOffsetInBlock, destOffset.intValue(), sz); + // endregion copyFromArray + destOffset.add(sz); + } else { + final int sz = BLOCK_SIZE - fromOffsetInBlock; + // region copyFromArray + destination.copyFromArray(getBlock(fromBlock), fromOffsetInBlock, destOffset.intValue(), sz); + // endregion copyFromArray + destOffset.add(sz); + for (int blockNo = fromBlock + 1; blockNo < toBlock; ++blockNo) { + // region copyFromArray + destination.copyFromArray(getBlock(blockNo), 0, destOffset.intValue(), BLOCK_SIZE); + // endregion copyFromArray + destOffset.add(BLOCK_SIZE); + } + int restSz = (int) (to & INDEX_MASK) + 1; + // region copyFromArray + destination.copyFromArray(getBlock(toBlock), 0, destOffset.intValue(), restSz); + // endregion copyFromArray + destOffset.add(restSz); + } + }); + destination.setSize(destOffset.intValue()); + } + 
// endregion fillChunk + + private interface CopyFromBlockFunctor { + void copy(int blockNo, int srcOffset, int length); + } + + // region fillPrevChunk + @Override + public /* TYPE_MIXIN */ void fillPrevChunk( + @NotNull final ColumnSource.FillContext context, + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + if (prevFlusher == null) { + fillChunk(context, destination, rowSequence /* CONVERTER_ARG */); + return; + } + + if (rowSequence.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { + fillSparsePrevChunk(destination, rowSequence /* CONVERTER_ARG */); + return; + } + + final ArraySourceHelper.FillContext effectiveContext = (ArraySourceHelper.FillContext) context; + final MutableInt destOffset = new MutableInt(0); + + // region chunkDecl + final WritableIntChunk chunk = destination.asWritableIntChunk(); + // endregion chunkDecl + + CopyFromBlockFunctor lambda = (blockNo, srcOffset, length) -> { + final long[] inUse = prevInUse[blockNo]; + if (inUse != null) { + // region conditionalCopy + effectiveContext.copyKernel.conditionalCopy(destination, getBlock(blockNo), getPrevBlock(blockNo), + inUse, srcOffset, destOffset.intValue(), length); + // endregion conditionalCopy + } else { + // region copyFromArray + destination.copyFromArray(getBlock(blockNo), srcOffset, destOffset.intValue(), length); + // endregion copyFromArray + } + destOffset.add(length); + }; + + rowSequence.forAllRowKeyRanges((final long from, final long to) -> { + final int fromBlock = getBlockNo(from); + final int toBlock = getBlockNo(to); + final int fromOffsetInBlock = (int) (from & INDEX_MASK); + if (fromBlock == toBlock) { + final int sz = LongSizedDataStructure.intSize("int cast", to - from + 1); + lambda.copy(fromBlock, fromOffsetInBlock, sz); + } else { + final int sz = BLOCK_SIZE - fromOffsetInBlock; + lambda.copy(fromBlock, fromOffsetInBlock, sz); + + for (int blockNo = fromBlock + 1; blockNo < toBlock; ++blockNo) { + 
lambda.copy(blockNo, 0, BLOCK_SIZE); + } + + int restSz = (int) (to & INDEX_MASK) + 1; + lambda.copy(toBlock, 0, restSz); + } + }); + destination.setSize(destOffset.intValue()); + } + // endregion fillPrevChunk + + // region fillSparseChunk @Override - protected void fillSparseChunk(@NotNull final WritableChunk destGeneric, @NotNull final RowSequence rows) { + protected /* TYPE_MIXIN */ void fillSparseChunk( + @NotNull final WritableChunk destGeneric, + @NotNull final RowSequence rows + /* CONVERTER */) { if (rows.size() == 0) { destGeneric.setSize(0); return; } - final WritableIntChunk dest = destGeneric.asWritableIntChunk(); + // region chunkDecl + final WritableIntChunk chunk = destGeneric.asWritableIntChunk(); + // endregion chunkDecl final FillSparseChunkContext ctx = new FillSparseChunkContext<>(); rows.forAllRowKeys((final long v) -> { if (v >= ctx.capForCurrentBlock) { @@ -318,13 +446,20 @@ protected void fillSparseChunk(@NotNull final WritableChunk dest ctx.capForCurrentBlock = (ctx.currentBlockNo + 1L) << LOG_BLOCK_SIZE; ctx.currentBlock = blocks[ctx.currentBlockNo]; } - dest.set(ctx.offset++, ctx.currentBlock[(int) (v & INDEX_MASK)]); + // region conversion + chunk.set(ctx.offset++, ctx.currentBlock[(int) (v & INDEX_MASK)]); + // endregion conversion }); - dest.setSize(ctx.offset); + chunk.setSize(ctx.offset); } + // endregion fillSparseChunk + // region fillSparsePrevChunk @Override - protected void fillSparsePrevChunk(@NotNull final WritableChunk destGeneric, @NotNull final RowSequence rows) { + protected /* TYPE_MIXIN */ void fillSparsePrevChunk( + @NotNull final WritableChunk destGeneric, + @NotNull final RowSequence rows + /* CONVERTER */) { final long sz = rows.size(); if (sz == 0) { destGeneric.setSize(0); @@ -332,11 +467,13 @@ protected void fillSparsePrevChunk(@NotNull final WritableChunk } if (prevFlusher == null) { - fillSparseChunk(destGeneric, rows); + fillSparseChunk(destGeneric, rows /* CONVERTER_ARG */); return; } - final WritableIntChunk 
dest = destGeneric.asWritableIntChunk(); + // region chunkDecl + final WritableIntChunk chunk = destGeneric.asWritableIntChunk(); + // endregion chunkDecl final FillSparseChunkContext ctx = new FillSparseChunkContext<>(); rows.forAllRowKeys((final long v) -> { if (v >= ctx.capForCurrentBlock) { @@ -351,59 +488,86 @@ protected void fillSparsePrevChunk(@NotNull final WritableChunk final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); final boolean usePrev = ctx.prevInUseBlock != null && (ctx.prevInUseBlock[indexWithinInUse] & maskWithinInUse) != 0; - dest.set(ctx.offset++, usePrev ? ctx.currentPrevBlock[indexWithinBlock] : ctx.currentBlock[indexWithinBlock]); + // region conversion + chunk.set(ctx.offset++, usePrev ? ctx.currentPrevBlock[indexWithinBlock] : ctx.currentBlock[indexWithinBlock]); + // endregion conversion }); - dest.setSize(ctx.offset); + chunk.setSize(ctx.offset); } + // endregion fillSparsePrevChunk + // region fillSparseChunkUnordered @Override - protected void fillSparseChunkUnordered(@NotNull final WritableChunk destGeneric, @NotNull final LongChunk rows) { - final WritableIntChunk dest = destGeneric.asWritableIntChunk(); + protected /* TYPE_MIXIN */ void fillSparseChunkUnordered( + @NotNull final WritableChunk destGeneric, + @NotNull final LongChunk rows + /* CONVERTER */) { + // region chunkDecl + final WritableIntChunk chunk = destGeneric.asWritableIntChunk(); + // endregion chunkDecl final int sz = rows.size(); for (int ii = 0; ii < sz; ++ii) { final long fromIndex = rows.get(ii); if (fromIndex == RowSequence.NULL_ROW_KEY) { - dest.set(ii, NULL_INT); + chunk.set(ii, NULL_INT); continue; } final int blockNo = getBlockNo(fromIndex); if (blockNo >= blocks.length) { - dest.set(ii, NULL_INT); + chunk.set(ii, NULL_INT); } else { final int[] currentBlock = blocks[blockNo]; - dest.set(ii, currentBlock[(int) (fromIndex & INDEX_MASK)]); + // region conversion + 
chunk.set(ii, currentBlock[(int) (fromIndex & INDEX_MASK)]); + // endregion conversion } } - dest.setSize(sz); + chunk.setSize(sz); } + // endregion fillSparseChunkUnordered + // region fillSparsePrevChunkUnordered @Override - protected void fillSparsePrevChunkUnordered(@NotNull final WritableChunk destGeneric, @NotNull final LongChunk rows) { - final WritableIntChunk dest = destGeneric.asWritableIntChunk(); + protected /* TYPE_MIXIN */ void fillSparsePrevChunkUnordered( + @NotNull final WritableChunk destGeneric, + @NotNull final LongChunk rows + /* CONVERTER */) { + // region chunkDecl + final WritableIntChunk chunk = destGeneric.asWritableIntChunk(); + // endregion chunkDecl final int sz = rows.size(); for (int ii = 0; ii < sz; ++ii) { final long fromIndex = rows.get(ii); if (fromIndex == RowSequence.NULL_ROW_KEY) { - dest.set(ii, NULL_INT); + chunk.set(ii, NULL_INT); continue; } final int blockNo = getBlockNo(fromIndex); if (blockNo >= blocks.length) { - dest.set(ii, NULL_INT); + chunk.set(ii, NULL_INT); continue; } final int[] currentBlock = shouldUsePrevious(fromIndex) ? 
prevBlocks[blockNo] : blocks[blockNo]; - dest.set(ii, currentBlock[(int) (fromIndex & INDEX_MASK)]); + // region conversion + chunk.set(ii, currentBlock[(int) (fromIndex & INDEX_MASK)]); + // endregion conversion } - dest.setSize(sz); + chunk.setSize(sz); } + // endregion fillSparsePrevChunkUnordered + // region fillFromChunkByRanges @Override - void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final RowSequence rowSequence, + final Chunk src + /* CONVERTER */) { if (rowSequence.size() == 0) { return; } + // region chunkDecl final IntChunk chunk = src.asIntChunk(); + // endregion chunkDecl final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); @@ -424,33 +588,36 @@ void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk> LOG_BLOCK_SIZE); + final int block0 = (int) (firstKey >> LOG_BLOCK_SIZE); final int sIndexWithinBlock = (int) (firstKey & INDEX_MASK); - final int[] inner = blocks[block]; + final int[] block = blocks[block0]; - if (inner != knownUnaliasedBlock && chunk.isAlias(inner)) { + if (block != knownUnaliasedBlock && chunk.isAlias(block)) { throw new UnsupportedOperationException("Source chunk is an alias for target data"); } - knownUnaliasedBlock = inner; + knownUnaliasedBlock = block; // This 'if' with its constant condition should be very friendly to the branch predictor. 
if (trackPrevious) { // this should be vectorized for (int jj = 0; jj < length; ++jj) { if (shouldRecordPrevious(firstKey + jj, prevBlocks, recycler)) { - prevBlocks[block][sIndexWithinBlock + jj] = inner[sIndexWithinBlock + jj]; + prevBlocks[block0][sIndexWithinBlock + jj] = block[sIndexWithinBlock + jj]; } } } - chunk.copyToTypedArray(offset, inner, sIndexWithinBlock, length); + // region copyToTypedArray + chunk.copyToTypedArray(offset, block, sIndexWithinBlock, length); + // endregion copyToTypedArray firstKey += length; offset += length; } } } + // endregion fillFromChunkByRanges - public void copyFromChunk(long firstKey, long totalLength, Chunk src, int offset) { + public void copyFromChunk(long firstKey, final long totalLength, final Chunk src, int offset) { if (totalLength == 0) { return; } @@ -463,22 +630,28 @@ public void copyFromChunk(long firstKey, long totalLength, Chunk> LOG_BLOCK_SIZE); + final int block0 = (int) (firstKey >> LOG_BLOCK_SIZE); final int sIndexWithinBlock = (int) (firstKey & INDEX_MASK); - final int[] inner = blocks[block]; + final int[] block = blocks[block0]; - chunk.copyToTypedArray(offset, inner, sIndexWithinBlock, length); + chunk.copyToTypedArray(offset, block, sIndexWithinBlock, length); firstKey += length; offset += length; } } + // region fillFromChunkByKeys @Override - void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final RowSequence rowSequence, + final Chunk src + /* CONVERTER */) { if (rowSequence.size() == 0) { return; } + // region chunkDecl final IntChunk chunk = src.asIntChunk(); + // endregion chunkDecl final LongChunk keys = rowSequence.asRowKeyChunk(); final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); @@ -495,10 +668,10 @@ void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk> LOG_BLOCK_SIZE); - final int[] inner = blocks[block]; + final int block0 = (int) 
(firstKey >> LOG_BLOCK_SIZE); + final int[] block = blocks[block0]; - if (chunk.isAlias(inner)) { + if (chunk.isAlias(block)) { throw new UnsupportedOperationException("Source chunk is an alias for target data"); } @@ -508,21 +681,31 @@ void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src, @NotNull LongChunk keys) { + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { if (keys.size() == 0) { return; } + // region chunkDecl final IntChunk chunk = src.asIntChunk(); + // endregion chunkDecl final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); @@ -535,10 +718,10 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch final long minKeyInCurrentBlock = firstKey & ~INDEX_MASK; final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; - final int block = (int) (firstKey >> LOG_BLOCK_SIZE); - final int[] inner = blocks[block]; + final int block0 = (int) (firstKey >> LOG_BLOCK_SIZE); + final int[] block = blocks[block0]; - if (chunk.isAlias(inner)) { + if (chunk.isAlias(block)) { throw new UnsupportedOperationException("Source chunk is an alias for target data"); } @@ -548,12 +731,18 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch if (trackPrevious) { if (shouldRecordPrevious(key, prevBlocks, recycler)) { - prevBlocks[block][indexWithinBlock] = inner[indexWithinBlock]; + prevBlocks[block0][indexWithinBlock] = block[indexWithinBlock]; } } - inner[indexWithinBlock] = chunk.get(ii); + // region conversion + block[indexWithinBlock] = chunk.get(ii); + // endregion conversion ++ii; } while (ii < keys.size() && (key = keys.get(ii)) >= minKeyInCurrentBlock && key <= maxKeyInCurrentBlock); } } + // endregion fillFromChunkUnordered + + // region reinterpretation + // endregion reinterpretation } diff --git 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerSparseArraySource.java index 80e650c58c2..1e4b894d7e0 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerSparseArraySource.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.sources; +import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; @@ -45,7 +46,8 @@ * * (C-haracter is deliberately spelled that way in order to prevent Replicate from altering this very comment). */ -public class IntegerSparseArraySource extends SparseArrayColumnSource implements MutableColumnSourceGetDefaults.ForInt { +public class IntegerSparseArraySource extends SparseArrayColumnSource + implements MutableColumnSourceGetDefaults.ForInt /* MIXIN_IMPLS */ { // region recyclers private static final SoftRecycler recycler = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, () -> new int[BLOCK_SIZE], null); @@ -408,7 +410,7 @@ private void commitUpdates() { } @Override - public void prepareForParallelPopulation(RowSet changedRows) { + public void prepareForParallelPopulation(final RowSet changedRows) { final long currentStep = LogicalClock.DEFAULT.currentStep(); if (prepareForParallelPopulationClockCycle == currentStep) { throw new IllegalStateException("May not call prepareForParallelPopulation twice on one clock cycle!"); @@ -483,8 +485,13 @@ private boolean shouldUsePrevious(final long index) { // region fillByRanges @Override - void fillByRanges(@NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { + /* TYPE_MIXIN */ void fillByRanges( + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence + /* 
CONVERTER */) { + // region chunkDecl final WritableIntChunk chunk = dest.asWritableIntChunk(); + // endregion chunkDecl final FillByContext ctx = new FillByContext<>(); rowSequence.forAllRowKeyRanges((long firstKey, final long lastKey) -> { if (firstKey > ctx.maxKeyInCurrentBlock) { @@ -520,8 +527,13 @@ void fillByRanges(@NotNull WritableChunk dest, @NotNull RowSeque // region fillByKeys @Override - void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { + /* TYPE_MIXIN */ void fillByKeys( + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl final WritableIntChunk chunk = dest.asWritableIntChunk(); + // endregion chunkDecl final FillByContext ctx = new FillByContext<>(); rowSequence.forEachRowKey((final long v) -> { if (v > ctx.maxKeyInCurrentBlock) { @@ -531,7 +543,9 @@ void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequenc if (ctx.block == null) { chunk.fillWithNullValue(ctx.offset, 1); } else { + // region conversion chunk.set(ctx.offset, ctx.block[(int) (v & INDEX_MASK)]); + // endregion conversion } ++ctx.offset; return true; @@ -542,12 +556,17 @@ void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequenc // region fillByUnRowSequence @Override - void fillByUnRowSequence(@NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableIntChunk intChunk = dest.asWritableIntChunk(); + /* TYPE_MIXIN */ void fillByUnRowSequence( + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableIntChunk chunk = dest.asWritableIntChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ) { final long firstKey = keys.get(ii); if (firstKey == RowSequence.NULL_ROW_KEY) { - intChunk.set(ii++, NULL_INT); + chunk.set(ii++, NULL_INT); continue; } final long masked = firstKey & ~INDEX_MASK; @@ -563,25 +582,32 @@ void fillByUnRowSequence(@NotNull WritableChunk dest, @NotNull L } final 
int [] block = blocks.getInnermostBlockByKeyOrNull(firstKey); if (block == null) { - intChunk.fillWithNullValue(ii, lastII - ii + 1); + chunk.fillWithNullValue(ii, lastII - ii + 1); ii = lastII + 1; continue; } while (ii <= lastII) { final int indexWithinBlock = (int) (keys.get(ii) & INDEX_MASK); - intChunk.set(ii++, block[indexWithinBlock]); + // region conversion + chunk.set(ii++, block[indexWithinBlock]); + // endregion conversion } } dest.setSize(keys.size()); } @Override - void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableIntChunk intChunk = dest.asWritableIntChunk(); + /* TYPE_MIXIN */ void fillPrevByUnRowSequence( + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableIntChunk chunk = dest.asWritableIntChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ) { final long firstKey = keys.get(ii); if (firstKey == RowSequence.NULL_ROW_KEY) { - intChunk.set(ii++, NULL_INT); + chunk.set(ii++, NULL_INT); continue; } final long masked = firstKey & ~INDEX_MASK; @@ -598,7 +624,7 @@ void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNu final int [] block = blocks.getInnermostBlockByKeyOrNull(firstKey); if (block == null) { - intChunk.fillWithNullValue(ii, lastII - ii + 1); + chunk.fillWithNullValue(ii, lastII - ii + 1); ii = lastII + 1; continue; } @@ -611,7 +637,9 @@ void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNu final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); final int[] blockToUse = (prevInUse != null && (prevInUse[indexWithinInUse] & maskWithinInUse) != 0) ? prevBlock : block; - intChunk.set(ii++, blockToUse == null ? NULL_INT : blockToUse[indexWithinBlock]); + // region conversion + chunk.set(ii++, blockToUse == null ? 
NULL_INT : blockToUse[indexWithinBlock]); + // endregion conversion } } dest.setSize(keys.size()); @@ -620,11 +648,16 @@ void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNu // region fillFromChunkByRanges @Override - void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final RowSequence rowSequence, + @NotNull final Chunk src + /* CONVERTER */) { if (rowSequence.isEmpty()) { return; } + // region chunkDecl final IntChunk chunk = src.asIntChunk(); + // endregion chunkDecl final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); final boolean trackPrevious = shouldTrackPrevious(); @@ -696,11 +729,16 @@ private boolean shouldTrackPrevious() { // region fillFromChunkByKeys @Override - void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final RowSequence rowSequence, + @NotNull final Chunk src + /* CONVERTER */) { if (rowSequence.isEmpty()) { return; } + // region chunkDecl final IntChunk chunk = src.asIntChunk(); + // endregion chunkDecl final LongChunk keys = rowSequence.asRowKeyChunk(); final boolean trackPrevious = shouldTrackPrevious();; @@ -745,7 +783,9 @@ void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src, @NotNull LongChunk keys) { + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { if (keys.size() == 0) { return; } + // region chunkDecl final IntChunk chunk = src.asIntChunk(); + // endregion chunkDecl final boolean trackPrevious = shouldTrackPrevious();; @@ -930,7 +976,9 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch inUse[indexWithinInUse] |= maskWithinInUse; } } + // region conversion block[indexWithinBlock] = chunk.get(ii); + // endregion conversion ++ii; } while (ii < keys.size() && (key = keys.get(ii)) >= 
minKeyInCurrentBlock && key <= maxKeyInCurrentBlock); } @@ -938,7 +986,10 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch // endregion fillFromChunkUnordered @Override - public void fillPrevChunk(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { + public void fillPrevChunk( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence) { if (prevFlusher == null) { fillChunk(context, dest, rowSequence); return; @@ -948,7 +999,7 @@ public void fillPrevChunk(@NotNull FillContext context, @NotNull WritableChunk getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { + public IntChunk getChunk(@NotNull final GetContext context, @NotNull final RowSequence rowSequence) { if (rowSequence.isEmpty()) { return IntChunk.getEmptyChunk(); } @@ -967,7 +1018,7 @@ public IntChunk getChunk(@NotNull GetContext context, @NotNull RowSequen // region getPrevChunk @Override - public IntChunk getPrevChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { + public IntChunk getPrevChunk(@NotNull final GetContext context, @NotNull final RowSequence rowSequence) { if (prevFlusher == null) { return getChunk(context, rowSequence); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LocalDateWrapperSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LocalDateWrapperSource.java new file mode 100644 index 00000000000..dba746f5c6c --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LocalDateWrapperSource.java @@ -0,0 +1,122 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.sources; + +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.rowset.RowSequence; +import 
/**
 * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending
 */
package io.deephaven.engine.table.impl.sources;

import io.deephaven.chunk.WritableChunk;
import io.deephaven.chunk.WritableObjectChunk;
import io.deephaven.chunk.attributes.Values;
import io.deephaven.engine.rowset.RowSequence;
import io.deephaven.engine.table.ChunkSource;
import io.deephaven.engine.table.ColumnSource;
import io.deephaven.engine.table.SharedContext;
import io.deephaven.engine.table.impl.AbstractColumnSource;
import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.time.LocalDate;
import java.time.ZoneId;
import java.time.ZonedDateTime;

/**
 * A {@link ColumnSource} for {@link LocalDate}s that is backed by an inner {@link ColumnSource} of
 * {@link ZonedDateTime}. Inner values whose zone differs from the configured {@code zone} are converted with
 * {@link ZonedDateTime#withZoneSameInstant(ZoneId)} before the local date is extracted, so every returned
 * {@link LocalDate} is expressed in the configured zone.
 */
public class LocalDateWrapperSource extends AbstractColumnSource<LocalDate>
        implements MutableColumnSourceGetDefaults.ForObject<LocalDate> {
    private final ColumnSource<ZonedDateTime> inner;
    private final ZoneId zone;
    // True when the inner source may produce values in a zone other than `zone`, in which case each
    // value's zone must be inspected (and possibly converted) on read.
    private final boolean mustInspectZone;

    /**
     * Pairs the inner source's fill context with a scratch chunk that receives the inner
     * {@link ZonedDateTime}s before conversion into the destination chunk.
     */
    private class ConvertingFillContext implements ChunkSource.FillContext {
        final ChunkSource.FillContext alternateFillContext;
        final WritableObjectChunk<ZonedDateTime, Values> innerChunk;

        private ConvertingFillContext(final int chunkCapacity, final SharedContext sharedContext) {
            alternateFillContext = inner.makeFillContext(chunkCapacity, sharedContext);
            innerChunk = WritableObjectChunk.makeWritableChunk(chunkCapacity);
        }

        @Override
        public void close() {
            alternateFillContext.close();
            innerChunk.close();
        }
    }

    public LocalDateWrapperSource(final ColumnSource<ZonedDateTime> inner, final ZoneId zone) {
        super(LocalDate.class);
        this.inner = inner;
        this.zone = zone;
        // BUG FIX: the equality test must be negated. Per-value zone inspection may be skipped only when
        // the inner source is zoned AND already reports the requested zone; the original expression had
        // this inverted, so a zoned inner source in a *different* zone was never converted.
        mustInspectZone = !(inner instanceof ConvertableTimeSource.Zoned)
                || !((ConvertableTimeSource.Zoned) inner).getZone().equals(zone);
    }

    @Override
    public boolean allowsReinterpret(@NotNull final Class alternateDataType) {
        // Reinterpretation to ZonedDateTime simply unwraps the inner source; anything else is delegated.
        return alternateDataType == ZonedDateTime.class || inner.allowsReinterpret(alternateDataType);
    }

    @Override
    protected ColumnSource doReinterpret(@NotNull final Class alternateDataType) {
        // noinspection unchecked
        return alternateDataType == ZonedDateTime.class
                ? (ColumnSource) inner
                : inner.reinterpret(alternateDataType);
    }

    @Nullable
    @Override
    public LocalDate get(long index) {
        final ZonedDateTime innerVal = adjustZone(inner.get(index));
        return innerVal == null ? null : innerVal.toLocalDate();
    }

    @Nullable
    @Override
    public LocalDate getPrev(long index) {
        final ZonedDateTime innerVal = adjustZone(inner.getPrev(index));
        return innerVal == null ? null : innerVal.toLocalDate();
    }

    @Override
    public FillContext makeFillContext(int chunkCapacity, SharedContext sharedContext) {
        return new ConvertingFillContext(chunkCapacity, sharedContext);
    }

    @Override
    public void fillChunk(@NotNull ColumnSource.FillContext context, @NotNull WritableChunk<? super Values> dest,
            @NotNull RowSequence rowSequence) {
        final ConvertingFillContext fillContext = (ConvertingFillContext) context;
        inner.fillChunk(fillContext.alternateFillContext, fillContext.innerChunk, rowSequence);
        convertInnerChunk(dest, fillContext);
    }

    @Override
    public void fillPrevChunk(@NotNull ColumnSource.FillContext context, @NotNull WritableChunk<? super Values> dest,
            @NotNull RowSequence rowSequence) {
        final ConvertingFillContext fillContext = (ConvertingFillContext) context;
        inner.fillPrevChunk(fillContext.alternateFillContext, fillContext.innerChunk, rowSequence);
        convertInnerChunk(dest, fillContext);
    }

    /** Converts the staged inner ZonedDateTimes into LocalDates in the destination chunk. */
    private void convertInnerChunk(@NotNull WritableChunk<? super Values> dest,
            ConvertingFillContext fillContext) {
        final WritableObjectChunk<LocalDate, ? super Values> typedDest = dest.asWritableObjectChunk();
        for (int ii = 0; ii < fillContext.innerChunk.size(); ii++) {
            final ZonedDateTime zdt = adjustZone(fillContext.innerChunk.get(ii));
            typedDest.set(ii, zdt == null ? null : zdt.toLocalDate());
        }
        typedDest.setSize(fillContext.innerChunk.size());
    }

    /** Rezones {@code input} to the configured zone when necessary; null passes through unchanged. */
    private ZonedDateTime adjustZone(final ZonedDateTime input) {
        if (!mustInspectZone || input == null || input.getZone().equals(zone)) {
            return input;
        }

        return input.withZoneSameInstant(zone);
    }
}
/**
 * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending
 */
package io.deephaven.engine.table.impl.sources;

import io.deephaven.chunk.WritableChunk;
import io.deephaven.chunk.WritableObjectChunk;
import io.deephaven.chunk.attributes.Values;
import io.deephaven.engine.rowset.RowSequence;
import io.deephaven.engine.table.ChunkSource;
import io.deephaven.engine.table.ColumnSource;
import io.deephaven.engine.table.SharedContext;
import io.deephaven.engine.table.impl.AbstractColumnSource;
import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.time.LocalTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;

/**
 * A {@link ColumnSource} for {@link LocalTime}s that is backed by an inner {@link ColumnSource} of
 * {@link ZonedDateTime}. Inner values whose zone differs from the configured {@code zone} are converted with
 * {@link ZonedDateTime#withZoneSameInstant(ZoneId)} before the local time is extracted, so every returned
 * {@link LocalTime} is expressed in the configured zone.
 */
public class LocalTimeWrapperSource extends AbstractColumnSource<LocalTime>
        implements MutableColumnSourceGetDefaults.ForObject<LocalTime> {
    private final ColumnSource<ZonedDateTime> inner;
    private final ZoneId zone;
    // True when the inner source may produce values in a zone other than `zone`, in which case each
    // value's zone must be inspected (and possibly converted) on read.
    private final boolean mustInspectZone;

    /**
     * Pairs the inner source's fill context with a scratch chunk that receives the inner
     * {@link ZonedDateTime}s before conversion into the destination chunk.
     */
    private class ConvertingFillContext implements ChunkSource.FillContext {
        final ChunkSource.FillContext alternateFillContext;
        final WritableObjectChunk<ZonedDateTime, Values> innerChunk;

        private ConvertingFillContext(final int chunkCapacity, final SharedContext sharedContext) {
            alternateFillContext = inner.makeFillContext(chunkCapacity, sharedContext);
            innerChunk = WritableObjectChunk.makeWritableChunk(chunkCapacity);
        }

        @Override
        public void close() {
            alternateFillContext.close();
            innerChunk.close();
        }
    }

    public LocalTimeWrapperSource(final ColumnSource<ZonedDateTime> inner, final ZoneId zone) {
        super(LocalTime.class);
        this.inner = inner;
        this.zone = zone;
        // BUG FIX: the equality test must be negated. Per-value zone inspection may be skipped only when
        // the inner source is zoned AND already reports the requested zone; the original expression had
        // this inverted, so a zoned inner source in a *different* zone was never converted.
        mustInspectZone = !(inner instanceof ConvertableTimeSource.Zoned)
                || !((ConvertableTimeSource.Zoned) inner).getZone().equals(zone);
    }

    @Override
    public boolean allowsReinterpret(@NotNull final Class alternateDataType) {
        // Reinterpretation to ZonedDateTime simply unwraps the inner source; anything else is delegated.
        return alternateDataType == ZonedDateTime.class || inner.allowsReinterpret(alternateDataType);
    }

    @Override
    protected ColumnSource doReinterpret(@NotNull final Class alternateDataType) {
        // noinspection unchecked
        return alternateDataType == ZonedDateTime.class
                ? (ColumnSource) inner
                : inner.reinterpret(alternateDataType);
    }

    @Nullable
    @Override
    public LocalTime get(long index) {
        final ZonedDateTime innerVal = adjustZone(inner.get(index));
        return innerVal == null ? null : innerVal.toLocalTime();
    }

    @Nullable
    @Override
    public LocalTime getPrev(long index) {
        final ZonedDateTime innerVal = adjustZone(inner.getPrev(index));
        return innerVal == null ? null : innerVal.toLocalTime();
    }

    @Override
    public FillContext makeFillContext(int chunkCapacity, SharedContext sharedContext) {
        return new ConvertingFillContext(chunkCapacity, sharedContext);
    }

    @Override
    public void fillChunk(@NotNull ColumnSource.FillContext context, @NotNull WritableChunk<? super Values> dest,
            @NotNull RowSequence rowSequence) {
        final ConvertingFillContext fillContext = (ConvertingFillContext) context;
        inner.fillChunk(fillContext.alternateFillContext, fillContext.innerChunk, rowSequence);
        convertInnerChunk(dest, fillContext);
    }

    @Override
    public void fillPrevChunk(@NotNull ColumnSource.FillContext context, @NotNull WritableChunk<? super Values> dest,
            @NotNull RowSequence rowSequence) {
        final ConvertingFillContext fillContext = (ConvertingFillContext) context;
        inner.fillPrevChunk(fillContext.alternateFillContext, fillContext.innerChunk, rowSequence);
        convertInnerChunk(dest, fillContext);
    }

    /** Converts the staged inner ZonedDateTimes into LocalTimes in the destination chunk. */
    private void convertInnerChunk(@NotNull WritableChunk<? super Values> dest,
            ConvertingFillContext fillContext) {
        final WritableObjectChunk<LocalTime, ? super Values> typedDest = dest.asWritableObjectChunk();
        for (int ii = 0; ii < fillContext.innerChunk.size(); ii++) {
            final ZonedDateTime zdt = adjustZone(fillContext.innerChunk.get(ii));
            typedDest.set(ii, zdt == null ? null : zdt.toLocalTime());
        }
        typedDest.setSize(fillContext.innerChunk.size());
    }

    /** Rezones {@code input} to the configured zone when necessary; null passes through unchanged. */
    private ZonedDateTime adjustZone(final ZonedDateTime input) {
        if (!mustInspectZone || input == null || input.getZone().equals(zone)) {
            return input;
        }

        return input.withZoneSameInstant(zone);
    }
}
io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.engine.table.ChunkSource; +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; +import io.deephaven.engine.updategraph.LogicalClock; +import io.deephaven.util.SoftRecycler; +import io.deephaven.util.compare.LongComparisons; +import io.deephaven.util.datastructures.LongSizedDataStructure; +import org.apache.commons.lang3.mutable.MutableInt; +import org.jetbrains.annotations.NotNull; + +import java.util.Arrays; + import static io.deephaven.util.QueryConstants.NULL_LONG; import static io.deephaven.util.type.TypeUtils.box; import static io.deephaven.util.type.TypeUtils.unbox; /** * Simple array source for Long. + *

+ * The C-haracterArraySource is replicated to all other types with + * io.deephaven.engine.table.impl.sources.Replicate. + * + * (C-haracter is deliberately spelled that way in order to prevent Replicate from altering this very comment). */ -public class LongArraySource extends AbstractLongArraySource { +public class LongArraySource extends ArraySourceHelper + implements MutableColumnSourceGetDefaults.ForLong , ConvertableTimeSource { + private static final SoftRecycler recycler = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, + () -> new long[BLOCK_SIZE], null); + + private long[][] blocks; + private transient long[][] prevBlocks; public LongArraySource() { super(long.class); + blocks = new long[INITIAL_NUMBER_OF_BLOCKS][]; + maxIndex = INITIAL_MAX_INDEX; } @Override - public void setNull(long key) { - set(key, NULL_LONG); + public void startTrackingPrevValues() { + super.startTrackingPrev(blocks.length); + prevBlocks = new long[blocks.length][]; + } + + @Override + public void ensureCapacity(long capacity, boolean nullFill) { + ensureCapacity(capacity, blocks, prevBlocks, nullFill); + } + + /** + * This version of `prepareForParallelPopulation` will internally call {@link #ensureCapacity(long, boolean)} to + * make sure there is room for the incoming values. 
+ * + * @param changedRows row set in the dense table + */ + @Override + public void prepareForParallelPopulation(RowSet changedRows) { + final long currentStep = LogicalClock.DEFAULT.currentStep(); + if (ensurePreviousClockCycle == currentStep) { + throw new IllegalStateException("May not call ensurePrevious twice on one clock cycle!"); + } + ensurePreviousClockCycle = currentStep; + + if (changedRows.isEmpty()) { + return; + } + + // ensure that this source will have sufficient capacity to store these rows, does not need to be + // null-filled as the values will be immediately written + ensureCapacity(changedRows.lastRowKey() + 1, false); + + if (prevFlusher != null) { + prevFlusher.maybeActivate(); + } else { + // we are not tracking this source yet so we have nothing to do for the previous values + return; + } + + try (final RowSequence.Iterator it = changedRows.getRowSequenceIterator()) { + do { + final long firstKey = it.peekNextKey(); + + final int block = (int) (firstKey >> LOG_BLOCK_SIZE); + + final long[] inUse; + if (prevBlocks[block] == null) { + prevBlocks[block] = recycler.borrowItem(); + prevInUse[block] = inUse = inUseRecycler.borrowItem(); + if (prevAllocated == null) { + prevAllocated = new TIntArrayList(); + } + prevAllocated.add(block); + } else { + inUse = prevInUse[block]; + } + + final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; + + it.getNextRowSequenceThrough(maxKeyInCurrentBlock).forAllRowKeys(key -> { + final int nextIndexWithinBlock = (int) (key & INDEX_MASK); + final int nextIndexWithinInUse = nextIndexWithinBlock >> LOG_INUSE_BITSET_SIZE; + final long nextMaskWithinInUse = 1L << nextIndexWithinBlock; + prevBlocks[block][nextIndexWithinBlock] = blocks[block][nextIndexWithinBlock]; + inUse[nextIndexWithinInUse] |= nextMaskWithinInUse; + }); + } while (it.hasMore()); + } } @Override - public void set(long key, Long value) { + public final void set(long key, Long value) { set(key, unbox(value)); } @Override - public Long get(long 
rowKey) { return box(getLong(rowKey)); } + public final void set(long key, long value) { + final int block = (int) (key >> LOG_BLOCK_SIZE); + final int indexWithinBlock = (int) (key & INDEX_MASK); + if (shouldRecordPrevious(key, prevBlocks, recycler)) { + prevBlocks[block][indexWithinBlock] = blocks[block][indexWithinBlock]; + } + blocks[block][indexWithinBlock] = value; + } + + @Override + public void setNull(long key) { + set(key, NULL_LONG); + } + + @Override + public final long getLong(long rowKey) { + if (rowKey < 0 || rowKey > maxIndex) { + return NULL_LONG; + } + return getUnsafe(rowKey); + } + + public final long getUnsafe(long index) { + final int blockIndex = (int) (index >> LOG_BLOCK_SIZE); + final int indexWithinBlock = (int) (index & INDEX_MASK); + return blocks[blockIndex][indexWithinBlock]; + } + + public final long getAndSetUnsafe(long index, long newValue) { + final int blockIndex = (int) (index >> LOG_BLOCK_SIZE); + final int indexWithinBlock = (int) (index & INDEX_MASK); + final long oldValue = blocks[blockIndex][indexWithinBlock]; + if (!LongComparisons.eq(oldValue, newValue)) { + if (shouldRecordPrevious(index, prevBlocks, recycler)) { + prevBlocks[blockIndex][indexWithinBlock] = oldValue; + } + blocks[blockIndex][indexWithinBlock] = newValue; + } + return oldValue; + } + + // region getAndAddUnsafe + public final long getAndAddUnsafe(long index, long addend) { + final int blockIndex = (int) (index >> LOG_BLOCK_SIZE); + final int indexWithinBlock = (int) (index & INDEX_MASK); + final long oldValue = blocks[blockIndex][indexWithinBlock]; + if (addend != 0) { + if (shouldRecordPrevious(index, prevBlocks, recycler)) { + prevBlocks[blockIndex][indexWithinBlock] = oldValue; + } + blocks[blockIndex][indexWithinBlock] = oldValue + addend; + } + return oldValue; + } + // endregion getAndAddUnsafe @Override public Long getPrev(long rowKey) { return box(getPrevLong(rowKey)); } + + @Override + public final long getPrevLong(long rowKey) { + if (rowKey < 0 
|| rowKey > maxIndex) { + return NULL_LONG; + } + final int blockIndex = (int) (rowKey >> LOG_BLOCK_SIZE); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); + if (shouldUsePrevious(rowKey)) { + return prevBlocks[blockIndex][indexWithinBlock]; + } else { + return blocks[blockIndex][indexWithinBlock]; + } + } + + @Override + public void shift(long start, long end, long offset) { + if (offset > 0) { + for (long i = (int) end; i >= start; i--) { + set((i + offset), getLong(i)); + } + } else { + for (int i = (int) start; i <= end; i++) { + set((i + offset), getLong(i)); + } + } + } + + public void move(long source, long dest, long length) { + if (prevBlocks != null) { + throw new UnsupportedOperationException(); + } + if (source == dest) { + return; + } + if (((source - dest) & INDEX_MASK) == 0 && (source & INDEX_MASK) == 0) { + // TODO (#3359): we can move full blocks! + } + if (source < dest && source + length >= dest) { + for (long ii = length - 1; ii >= 0; ) { + final long sourceKey = source + ii; + final long destKey = dest + ii; + final int sourceBlock = (int) (sourceKey >> LOG_BLOCK_SIZE); + final int sourceIndexWithinBlock = (int) (sourceKey & INDEX_MASK); + + final int destBlock = (int) (destKey >> LOG_BLOCK_SIZE); + final int destIndexWithinBlock = (int) (destKey & INDEX_MASK); + + final int valuesInBothBlocks = Math.min(destIndexWithinBlock + 1, sourceIndexWithinBlock + 1); + final int toMove = (ii + 1) < valuesInBothBlocks ? 
(int)(ii + 1): valuesInBothBlocks; + + System.arraycopy(blocks[sourceBlock], sourceIndexWithinBlock - toMove + 1, blocks[destBlock], destIndexWithinBlock - toMove + 1, toMove); + ii -= toMove; + } + } else { + for (long ii = 0; ii < length;) { + final long sourceKey = source + ii; + final long destKey = dest + ii; + final int sourceBlock = (int) (sourceKey >> LOG_BLOCK_SIZE); + final int sourceIndexWithinBlock = (int) (sourceKey & INDEX_MASK); + + final int destBlock = (int) (destKey >> LOG_BLOCK_SIZE); + final int destIndexWithinBlock = (int) (destKey & INDEX_MASK); + + final int valuesInBothBlocks = BLOCK_SIZE - Math.max(destIndexWithinBlock, sourceIndexWithinBlock); + final int toMove = (length - ii < valuesInBothBlocks) ? (int)(length - ii): valuesInBothBlocks; + + System.arraycopy(blocks[sourceBlock], sourceIndexWithinBlock, blocks[destBlock], destIndexWithinBlock, toMove); + ii += toMove; + } + } + } + + @Override + final long[] allocateNullFilledBlock(int size) { + final long[] newBlock = new long[size]; + Arrays.fill(newBlock, NULL_LONG); + return newBlock; + } + + @Override + final long[] allocateBlock(int size) { + return new long[size]; + } + + @Override + void resetBlocks(long[][] newBlocks, long[][] newPrev) { + blocks = newBlocks; + prevBlocks = newPrev; + } + + @Override + long[][] getPrevBlocks() { + return prevBlocks; + } + + @Override + SoftRecycler getRecycler() { + return recycler; + } + + @Override + Object getBlock(int blockIndex) { + return blocks[blockIndex]; + } + + @Override + Object getPrevBlock(int blockIndex) { + return prevBlocks[blockIndex]; + } + + @Override + public long resetWritableChunkToBackingStore(@NotNull ResettableWritableChunk chunk, long position) { + Assert.eqNull(prevInUse, "prevInUse"); + final int blockNo = getBlockNo(position); + final long [] backingArray = blocks[blockNo]; + chunk.asResettableWritableLongChunk().resetFromTypedArray(backingArray, 0, BLOCK_SIZE); + return ((long)blockNo) << LOG_BLOCK_SIZE; + } + + 
@Override + public long resetWritableChunkToBackingStoreSlice(@NotNull ResettableWritableChunk chunk, long position) { + Assert.eqNull(prevInUse, "prevInUse"); + final int blockNo = getBlockNo(position); + final long [] backingArray = blocks[blockNo]; + final long firstPosition = ((long) blockNo) << LOG_BLOCK_SIZE; + final int offset = (int)(position - firstPosition); + final int capacity = BLOCK_SIZE - offset; + chunk.asResettableWritableLongChunk().resetFromTypedArray(backingArray, offset, capacity); + return capacity; + } + + // region fillChunk + @Override + public /* TYPE_MIXIN */ void fillChunk( + @NotNull final ChunkSource.FillContext context, + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + if (rowSequence.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { + fillSparseChunk(destination, rowSequence /* CONVERTER_ARG */); + return; + } + // region chunkDecl + final WritableLongChunk chunk = destination.asWritableLongChunk(); + // endregion chunkDecl + MutableInt destOffset = new MutableInt(0); + rowSequence.forAllRowKeyRanges((final long from, final long to) -> { + final int fromBlock = getBlockNo(from); + final int toBlock = getBlockNo(to); + final int fromOffsetInBlock = (int) (from & INDEX_MASK); + if (fromBlock == toBlock) { + final int sz = LongSizedDataStructure.intSize("int cast", to - from + 1); + // region copyFromArray + destination.copyFromArray(getBlock(fromBlock), fromOffsetInBlock, destOffset.intValue(), sz); + // endregion copyFromArray + destOffset.add(sz); + } else { + final int sz = BLOCK_SIZE - fromOffsetInBlock; + // region copyFromArray + destination.copyFromArray(getBlock(fromBlock), fromOffsetInBlock, destOffset.intValue(), sz); + // endregion copyFromArray + destOffset.add(sz); + for (int blockNo = fromBlock + 1; blockNo < toBlock; ++blockNo) { + // region copyFromArray + destination.copyFromArray(getBlock(blockNo), 0, destOffset.intValue(), BLOCK_SIZE); + // 
endregion copyFromArray + destOffset.add(BLOCK_SIZE); + } + int restSz = (int) (to & INDEX_MASK) + 1; + // region copyFromArray + destination.copyFromArray(getBlock(toBlock), 0, destOffset.intValue(), restSz); + // endregion copyFromArray + destOffset.add(restSz); + } + }); + destination.setSize(destOffset.intValue()); + } + + public void fillChunk( + @NotNull final ChunkSource.FillContext context, + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + , LongFunction converter) { + if (rowSequence.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { + fillSparseChunk(destination, rowSequence , converter); + return; + } + // region chunkDecl + final WritableObjectChunk chunk = destination.asWritableObjectChunk(); + // endregion chunkDecl + MutableInt destOffset = new MutableInt(0); + rowSequence.forAllRowKeyRanges((final long from, final long to) -> { + final int fromBlock = getBlockNo(from); + final int toBlock = getBlockNo(to); + final int fromOffsetInBlock = (int) (from & INDEX_MASK); + if (fromBlock == toBlock) { + final int sz = LongSizedDataStructure.intSize("int cast", to - from + 1); + // region copyFromArray + { + long[] block = (long[])getBlock(fromBlock); + for (int ii = 0; ii < sz; ii++) { + chunk.set(ii + destOffset.intValue(), converter.apply(block[ii + fromOffsetInBlock])); + } + } + // endregion copyFromArray + destOffset.add(sz); + } else { + final int sz = BLOCK_SIZE - fromOffsetInBlock; + // region copyFromArray + { + long[] block = (long[])getBlock(fromBlock); + for (int ii = 0; ii < sz; ii++) { + chunk.set(ii + destOffset.intValue(), converter.apply(block[ii + fromOffsetInBlock])); + } + } + // endregion copyFromArray + destOffset.add(sz); + for (int blockNo = fromBlock + 1; blockNo < toBlock; ++blockNo) { + // region copyFromArray + { + long[] block = (long[])getBlock(blockNo); + for (int ii = 0; ii < BLOCK_SIZE; ii++) { + chunk.set(ii + destOffset.intValue(), converter.apply(block[ii + 0])); + } + } 
+ // endregion copyFromArray + destOffset.add(BLOCK_SIZE); + } + int restSz = (int) (to & INDEX_MASK) + 1; + // region copyFromArray + { + long[] block = (long[])getBlock(toBlock); + for (int ii = 0; ii < restSz; ii++) { + chunk.set(ii + destOffset.intValue(), converter.apply(block[ii + 0])); + } + } + // endregion copyFromArray + destOffset.add(restSz); + } + }); + destination.setSize(destOffset.intValue()); + } + // endregion fillChunk + + private interface CopyFromBlockFunctor { + void copy(int blockNo, int srcOffset, int length); + } + + // region fillPrevChunk + @Override + public /* TYPE_MIXIN */ void fillPrevChunk( + @NotNull final ColumnSource.FillContext context, + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + if (prevFlusher == null) { + fillChunk(context, destination, rowSequence /* CONVERTER_ARG */); + return; + } + + if (rowSequence.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { + fillSparsePrevChunk(destination, rowSequence /* CONVERTER_ARG */); + return; + } + + final ArraySourceHelper.FillContext effectiveContext = (ArraySourceHelper.FillContext) context; + final MutableInt destOffset = new MutableInt(0); + + // region chunkDecl + final WritableLongChunk chunk = destination.asWritableLongChunk(); + // endregion chunkDecl + + CopyFromBlockFunctor lambda = (blockNo, srcOffset, length) -> { + final long[] inUse = prevInUse[blockNo]; + if (inUse != null) { + // region conditionalCopy + effectiveContext.copyKernel.conditionalCopy(destination, getBlock(blockNo), getPrevBlock(blockNo), + inUse, srcOffset, destOffset.intValue(), length); + // endregion conditionalCopy + } else { + // region copyFromArray + destination.copyFromArray(getBlock(blockNo), srcOffset, destOffset.intValue(), length); + // endregion copyFromArray + } + destOffset.add(length); + }; + + rowSequence.forAllRowKeyRanges((final long from, final long to) -> { + final int fromBlock = getBlockNo(from); + final int 
toBlock = getBlockNo(to); + final int fromOffsetInBlock = (int) (from & INDEX_MASK); + if (fromBlock == toBlock) { + final int sz = LongSizedDataStructure.intSize("int cast", to - from + 1); + lambda.copy(fromBlock, fromOffsetInBlock, sz); + } else { + final int sz = BLOCK_SIZE - fromOffsetInBlock; + lambda.copy(fromBlock, fromOffsetInBlock, sz); + + for (int blockNo = fromBlock + 1; blockNo < toBlock; ++blockNo) { + lambda.copy(blockNo, 0, BLOCK_SIZE); + } + + int restSz = (int) (to & INDEX_MASK) + 1; + lambda.copy(toBlock, 0, restSz); + } + }); + destination.setSize(destOffset.intValue()); + } + + public void fillPrevChunk( + @NotNull final ColumnSource.FillContext context, + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + , LongFunction converter) { + if (prevFlusher == null) { + fillChunk(context, destination, rowSequence , converter); + return; + } + + if (rowSequence.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { + fillSparsePrevChunk(destination, rowSequence , converter); + return; + } + + final ArraySourceHelper.FillContext effectiveContext = (ArraySourceHelper.FillContext) context; + final MutableInt destOffset = new MutableInt(0); + + // region chunkDecl + final WritableObjectChunk chunk = destination.asWritableObjectChunk(); + // endregion chunkDecl + + CopyFromBlockFunctor lambda = (blockNo, srcOffset, length) -> { + final long[] inUse = prevInUse[blockNo]; + if (inUse != null) { + // region conditionalCopy + long[] baseInput = (long[]) getBlock(blockNo); + long[] overInput = (long[]) getPrevBlock(blockNo); + effectiveContext.copyKernel.conditionalCopy(destination, baseInput, overInput, + inUse, srcOffset, destOffset.intValue(), length); + + int bitsSet = 0; + final int bitsetLen = (length + 63) >> 6; + final int bitsetOffset = srcOffset >> 6; + for (int i = 0; i < bitsetLen; ++i) { + bitsSet += Long.bitCount(inUse[i + bitsetOffset]); + } + final int totalBits = bitsetLen << 6; + final boolean 
flipBase = bitsSet > totalBits / 2; + + // mem-copy from baseline + for (int ii = 0; ii < length; ++ii) { + chunk.set(destOffset.intValue() + ii, converter.apply((flipBase ? overInput : baseInput)[srcOffset + ii])); + } + + final int srcEndOffset = srcOffset + length; + for (int ii = CopyKernel.Utils.nextSetBit(inUse, srcOffset, srcEndOffset, flipBase); + ii < srcEndOffset; + ii = CopyKernel.Utils.nextSetBit(inUse, ii + 1, srcEndOffset, flipBase)) { + chunk.set(destOffset.intValue() + ii - srcOffset, + converter.apply(flipBase ? baseInput[ii] : overInput[ii])); + } + // endregion conditionalCopy + } else { + // region copyFromArray + { + long[] block = (long[])getBlock(blockNo); + for (int ii = 0; ii < length; ii++) { + chunk.set(ii + destOffset.intValue(), converter.apply(block[ii + srcOffset])); + } + } + // endregion copyFromArray + } + destOffset.add(length); + }; + + rowSequence.forAllRowKeyRanges((final long from, final long to) -> { + final int fromBlock = getBlockNo(from); + final int toBlock = getBlockNo(to); + final int fromOffsetInBlock = (int) (from & INDEX_MASK); + if (fromBlock == toBlock) { + final int sz = LongSizedDataStructure.intSize("int cast", to - from + 1); + lambda.copy(fromBlock, fromOffsetInBlock, sz); + } else { + final int sz = BLOCK_SIZE - fromOffsetInBlock; + lambda.copy(fromBlock, fromOffsetInBlock, sz); + + for (int blockNo = fromBlock + 1; blockNo < toBlock; ++blockNo) { + lambda.copy(blockNo, 0, BLOCK_SIZE); + } + + int restSz = (int) (to & INDEX_MASK) + 1; + lambda.copy(toBlock, 0, restSz); + } + }); + destination.setSize(destOffset.intValue()); + } + // endregion fillPrevChunk + + // region fillSparseChunk + @Override + protected /* TYPE_MIXIN */ void fillSparseChunk( + @NotNull final WritableChunk destGeneric, + @NotNull final RowSequence rows + /* CONVERTER */) { + if (rows.size() == 0) { + destGeneric.setSize(0); + return; + } + // region chunkDecl + final WritableLongChunk chunk = destGeneric.asWritableLongChunk(); + // 
endregion chunkDecl + final FillSparseChunkContext ctx = new FillSparseChunkContext<>(); + rows.forAllRowKeys((final long v) -> { + if (v >= ctx.capForCurrentBlock) { + ctx.currentBlockNo = getBlockNo(v); + ctx.capForCurrentBlock = (ctx.currentBlockNo + 1L) << LOG_BLOCK_SIZE; + ctx.currentBlock = blocks[ctx.currentBlockNo]; + } + // region conversion + chunk.set(ctx.offset++, ctx.currentBlock[(int) (v & INDEX_MASK)]); + // endregion conversion + }); + chunk.setSize(ctx.offset); + } + + protected void fillSparseChunk( + @NotNull final WritableChunk destGeneric, + @NotNull final RowSequence rows + , LongFunction converter) { + if (rows.size() == 0) { + destGeneric.setSize(0); + return; + } + // region chunkDecl + final WritableObjectChunk chunk = destGeneric.asWritableObjectChunk(); + // endregion chunkDecl + final FillSparseChunkContext ctx = new FillSparseChunkContext<>(); + rows.forAllRowKeys((final long v) -> { + if (v >= ctx.capForCurrentBlock) { + ctx.currentBlockNo = getBlockNo(v); + ctx.capForCurrentBlock = (ctx.currentBlockNo + 1L) << LOG_BLOCK_SIZE; + ctx.currentBlock = blocks[ctx.currentBlockNo]; + } + // region conversion + chunk.set(ctx.offset++,converter.apply( ctx.currentBlock[(int) (v & INDEX_MASK)])); + // endregion conversion + }); + chunk.setSize(ctx.offset); + } + // endregion fillSparseChunk + + // region fillSparsePrevChunk + @Override + protected /* TYPE_MIXIN */ void fillSparsePrevChunk( + @NotNull final WritableChunk destGeneric, + @NotNull final RowSequence rows + /* CONVERTER */) { + final long sz = rows.size(); + if (sz == 0) { + destGeneric.setSize(0); + return; + } + + if (prevFlusher == null) { + fillSparseChunk(destGeneric, rows /* CONVERTER_ARG */); + return; + } + + // region chunkDecl + final WritableLongChunk chunk = destGeneric.asWritableLongChunk(); + // endregion chunkDecl + final FillSparseChunkContext ctx = new FillSparseChunkContext<>(); + rows.forAllRowKeys((final long v) -> { + if (v >= ctx.capForCurrentBlock) { + 
ctx.currentBlockNo = getBlockNo(v); + ctx.capForCurrentBlock = (ctx.currentBlockNo + 1L) << LOG_BLOCK_SIZE; + ctx.currentBlock = blocks[ctx.currentBlockNo]; + ctx.currentPrevBlock = prevBlocks[ctx.currentBlockNo]; + ctx.prevInUseBlock = prevInUse[ctx.currentBlockNo]; + } + + final int indexWithinBlock = (int) (v & INDEX_MASK); + final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; + final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); + final boolean usePrev = ctx.prevInUseBlock != null && (ctx.prevInUseBlock[indexWithinInUse] & maskWithinInUse) != 0; + // region conversion + chunk.set(ctx.offset++, usePrev ? ctx.currentPrevBlock[indexWithinBlock] : ctx.currentBlock[indexWithinBlock]); + // endregion conversion + }); + chunk.setSize(ctx.offset); + } + + protected void fillSparsePrevChunk( + @NotNull final WritableChunk destGeneric, + @NotNull final RowSequence rows + , LongFunction converter) { + final long sz = rows.size(); + if (sz == 0) { + destGeneric.setSize(0); + return; + } + + if (prevFlusher == null) { + fillSparseChunk(destGeneric, rows , converter); + return; + } + + // region chunkDecl + final WritableObjectChunk chunk = destGeneric.asWritableObjectChunk(); + // endregion chunkDecl + final FillSparseChunkContext ctx = new FillSparseChunkContext<>(); + rows.forAllRowKeys((final long v) -> { + if (v >= ctx.capForCurrentBlock) { + ctx.currentBlockNo = getBlockNo(v); + ctx.capForCurrentBlock = (ctx.currentBlockNo + 1L) << LOG_BLOCK_SIZE; + ctx.currentBlock = blocks[ctx.currentBlockNo]; + ctx.currentPrevBlock = prevBlocks[ctx.currentBlockNo]; + ctx.prevInUseBlock = prevInUse[ctx.currentBlockNo]; + } + + final int indexWithinBlock = (int) (v & INDEX_MASK); + final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; + final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); + final boolean usePrev = ctx.prevInUseBlock != null && (ctx.prevInUseBlock[indexWithinInUse] & maskWithinInUse) != 0; + // 
region conversion + chunk.set(ctx.offset++,converter.apply( usePrev ? ctx.currentPrevBlock[indexWithinBlock] : ctx.currentBlock[indexWithinBlock])); + // endregion conversion + }); + chunk.setSize(ctx.offset); + } + // endregion fillSparsePrevChunk + + // region fillSparseChunkUnordered + @Override + protected /* TYPE_MIXIN */ void fillSparseChunkUnordered( + @NotNull final WritableChunk destGeneric, + @NotNull final LongChunk rows + /* CONVERTER */) { + // region chunkDecl + final WritableLongChunk chunk = destGeneric.asWritableLongChunk(); + // endregion chunkDecl + final int sz = rows.size(); + for (int ii = 0; ii < sz; ++ii) { + final long fromIndex = rows.get(ii); + if (fromIndex == RowSequence.NULL_ROW_KEY) { + chunk.set(ii, NULL_LONG); + continue; + } + final int blockNo = getBlockNo(fromIndex); + if (blockNo >= blocks.length) { + chunk.set(ii, NULL_LONG); + } else { + final long[] currentBlock = blocks[blockNo]; + // region conversion + chunk.set(ii, currentBlock[(int) (fromIndex & INDEX_MASK)]); + // endregion conversion + } + } + chunk.setSize(sz); + } + + protected void fillSparseChunkUnordered( + @NotNull final WritableChunk destGeneric, + @NotNull final LongChunk rows + , LongFunction converter) { + // region chunkDecl + final WritableObjectChunk chunk = destGeneric.asWritableObjectChunk(); + // endregion chunkDecl + final int sz = rows.size(); + for (int ii = 0; ii < sz; ++ii) { + final long fromIndex = rows.get(ii); + if (fromIndex == RowSequence.NULL_ROW_KEY) { + chunk.set(ii, null); + continue; + } + final int blockNo = getBlockNo(fromIndex); + if (blockNo >= blocks.length) { + chunk.set(ii, null); + } else { + final long[] currentBlock = blocks[blockNo]; + // region conversion + chunk.set(ii,converter.apply( currentBlock[(int) (fromIndex & INDEX_MASK)])); + // endregion conversion + } + } + chunk.setSize(sz); + } + // endregion fillSparseChunkUnordered + + // region fillSparsePrevChunkUnordered + @Override + protected /* TYPE_MIXIN */ void 
fillSparsePrevChunkUnordered( + @NotNull final WritableChunk destGeneric, + @NotNull final LongChunk rows + /* CONVERTER */) { + // region chunkDecl + final WritableLongChunk chunk = destGeneric.asWritableLongChunk(); + // endregion chunkDecl + final int sz = rows.size(); + for (int ii = 0; ii < sz; ++ii) { + final long fromIndex = rows.get(ii); + if (fromIndex == RowSequence.NULL_ROW_KEY) { + chunk.set(ii, NULL_LONG); + continue; + } + final int blockNo = getBlockNo(fromIndex); + if (blockNo >= blocks.length) { + chunk.set(ii, NULL_LONG); + continue; + } + final long[] currentBlock = shouldUsePrevious(fromIndex) ? prevBlocks[blockNo] : blocks[blockNo]; + // region conversion + chunk.set(ii, currentBlock[(int) (fromIndex & INDEX_MASK)]); + // endregion conversion + } + chunk.setSize(sz); + } + + protected void fillSparsePrevChunkUnordered( + @NotNull final WritableChunk destGeneric, + @NotNull final LongChunk rows + , LongFunction converter) { + // region chunkDecl + final WritableObjectChunk chunk = destGeneric.asWritableObjectChunk(); + // endregion chunkDecl + final int sz = rows.size(); + for (int ii = 0; ii < sz; ++ii) { + final long fromIndex = rows.get(ii); + if (fromIndex == RowSequence.NULL_ROW_KEY) { + chunk.set(ii, null); + continue; + } + final int blockNo = getBlockNo(fromIndex); + if (blockNo >= blocks.length) { + chunk.set(ii, null); + continue; + } + final long[] currentBlock = shouldUsePrevious(fromIndex) ? 
prevBlocks[blockNo] : blocks[blockNo]; + // region conversion + chunk.set(ii,converter.apply( currentBlock[(int) (fromIndex & INDEX_MASK)])); + // endregion conversion + } + chunk.setSize(sz); + } + // endregion fillSparsePrevChunkUnordered + + // region fillFromChunkByRanges + @Override + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final RowSequence rowSequence, + final Chunk src + /* CONVERTER */) { + if (rowSequence.size() == 0) { + return; + } + // region chunkDecl + final LongChunk chunk = src.asLongChunk(); + // endregion chunkDecl + final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); + + final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + + if (trackPrevious) { + prevFlusher.maybeActivate(); + } + + int offset = 0; + // This helps us reduce the number of calls to Chunk.isAlias + long[] knownUnaliasedBlock = null; + for (int ii = 0; ii < ranges.size(); ii += 2) { + long firstKey = ranges.get(ii); + final long lastKey = ranges.get(ii + 1); + + while (firstKey <= lastKey) { + final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; + final long lastKeyToUse = Math.min(maxKeyInCurrentBlock, lastKey); + final int length = (int) (lastKeyToUse - firstKey + 1); + + final int block0 = (int) (firstKey >> LOG_BLOCK_SIZE); + final int sIndexWithinBlock = (int) (firstKey & INDEX_MASK); + final long[] block = blocks[block0]; + + if (block != knownUnaliasedBlock && chunk.isAlias(block)) { + throw new UnsupportedOperationException("Source chunk is an alias for target data"); + } + knownUnaliasedBlock = block; + + // This 'if' with its constant condition should be very friendly to the branch predictor. 
+ if (trackPrevious) { + // this should be vectorized + for (int jj = 0; jj < length; ++jj) { + if (shouldRecordPrevious(firstKey + jj, prevBlocks, recycler)) { + prevBlocks[block0][sIndexWithinBlock + jj] = block[sIndexWithinBlock + jj]; + } + } + } + + // region copyToTypedArray + chunk.copyToTypedArray(offset, block, sIndexWithinBlock, length); + // endregion copyToTypedArray + firstKey += length; + offset += length; + } + } + } + + void fillFromChunkByRanges( + @NotNull final RowSequence rowSequence, + final Chunk src + , ToLongFunction converter) { + if (rowSequence.size() == 0) { + return; + } + // region chunkDecl + final ObjectChunk chunk = src.asObjectChunk(); + // endregion chunkDecl + final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); + + final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + + if (trackPrevious) { + prevFlusher.maybeActivate(); + } + + int offset = 0; + // This helps us reduce the number of calls to Chunk.isAlias + long[] knownUnaliasedBlock = null; + for (int ii = 0; ii < ranges.size(); ii += 2) { + long firstKey = ranges.get(ii); + final long lastKey = ranges.get(ii + 1); + + while (firstKey <= lastKey) { + final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; + final long lastKeyToUse = Math.min(maxKeyInCurrentBlock, lastKey); + final int length = (int) (lastKeyToUse - firstKey + 1); + + final int block0 = (int) (firstKey >> LOG_BLOCK_SIZE); + final int sIndexWithinBlock = (int) (firstKey & INDEX_MASK); + final long[] block = blocks[block0]; + + if (block != knownUnaliasedBlock && chunk.isAlias(block)) { + throw new UnsupportedOperationException("Source chunk is an alias for target data"); + } + knownUnaliasedBlock = block; + + // This 'if' with its constant condition should be very friendly to the branch predictor. 
+ if (trackPrevious) { + // this should be vectorized + for (int jj = 0; jj < length; ++jj) { + if (shouldRecordPrevious(firstKey + jj, prevBlocks, recycler)) { + prevBlocks[block0][sIndexWithinBlock + jj] = block[sIndexWithinBlock + jj]; + } + } + } + + // region copyToTypedArray + for (int jj = 0; jj < length; jj++) { + block[jj + sIndexWithinBlock] = converter.applyAsLong(chunk.get(offset + jj)); + } + // endregion copyToTypedArray + firstKey += length; + offset += length; + } + } + } + // endregion fillFromChunkByRanges + + public void copyFromChunk(long firstKey, final long totalLength, final Chunk src, int offset) { + if (totalLength == 0) { + return; + } + final LongChunk chunk = src.asLongChunk(); + + final long lastKey = firstKey + totalLength - 1; + + while (firstKey <= lastKey) { + final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; + final long lastKeyToUse = Math.min(maxKeyInCurrentBlock, lastKey); + final int length = (int) (lastKeyToUse - firstKey + 1); + + final int block0 = (int) (firstKey >> LOG_BLOCK_SIZE); + final int sIndexWithinBlock = (int) (firstKey & INDEX_MASK); + final long[] block = blocks[block0]; + + chunk.copyToTypedArray(offset, block, sIndexWithinBlock, length); + firstKey += length; + offset += length; + } + } + + // region fillFromChunkByKeys + @Override + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final RowSequence rowSequence, + final Chunk src + /* CONVERTER */) { + if (rowSequence.size() == 0) { + return; + } + // region chunkDecl + final LongChunk chunk = src.asLongChunk(); + // endregion chunkDecl + final LongChunk keys = rowSequence.asRowKeyChunk(); + + final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + + if (trackPrevious) { + prevFlusher.maybeActivate(); + } + + for (int ii = 0; ii < keys.size(); ) { + final long firstKey = keys.get(ii); + final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; + int lastII = ii; + while (lastII + 1 < 
keys.size() && keys.get(lastII + 1) <= maxKeyInCurrentBlock) { + ++lastII; + } + + final int block0 = (int) (firstKey >> LOG_BLOCK_SIZE); + final long[] block = blocks[block0]; + + if (chunk.isAlias(block)) { + throw new UnsupportedOperationException("Source chunk is an alias for target data"); + } + + while (ii <= lastII) { + final long key = keys.get(ii); + final int indexWithinBlock = (int) (key & INDEX_MASK); + + if (trackPrevious) { + if (shouldRecordPrevious(key, prevBlocks, recycler)) { + prevBlocks[block0][indexWithinBlock] = block[indexWithinBlock]; + } + } + // region conversion + block[indexWithinBlock] = chunk.get(ii); + // endregion conversion + ++ii; + } + } + } + + void fillFromChunkByKeys( + @NotNull final RowSequence rowSequence, + final Chunk src + , ToLongFunction converter) { + if (rowSequence.size() == 0) { + return; + } + // region chunkDecl + final ObjectChunk chunk = src.asObjectChunk(); + // endregion chunkDecl + final LongChunk keys = rowSequence.asRowKeyChunk(); + + final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + + if (trackPrevious) { + prevFlusher.maybeActivate(); + } + + for (int ii = 0; ii < keys.size(); ) { + final long firstKey = keys.get(ii); + final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; + int lastII = ii; + while (lastII + 1 < keys.size() && keys.get(lastII + 1) <= maxKeyInCurrentBlock) { + ++lastII; + } + + final int block0 = (int) (firstKey >> LOG_BLOCK_SIZE); + final long[] block = blocks[block0]; + + if (chunk.isAlias(block)) { + throw new UnsupportedOperationException("Source chunk is an alias for target data"); + } + + while (ii <= lastII) { + final long key = keys.get(ii); + final int indexWithinBlock = (int) (key & INDEX_MASK); + + if (trackPrevious) { + if (shouldRecordPrevious(key, prevBlocks, recycler)) { + prevBlocks[block0][indexWithinBlock] = block[indexWithinBlock]; + } + } + // region conversion + block[indexWithinBlock] = 
converter.applyAsLong(chunk.get(ii)); + // endregion conversion + ++ii; + } + } + } + // endregion fillFromChunkByKeys + + // region fillFromChunkUnordered + @Override + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { + if (keys.size() == 0) { + return; + } + // region chunkDecl + final LongChunk chunk = src.asLongChunk(); + // endregion chunkDecl + + final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + + if (trackPrevious) { + prevFlusher.maybeActivate(); + } + + for (int ii = 0; ii < keys.size(); ) { + final long firstKey = keys.get(ii); + final long minKeyInCurrentBlock = firstKey & ~INDEX_MASK; + final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; + + final int block0 = (int) (firstKey >> LOG_BLOCK_SIZE); + final long[] block = blocks[block0]; + + if (chunk.isAlias(block)) { + throw new UnsupportedOperationException("Source chunk is an alias for target data"); + } + + long key = keys.get(ii); + do { + final int indexWithinBlock = (int) (key & INDEX_MASK); + + if (trackPrevious) { + if (shouldRecordPrevious(key, prevBlocks, recycler)) { + prevBlocks[block0][indexWithinBlock] = block[indexWithinBlock]; + } + } + // region conversion + block[indexWithinBlock] = chunk.get(ii); + // endregion conversion + ++ii; + } while (ii < keys.size() && (key = keys.get(ii)) >= minKeyInCurrentBlock && key <= maxKeyInCurrentBlock); + } + } + + public void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + , ToLongFunction converter) { + if (keys.size() == 0) { + return; + } + // region chunkDecl + final ObjectChunk chunk = src.asObjectChunk(); + // endregion chunkDecl + + final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + + if (trackPrevious) 
{ + prevFlusher.maybeActivate(); + } + + for (int ii = 0; ii < keys.size(); ) { + final long firstKey = keys.get(ii); + final long minKeyInCurrentBlock = firstKey & ~INDEX_MASK; + final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; + + final int block0 = (int) (firstKey >> LOG_BLOCK_SIZE); + final long[] block = blocks[block0]; + + if (chunk.isAlias(block)) { + throw new UnsupportedOperationException("Source chunk is an alias for target data"); + } + + long key = keys.get(ii); + do { + final int indexWithinBlock = (int) (key & INDEX_MASK); + + if (trackPrevious) { + if (shouldRecordPrevious(key, prevBlocks, recycler)) { + prevBlocks[block0][indexWithinBlock] = block[indexWithinBlock]; + } + } + // region conversion + block[indexWithinBlock] = converter.applyAsLong(chunk.get(ii)); + // endregion conversion + ++ii; + } while (ii < keys.size() && (key = keys.get(ii)) >= minKeyInCurrentBlock && key <= maxKeyInCurrentBlock); + } + } + // endregion fillFromChunkUnordered + + // region reinterpretation + @Override + public boolean allowsReinterpret(@NotNull final Class alternateDataType) { + return alternateDataType == long.class || alternateDataType == Instant.class || alternateDataType == DateTime.class; + } + + @SuppressWarnings("unchecked") + @Override + protected ColumnSource doReinterpret(@NotNull Class alternateDataType) { + if (alternateDataType == this.getType()) { + return (ColumnSource) this; + } else if(alternateDataType == DateTime.class) { + return (ColumnSource) toDateTime(); + } else if (alternateDataType == Instant.class) { + return (ColumnSource) toInstant(); + } + + throw new IllegalArgumentException("Cannot reinterpret `" + getType().getName() + "` to `" + alternateDataType.getName() + "`"); + } + + @Override + public boolean supportsTimeConversion() { + return true; + } + + @Override + public ColumnSource toZonedDateTime(final @NotNull ZoneId zone) { + return new ZonedDateTimeArraySource(Require.neqNull(zone, "zone"), this); + } + + @Override + 
public ColumnSource toLocalDate(final @NotNull ZoneId zone) { + return new LocalDateWrapperSource(toZonedDateTime(zone), zone); + } + + @Override + public ColumnSource toLocalTime(final @NotNull ZoneId zone) { + return new LocalTimeWrapperSource(toZonedDateTime(zone), zone); + } + + @Override + public ColumnSource toDateTime() { + return new DateTimeArraySource(this); + } + + @Override + public ColumnSource toInstant() { + return new InstantArraySource(this); + } + + @Override + public ColumnSource toEpochNano() { + return this; + } + // endregion reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongAsDateTimeColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongAsDateTimeColumnSource.java index db519f05a19..340798eaa4f 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongAsDateTimeColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongAsDateTimeColumnSource.java @@ -3,143 +3,20 @@ */ package io.deephaven.engine.table.impl.sources; -import io.deephaven.engine.rowset.chunkattributes.RowKeys; -import io.deephaven.engine.table.SharedContext; import io.deephaven.engine.table.ColumnSource; -import io.deephaven.engine.table.impl.AbstractColumnSource; -import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; import io.deephaven.time.DateTime; import io.deephaven.time.DateTimeUtils; -import io.deephaven.chunk.attributes.Values; -import io.deephaven.chunk.*; -import io.deephaven.engine.rowset.RowSequence; -import org.jetbrains.annotations.NotNull; /** - * Reinterpret result {@link ColumnSource} implementations that translates {@link long} to {@code DateTime} values. + * Reinterpret result {@link ColumnSource} implementations that translates {@code long} to {@link DateTime} values. 
*/ -public class LongAsDateTimeColumnSource extends AbstractColumnSource implements MutableColumnSourceGetDefaults.ForObject, FillUnordered { - - private final ColumnSource alternateColumnSource; - +public class LongAsDateTimeColumnSource extends BoxedLongAsTimeSource { public LongAsDateTimeColumnSource(ColumnSource alternateColumnSource) { - super(DateTime.class); - this.alternateColumnSource = alternateColumnSource; - } - - @Override - public DateTime get(final long rowKey) { - final long longValue = alternateColumnSource.getLong(rowKey); - return DateTimeUtils.nanosToTime(longValue); - } - - @Override - public DateTime getPrev(final long rowKey) { - final long longValue = alternateColumnSource.getPrevLong(rowKey); - return DateTimeUtils.nanosToTime(longValue); - } - - @Override - public boolean isImmutable() { - return alternateColumnSource.isImmutable(); - } - - @Override - public boolean allowsReinterpret(@NotNull final Class alternateDataType) { - return alternateDataType == long.class || alternateDataType == Long.class; - } - - @Override - public ColumnSource doReinterpret(@NotNull final Class alternateDataType) throws IllegalArgumentException { - //noinspection unchecked - return (ColumnSource) alternateColumnSource; - } - - private class ToDateTimeFillContext implements FillContext { - final GetContext alternateGetContext; - final FillContext alternateFillContext; - final WritableLongChunk longChunk; - - private ToDateTimeFillContext(final int chunkCapacity, final SharedContext sharedContext) { - alternateGetContext = alternateColumnSource.makeGetContext(chunkCapacity, sharedContext); - if (providesFillUnordered()) { - alternateFillContext = alternateColumnSource.makeFillContext(chunkCapacity, sharedContext); - longChunk = WritableLongChunk.makeWritableChunk(chunkCapacity); - } else { - alternateFillContext = null; - longChunk = null; - } - } - - @Override - public void close() { - alternateGetContext.close(); - if (alternateFillContext != null) { - 
alternateFillContext.close(); - longChunk.close(); - } - } - } - - @Override - public FillContext makeFillContext(final int chunkCapacity, final SharedContext sharedContext) { - return new ToDateTimeFillContext(chunkCapacity, sharedContext); - } - - @Override - public void fillChunk(@NotNull final FillContext context, @NotNull final WritableChunk destination, @NotNull final RowSequence rowSequence) { - final ToDateTimeFillContext toDateTimeFillContext = (ToDateTimeFillContext) context; - final LongChunk longChunk = alternateColumnSource.getChunk(toDateTimeFillContext.alternateGetContext, rowSequence).asLongChunk(); - convertToDateTime(destination, longChunk); - } - - @Override - public void fillPrevChunk(@NotNull final FillContext context, @NotNull final WritableChunk destination, @NotNull final RowSequence rowSequence) { - final ToDateTimeFillContext toDateTimeFillContext = (ToDateTimeFillContext) context; - final LongChunk longChunk = alternateColumnSource.getPrevChunk(toDateTimeFillContext.alternateGetContext, rowSequence).asLongChunk(); - convertToDateTime(destination, longChunk); - } - - @Override - public boolean providesFillUnordered() { - return FillUnordered.providesFillUnordered(alternateColumnSource); - } - - @Override - public void fillChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { - final ToDateTimeFillContext toDateTimeFillContext = (ToDateTimeFillContext) context; - if (toDateTimeFillContext.longChunk == null) { - throw new UnsupportedOperationException("Unordered fill is not supported by this column source!"); - } - toDateTimeFillContext.longChunk.setSize(keys.size()); - //noinspection unchecked - ((FillUnordered) alternateColumnSource).fillChunkUnordered(toDateTimeFillContext.alternateFillContext, toDateTimeFillContext.longChunk, keys); - convertToDateTime(dest, toDateTimeFillContext.longChunk); - } - - @Override - public void fillPrevChunkUnordered(@NotNull FillContext context, @NotNull 
WritableChunk dest, @NotNull LongChunk keys) { - final ToDateTimeFillContext toDateTimeFillContext = (ToDateTimeFillContext) context; - if (toDateTimeFillContext.longChunk == null) { - throw new UnsupportedOperationException("Unordered fill is not supported by this column source!"); - } - toDateTimeFillContext.longChunk.setSize(keys.size()); - //noinspection unchecked - ((FillUnordered) alternateColumnSource).fillPrevChunkUnordered(toDateTimeFillContext.alternateFillContext, toDateTimeFillContext.longChunk, keys); - convertToDateTime(dest, toDateTimeFillContext.longChunk); - } - - private static void convertToDateTime(@NotNull final WritableChunk destination, @NotNull final LongChunk longChunk) { - final WritableObjectChunk dateTimeObjectDestination = destination.asWritableObjectChunk(); - for (int ii = 0; ii < longChunk.size(); ++ii) { - final long longValue = longChunk.get(ii); - dateTimeObjectDestination.set(ii, DateTimeUtils.nanosToTime(longValue)); - } - dateTimeObjectDestination.setSize(longChunk.size()); + super(DateTime.class, alternateColumnSource); } @Override - public boolean isStateless() { - return alternateColumnSource.isStateless(); + protected DateTime makeValue(long val) { + return DateTimeUtils.nanosToTime(val); } -} +} \ No newline at end of file diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongAsInstantColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongAsInstantColumnSource.java new file mode 100644 index 00000000000..e57afd08a0b --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongAsInstantColumnSource.java @@ -0,0 +1,23 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.sources; + +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.time.DateTimeUtils; + +import java.time.Instant; + +/** + * Reinterpret result {@link ColumnSource} implementations that 
translates {@code long} to {@link Instant} values. + */ +public class LongAsInstantColumnSource extends BoxedLongAsTimeSource { + public LongAsInstantColumnSource(ColumnSource alternateColumnSource) { + super(Instant.class, alternateColumnSource); + } + + @Override + protected Instant makeValue(long val) { + return DateTimeUtils.makeInstant(val); + } +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongAsLocalDateColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongAsLocalDateColumnSource.java new file mode 100644 index 00000000000..e63ac3df8ee --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongAsLocalDateColumnSource.java @@ -0,0 +1,29 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.sources; + +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.time.DateTimeUtils; + +import java.time.LocalDate; +import java.time.ZoneId; +import java.time.ZonedDateTime; + +/** + * Reinterpret result {@link ColumnSource} implementations that translates {@code long} to {@link LocalDate} values. + */ +public class LongAsLocalDateColumnSource extends BoxedLongAsTimeSource { + private final ZoneId zone; + + public LongAsLocalDateColumnSource(ColumnSource alternateColumnSource, ZoneId zone) { + super(LocalDate.class, alternateColumnSource); + this.zone = zone; + } + + @Override + protected LocalDate makeValue(long val) { + final ZonedDateTime zdt = DateTimeUtils.makeZonedDateTime(val, zone); + return zdt == null ? 
null : zdt.toLocalDate(); + } +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongAsLocalTimeColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongAsLocalTimeColumnSource.java new file mode 100644 index 00000000000..5f2d3e7e18d --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongAsLocalTimeColumnSource.java @@ -0,0 +1,29 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.sources; + +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.time.DateTimeUtils; + +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.ZonedDateTime; + +/** + * Reinterpret result {@link ColumnSource} implementations that translates {@code long} to {@link LocalTime} values. + */ +public class LongAsLocalTimeColumnSource extends BoxedLongAsTimeSource { + private final ZoneId zone; + + public LongAsLocalTimeColumnSource(ColumnSource alternateColumnSource, ZoneId zone) { + super(LocalTime.class, alternateColumnSource); + this.zone = zone; + } + + @Override + protected LocalTime makeValue(long val) { + final ZonedDateTime zdt = DateTimeUtils.makeZonedDateTime(val, zone); + return zdt == null ? 
null : zdt.toLocalTime(); + } +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongAsZonedDateTimeColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongAsZonedDateTimeColumnSource.java new file mode 100644 index 00000000000..721afc95ac4 --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongAsZonedDateTimeColumnSource.java @@ -0,0 +1,33 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.sources; + +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.time.DateTimeUtils; + +import java.time.ZoneId; +import java.time.ZonedDateTime; + +/** + * Reinterpret result {@link ColumnSource} implementations that translates {@code long} to {@link ZonedDateTime} values. + */ +public class LongAsZonedDateTimeColumnSource extends BoxedLongAsTimeSource + implements ConvertableTimeSource.Zoned { + private final ZoneId zone; + + public LongAsZonedDateTimeColumnSource(ColumnSource alternateColumnSource, ZoneId zone) { + super(ZonedDateTime.class, alternateColumnSource); + this.zone = zone; + } + + @Override + protected ZonedDateTime makeValue(long val) { + return DateTimeUtils.makeZonedDateTime(val, zone); + } + + @Override + public ZoneId getZone() { + return zone; + } +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongSparseArraySource.java index 964759f2833..a4bd4836a83 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongSparseArraySource.java @@ -1,24 +1,1438 @@ /** * Copyright (c) 2016-2022 Deephaven Data Labs and Patent Pending */ +/* + * 
--------------------------------------------------------------------------------------------------------------------- + * AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY - for any changes edit CharacterSparseArraySource and regenerate + * --------------------------------------------------------------------------------------------------------------------- + */ package io.deephaven.engine.table.impl.sources; +import java.util.function.LongFunction; +import java.util.function.ToLongFunction; +import java.time.Instant; +import java.time.ZonedDateTime; +import java.time.LocalDate; +import java.time.LocalTime; +import io.deephaven.base.verify.Require; +import java.time.ZoneId; + +import io.deephaven.time.DateTime; + +import io.deephaven.engine.table.ChunkSource; +import io.deephaven.engine.table.impl.DefaultGetContext; +import io.deephaven.chunk.*; +import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; +import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; +import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; +import io.deephaven.engine.updategraph.LogicalClock; +import io.deephaven.engine.updategraph.UpdateCommitter; +import io.deephaven.engine.table.impl.sources.sparse.LongOneOrN; +import io.deephaven.engine.rowset.RowSequence; +import io.deephaven.util.SoftRecycler; +import gnu.trove.list.array.TLongArrayList; +import org.apache.commons.lang3.mutable.MutableObject; +import org.jetbrains.annotations.NotNull; +import java.util.Arrays; + +// region boxing imports +import static io.deephaven.util.QueryConstants.NULL_LONG; import static io.deephaven.util.type.TypeUtils.box; import static io.deephaven.util.type.TypeUtils.unbox; +// endregion boxing imports + +import static io.deephaven.engine.table.impl.sources.sparse.SparseConstants.*; /** 
- * Simple array source for Long. + * Sparse array source for Long. + *

+ * The C-haracterSparseArraySource is replicated to all other types with + * io.deephaven.engine.table.impl.sources.Replicate. + * + * (C-haracter is deliberately spelled that way in order to prevent Replicate from altering this very comment). */ -public class LongSparseArraySource extends AbstractSparseLongArraySource implements MutableColumnSourceGetDefaults.ForLong { +public class LongSparseArraySource extends SparseArrayColumnSource + implements MutableColumnSourceGetDefaults.ForLong , ConvertableTimeSource { + // region recyclers + private static final SoftRecycler recycler = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, + () -> new long[BLOCK_SIZE], null); + private static final SoftRecycler recycler2 = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, + () -> new long[BLOCK2_SIZE][], null); + private static final SoftRecycler recycler1 = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, + () -> new LongOneOrN.Block2[BLOCK1_SIZE], null); + private static final SoftRecycler recycler0 = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, + () -> new LongOneOrN.Block1[BLOCK0_SIZE], null); + // endregion recyclers + + /** + * The presence of a prevFlusher means that this ArraySource wants to track previous values. If prevFlusher is null, + * the ArraySource does not want (or does not yet want) to track previous values. Deserialized ArraySources never + * track previous values. + */ + protected transient UpdateCommitter prevFlusher = null; + + /** + * If ensure previous has been called, we need not check previous values when filling. + */ + private transient long ensurePreviousClockCycle = -1; + + /** + * Our previous page table could be very sparse, and we do not want to read through millions of nulls to find out + * what blocks to recycle. Instead we maintain a list of blocks that we have allocated (as the key shifted by + * BLOCK0_SHIFT). 
We recycle those blocks in the PrevFlusher; and accumulate the set of blocks that must be + * recycled from the next level array, and so on until we recycle the top-level prevBlocks and prevInUse arrays. + */ + private transient final TLongArrayList blocksToFlush = new TLongArrayList(); + + protected LongOneOrN.Block0 blocks; + protected transient LongOneOrN.Block0 prevBlocks; + + // region constructor public LongSparseArraySource() { super(long.class); + blocks = new LongOneOrN.Block0(); + } + // endregion constructor + + @Override + public void ensureCapacity(long capacity, boolean nullFill) { + // Nothing to do here. Sparse array sources allocate on-demand and always null-fill. + } + + // region setNull + @Override + public void setNull(long key) { + final long [] blocks2 = blocks.getInnermostBlockByKeyOrNull(key); + if (blocks2 == null) { + return; + } + final int indexWithinBlock = (int) (key & INDEX_MASK); + if (blocks2[indexWithinBlock] == NULL_LONG) { + return; + } + + final long [] prevBlocksInner = shouldRecordPrevious(key); + if (prevBlocksInner != null) { + prevBlocksInner[indexWithinBlock] = blocks2[indexWithinBlock]; + } + blocks2[indexWithinBlock] = NULL_LONG; } + // endregion setNull + @Override + public final void set(long key, long value) { + final int block0 = (int) (key >> BLOCK0_SHIFT) & BLOCK0_MASK; + final int block1 = (int) (key >> BLOCK1_SHIFT) & BLOCK1_MASK; + final int block2 = (int) (key >> BLOCK2_SHIFT) & BLOCK2_MASK; + final int indexWithinBlock = (int) (key & INDEX_MASK); + + final long [] blocksInner = ensureBlock(block0, block1, block2); + final long [] prevBlocksInner = shouldRecordPrevious(key); + if (prevBlocksInner != null) { + prevBlocksInner[indexWithinBlock] = blocksInner[indexWithinBlock]; + } + blocksInner[indexWithinBlock] = value; + } + + @Override + public void shift(final RowSet keysToShift, final long shiftDelta) { + final RowSet.SearchIterator it = (shiftDelta > 0) ? 
keysToShift.reverseIterator() : keysToShift.searchIterator(); + it.forEachLong((i) -> { + set(i + shiftDelta, getLong(i)); + setNull(i); + return true; + }); + } + + // region boxed methods @Override public void set(long key, Long value) { set(key, unbox(value)); } + + @Override + public Long get(long rowKey) { + return box(getLong(rowKey)); + } + + @Override + public Long getPrev(long rowKey) { + return box(getPrevLong(rowKey)); + } + // endregion boxed methods + + // region primitive get + @Override + public final long getLong(long rowKey) { + if (rowKey < 0) { + return NULL_LONG; + } + return getLongFromBlock(blocks, rowKey); + } + + + @Override + public final long getPrevLong(long rowKey) { + if (rowKey < 0) { + return NULL_LONG; + } + if (shouldUsePrevious(rowKey)) { + return getLongFromBlock(prevBlocks, rowKey); + } + + return getLongFromBlock(blocks, rowKey); + } + + private long getLongFromBlock(LongOneOrN.Block0 blocks, long key) { + final long [] blocks2 = blocks.getInnermostBlockByKeyOrNull(key); + if (blocks2 == null) { + return NULL_LONG; + } + return blocks2[(int)(key & INDEX_MASK)]; + } + // endregion primitive get + + // region allocateNullFilledBlock + @SuppressWarnings("SameParameterValue") + final long [] allocateNullFilledBlock(int size) { + final long [] newBlock = new long[size]; + Arrays.fill(newBlock, NULL_LONG); + return newBlock; + } + // endregion allocateNullFilledBlock + + /** + * Make sure that we have an allocated block at the given point, allocating all of the required parents. + * @return {@code blocks.get(block0).get(block1).get(block2)}, which is non-null. 
+ */ + long [] ensureBlock(final int block0, final int block1, final int block2) { + blocks.ensureIndex(block0, null); + LongOneOrN.Block1 blocks0 = blocks.get(block0); + if (blocks0 == null) { + blocks.set(block0, blocks0 = new LongOneOrN.Block1()); + } + LongOneOrN.Block2 blocks1 = blocks0.get(block1); + if (blocks1 == null) { + blocks0.ensureIndex(block1, null); + blocks0.set(block1, blocks1 = new LongOneOrN.Block2()); + } + + long [] result = blocks1.get(block2); + if (result == null) { + blocks1.ensureIndex(block2, null); + // we do not use the recycler here, because the recycler need not sanitize the block (the inUse recycling + // does that); yet we would like squeaky clean null filled blocks here. + result = allocateNullFilledBlock(BLOCK_SIZE); + blocks1.set(block2, result); + } + return result; + } + + /** + * Make sure that we have an allocated previous and inuse block at the given point, allocating all of the required + * parents. + * @return {@code prevBlocks.get(block0).get(block1).get(block2)}, which is non-null. 
+ */ + private long [] ensurePrevBlock(final long key, final int block0, final int block1, final int block2) { + if (prevBlocks == null) { + prevBlocks = new LongOneOrN.Block0(); + prevInUse = new LongOneOrN.Block0(); + } + prevBlocks.ensureIndex(block0, recycler0); + prevInUse.ensureIndex(block0, inUse0Recycler); + LongOneOrN.Block1 blocks0 = prevBlocks.get(block0); + final LongOneOrN.Block1 inUse0; + if (blocks0 == null) { + prevBlocks.set(block0, blocks0 = new LongOneOrN.Block1()); + prevInUse.set(block0, inUse0 = new LongOneOrN.Block1()); + } else { + inUse0 = prevInUse.get(block0); + } + LongOneOrN.Block2 blocks1 = blocks0.get(block1); + final LongOneOrN.Block2 inUse1; + if (blocks1 == null) { + blocks0.ensureIndex(block1, recycler1); + inUse0.ensureIndex(block1, inUse1Recycler); + blocks0.set(block1, blocks1 = new LongOneOrN.Block2()); + inUse0.set(block1, inUse1 = new LongOneOrN.Block2()); + } else { + inUse1 = inUse0.get(block1); + } + long[] result = blocks1.get(block2); + if (result == null) { + blocks1.ensureIndex(block2, recycler2); + inUse1.ensureIndex(block2, inUse2Recycler); + + blocks1.set(block2, result = recycler.borrowItem()); + inUse1.set(block2, inUseRecycler.borrowItem()); + + blocksToFlush.add(key >> BLOCK2_SHIFT); + } + return result; + } + + @Override + public void startTrackingPrevValues() { + if (prevFlusher != null) { + throw new IllegalStateException("Can't call startTrackingPrevValues() twice: " + + this.getClass().getCanonicalName()); + } + prevFlusher = new UpdateCommitter<>(this, LongSparseArraySource::commitUpdates); + } + + private void commitUpdates() { + blocksToFlush.sort(); + + int destinationOffset = 0; + long lastBlock2Key = -1; + + final LongOneOrN.Block0 localPrevBlocks = prevBlocks; + final LongOneOrN.Block0 localPrevInUse = prevInUse; + + if (localPrevBlocks == null) { + assert prevInUse == null; + return; + } + + // there is no reason to allow these to be used anymore; instead we just null them out so that any + // 
getPrev calls will immediately return get(). + prevInUse = null; + prevBlocks = null; + + // we are clearing out values from block0, block1, block2, block + // we are accumulating values of block0, block1, block2 + for (int ii = 0; ii < blocksToFlush.size(); ii++) { + // blockKey = block0 | block1 | block2 + final long blockKey = blocksToFlush.getQuick(ii); + final long key = blockKey << LOG_BLOCK_SIZE; + final long block2key = key >> BLOCK1_SHIFT; + if (block2key != lastBlock2Key) { + blocksToFlush.set(destinationOffset++, block2key); + lastBlock2Key = block2key; + } + + final int block0 = (int) (key >> BLOCK0_SHIFT) & BLOCK0_MASK; + final int block1 = (int) (key >> BLOCK1_SHIFT) & BLOCK1_MASK; + final int block2 = (int) (key >> BLOCK2_SHIFT) & BLOCK2_MASK; + + final LongOneOrN.Block2 blocks1 = localPrevBlocks.get(block0).get(block1); + final LongOneOrN.Block2 inUse1 = localPrevInUse.get(block0).get(block1); + final long [] pb = blocks1.get(block2); + final long[] inuse = inUse1.get(block2); + + inUse1.set(block2, null); + blocks1.set(block2, null); + + recycler.returnItem(pb); + inUseRecycler.returnItem(inuse); + } + + blocksToFlush.remove(destinationOffset, blocksToFlush.size() - destinationOffset); + destinationOffset = 0; + long lastBlock1key = -1; + + // we are clearing out values from block0, block1, block2 + // we are accumulating values of block0, block1 + for (int ii = 0; ii < blocksToFlush.size(); ii++) { + final long blockKey = blocksToFlush.getQuick(ii); + // blockKey = block0 | block1 + final long key = blockKey << BLOCK1_SHIFT; + final long block1Key = key >> BLOCK0_SHIFT; + + if (block1Key != lastBlock1key) { + blocksToFlush.set(destinationOffset++, block1Key); + lastBlock1key = block1Key; + } + + final int block0 = (int) (key >> BLOCK0_SHIFT) & BLOCK0_MASK; + final int block1 = (int) (key >> BLOCK1_SHIFT) & BLOCK1_MASK; + + final LongOneOrN.Block1 blocks0 = localPrevBlocks.get(block0); + final LongOneOrN.Block1 prevs0 = localPrevInUse.get(block0); 
+ final LongOneOrN.Block2 pb2 = blocks0.get(block1); + final LongOneOrN.Block2 inuse = prevs0.get(block1); + + prevs0.set(block1, null); + blocks0.set(block1, null); + + pb2.maybeRecycle(recycler2); + inuse.maybeRecycle(inUse2Recycler); + } + + blocksToFlush.remove(destinationOffset, blocksToFlush.size() - destinationOffset); + + // we are clearing out values from block0, block1 + for (int ii = 0; ii < blocksToFlush.size(); ii++) { + final int block0 = (int) (blocksToFlush.getQuick(ii)) & BLOCK0_MASK; + final LongOneOrN.Block1 pb1 = localPrevBlocks.get(block0); + final LongOneOrN.Block1 inuse = localPrevInUse.get(block0); + + pb1.maybeRecycle(recycler1); + inuse.maybeRecycle(inUse1Recycler); + + localPrevInUse.set(block0, null); + localPrevBlocks.set(block0, null); + } + + blocksToFlush.clear(); + + // and finally recycle the top level block of blocks of blocks of blocks + localPrevBlocks.maybeRecycle(recycler0); + localPrevInUse.maybeRecycle(inUse0Recycler); + } + + /** + * Decides whether to record the previous value. + * @param key the row key to record + * @return If the caller should record the previous value, returns prev inner block, the value + * {@code prevBlocks.get(block0).get(block1).get(block2)}, which is non-null. Otherwise (if the caller should not + * record values), returns null. + */ + final long [] shouldRecordPrevious(final long key) { + // prevFlusher == null means we are not tracking previous values yet (or maybe ever) + if (prevFlusher == null) { + return null; + } + // If we want to track previous values, we make sure we are registered with the UpdateGraphProcessor. 
+ prevFlusher.maybeActivate(); + + final int block0 = (int) (key >> BLOCK0_SHIFT) & BLOCK0_MASK; + final int block1 = (int) (key >> BLOCK1_SHIFT) & BLOCK1_MASK; + final int block2 = (int) (key >> BLOCK2_SHIFT) & BLOCK2_MASK; + + final int indexWithinBlock = (int) (key & INDEX_MASK); + final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; + final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); + + final long[] prevBlockInner = ensurePrevBlock(key, block0, block1, block2); + final long[] inUse = prevInUse.get(block0).get(block1).get(block2); + + // Set value only if not already in use + if ((inUse[indexWithinInUse] & maskWithinInUse) == 0) { + inUse[indexWithinInUse] |= maskWithinInUse; + return prevBlockInner; + } + return null; + } + + @Override + public void prepareForParallelPopulation(final RowSet changedRows) { + final long currentStep = LogicalClock.DEFAULT.currentStep(); + if (ensurePreviousClockCycle == currentStep) { + throw new IllegalStateException("May not call ensurePrevious twice on one clock cycle!"); + } + ensurePreviousClockCycle = currentStep; + + if (changedRows.isEmpty()) { + return; + } + + if (prevFlusher != null) { + prevFlusher.maybeActivate(); + } + + try (final RowSequence.Iterator it = changedRows.getRowSequenceIterator()) { + do { + final long firstKey = it.peekNextKey(); + final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; + + final int block0 = (int) (firstKey >> BLOCK0_SHIFT) & BLOCK0_MASK; + final int block1 = (int) (firstKey >> BLOCK1_SHIFT) & BLOCK1_MASK; + final int block2 = (int) (firstKey >> BLOCK2_SHIFT) & BLOCK2_MASK; + final long[] block = ensureBlock(block0, block1, block2); + + if (prevFlusher == null) { + it.advance(maxKeyInCurrentBlock + 1); + continue; + } + + final long[] prevBlock = ensurePrevBlock(firstKey, block0, block1, block2); + final long[] inUse = prevInUse.get(block0).get(block1).get(block2); + assert inUse != null; + + 
it.getNextRowSequenceThrough(maxKeyInCurrentBlock).forAllRowKeys(key -> { + final int indexWithinBlock = (int) (key & INDEX_MASK); + final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; + final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); + + prevBlock[indexWithinBlock] = block[indexWithinBlock]; + inUse[indexWithinInUse] |= maskWithinInUse; + }); + } while (it.hasMore()); + } + } + + /** + * This method supports the 'getPrev' method for its inheritors, doing some of the 'inUse' housekeeping that is + * common to all inheritors. + * @return true if the inheritor should return a value from its "prev" data structure; false if it should return a + * value from its "current" data structure. + */ + private boolean shouldUsePrevious(final long index) { + if (prevFlusher == null) { + return false; + } + + if (prevInUse == null) { + return false; + } + + final long [] inUse = prevInUse.getInnermostBlockByKeyOrNull(index); + if (inUse == null) { + return false; + } + + final int indexWithinBlock = (int) (index & INDEX_MASK); + final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; + final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); + + return (inUse[indexWithinInUse] & maskWithinInUse) != 0; + } + + // region fillByRanges + @Override + /* TYPE_MIXIN */ void fillByRanges( + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableLongChunk chunk = dest.asWritableLongChunk(); + // endregion chunkDecl + final FillByContext ctx = new FillByContext<>(); + rowSequence.forAllRowKeyRanges((long firstKey, final long lastKey) -> { + if (firstKey > ctx.maxKeyInCurrentBlock) { + ctx.block = blocks.getInnermostBlockByKeyOrNull(firstKey); + ctx.maxKeyInCurrentBlock = firstKey | INDEX_MASK; + } + while (true) { + final long rightKeyForThisBlock = Math.min(lastKey, ctx.maxKeyInCurrentBlock); + final int length = (int) (rightKeyForThisBlock - 
firstKey + 1); + if (ctx.block == null) { + chunk.fillWithNullValue(ctx.offset, length); + } else { + final int sIndexWithinBlock = (int)(firstKey & INDEX_MASK); + // for the benefit of code generation. + final int offset = ctx.offset; + final long[] block = ctx.block; + // region copyFromTypedArray + chunk.copyFromTypedArray(block, sIndexWithinBlock, offset, length); + // endregion copyFromTypedArray + } + ctx.offset += length; + firstKey += length; + if (firstKey > lastKey) { + break; + } + ctx.block = blocks.getInnermostBlockByKeyOrNull(firstKey); + ctx.maxKeyInCurrentBlock = firstKey | INDEX_MASK; + } + }); + dest.setSize(ctx.offset); + } + + void fillByRanges( + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence + , LongFunction converter) { + // region chunkDecl + final WritableObjectChunk chunk = dest.asWritableObjectChunk(); + // endregion chunkDecl + final FillByContext ctx = new FillByContext<>(); + rowSequence.forAllRowKeyRanges((long firstKey, final long lastKey) -> { + if (firstKey > ctx.maxKeyInCurrentBlock) { + ctx.block = blocks.getInnermostBlockByKeyOrNull(firstKey); + ctx.maxKeyInCurrentBlock = firstKey | INDEX_MASK; + } + while (true) { + final long rightKeyForThisBlock = Math.min(lastKey, ctx.maxKeyInCurrentBlock); + final int length = (int) (rightKeyForThisBlock - firstKey + 1); + if (ctx.block == null) { + chunk.fillWithNullValue(ctx.offset, length); + } else { + final int sIndexWithinBlock = (int)(firstKey & INDEX_MASK); + // for the benefit of code generation. 
+ final int offset = ctx.offset; + final long[] block = ctx.block; + // region copyFromTypedArray + for (int ii = 0; ii < length; ii++) { + chunk.set(offset + ii, converter.apply(block[sIndexWithinBlock + ii])); + } + // endregion copyFromTypedArray + } + ctx.offset += length; + firstKey += length; + if (firstKey > lastKey) { + break; + } + ctx.block = blocks.getInnermostBlockByKeyOrNull(firstKey); + ctx.maxKeyInCurrentBlock = firstKey | INDEX_MASK; + } + }); + dest.setSize(ctx.offset); + } + // endregion fillByRanges + + // region fillByKeys + @Override + /* TYPE_MIXIN */ void fillByKeys( + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableLongChunk chunk = dest.asWritableLongChunk(); + // endregion chunkDecl + final FillByContext ctx = new FillByContext<>(); + rowSequence.forEachRowKey((final long v) -> { + if (v > ctx.maxKeyInCurrentBlock) { + ctx.block = blocks.getInnermostBlockByKeyOrNull(v); + ctx.maxKeyInCurrentBlock = v | INDEX_MASK; + } + if (ctx.block == null) { + chunk.fillWithNullValue(ctx.offset, 1); + } else { + // region conversion + chunk.set(ctx.offset, ctx.block[(int) (v & INDEX_MASK)]); + // endregion conversion + } + ++ctx.offset; + return true; + }); + dest.setSize(ctx.offset); + } + + void fillByKeys( + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence + , LongFunction converter) { + // region chunkDecl + final WritableObjectChunk chunk = dest.asWritableObjectChunk(); + // endregion chunkDecl + final FillByContext ctx = new FillByContext<>(); + rowSequence.forEachRowKey((final long v) -> { + if (v > ctx.maxKeyInCurrentBlock) { + ctx.block = blocks.getInnermostBlockByKeyOrNull(v); + ctx.maxKeyInCurrentBlock = v | INDEX_MASK; + } + if (ctx.block == null) { + chunk.fillWithNullValue(ctx.offset, 1); + } else { + // region conversion + chunk.set(ctx.offset,converter.apply( ctx.block[(int) (v & INDEX_MASK)])); + // endregion conversion + } 
+ ++ctx.offset; + return true; + }); + dest.setSize(ctx.offset); + } + // endregion fillByKeys + + // region fillByUnRowSequence + @Override + /* TYPE_MIXIN */ void fillByUnRowSequence( + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableLongChunk chunk = dest.asWritableLongChunk(); + // endregion chunkDecl + for (int ii = 0; ii < keys.size(); ) { + final long firstKey = keys.get(ii); + if (firstKey == RowSequence.NULL_ROW_KEY) { + chunk.set(ii++, NULL_LONG); + continue; + } + final long masked = firstKey & ~INDEX_MASK; + int lastII = ii; + while (lastII + 1 < keys.size()) { + final int nextII = lastII + 1; + final long nextKey = keys.get(nextII); + final long nextMasked = nextKey & ~INDEX_MASK; + if (nextMasked != masked) { + break; + } + lastII = nextII; + } + final long [] block = blocks.getInnermostBlockByKeyOrNull(firstKey); + if (block == null) { + chunk.fillWithNullValue(ii, lastII - ii + 1); + ii = lastII + 1; + continue; + } + while (ii <= lastII) { + final int indexWithinBlock = (int) (keys.get(ii) & INDEX_MASK); + // region conversion + chunk.set(ii++, block[indexWithinBlock]); + // endregion conversion + } + } + dest.setSize(keys.size()); + } + + @Override + /* TYPE_MIXIN */ void fillPrevByUnRowSequence( + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableLongChunk chunk = dest.asWritableLongChunk(); + // endregion chunkDecl + for (int ii = 0; ii < keys.size(); ) { + final long firstKey = keys.get(ii); + if (firstKey == RowSequence.NULL_ROW_KEY) { + chunk.set(ii++, NULL_LONG); + continue; + } + final long masked = firstKey & ~INDEX_MASK; + int lastII = ii; + while (lastII + 1 < keys.size()) { + final int nextII = lastII + 1; + final long nextKey = keys.get(nextII); + final long nextMasked = nextKey & ~INDEX_MASK; + if (nextMasked != masked) { + break; + } + lastII = nextII; + } + + final long [] block = 
blocks.getInnermostBlockByKeyOrNull(firstKey); + if (block == null) { + chunk.fillWithNullValue(ii, lastII - ii + 1); + ii = lastII + 1; + continue; + } + + final long [] prevInUse = (prevFlusher == null || this.prevInUse == null) ? null : this.prevInUse.getInnermostBlockByKeyOrNull(firstKey); + final long [] prevBlock = prevInUse == null ? null : prevBlocks.getInnermostBlockByKeyOrNull(firstKey); + while (ii <= lastII) { + final int indexWithinBlock = (int) (keys.get(ii) & INDEX_MASK); + final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; + final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); + + final long[] blockToUse = (prevInUse != null && (prevInUse[indexWithinInUse] & maskWithinInUse) != 0) ? prevBlock : block; + // region conversion + chunk.set(ii++, blockToUse == null ? NULL_LONG : blockToUse[indexWithinBlock]); + // endregion conversion + } + } + dest.setSize(keys.size()); + } + + void fillByUnRowSequence( + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + , LongFunction converter) { + // region chunkDecl + final WritableObjectChunk chunk = dest.asWritableObjectChunk(); + // endregion chunkDecl + for (int ii = 0; ii < keys.size(); ) { + final long firstKey = keys.get(ii); + if (firstKey == RowSequence.NULL_ROW_KEY) { + chunk.set(ii++, null); + continue; + } + final long masked = firstKey & ~INDEX_MASK; + int lastII = ii; + while (lastII + 1 < keys.size()) { + final int nextII = lastII + 1; + final long nextKey = keys.get(nextII); + final long nextMasked = nextKey & ~INDEX_MASK; + if (nextMasked != masked) { + break; + } + lastII = nextII; + } + final long [] block = blocks.getInnermostBlockByKeyOrNull(firstKey); + if (block == null) { + chunk.fillWithNullValue(ii, lastII - ii + 1); + ii = lastII + 1; + continue; + } + while (ii <= lastII) { + final int indexWithinBlock = (int) (keys.get(ii) & INDEX_MASK); + // region conversion + chunk.set(ii++,converter.apply( block[indexWithinBlock])); + // 
endregion conversion + } + } + dest.setSize(keys.size()); + } + + + void fillPrevByUnRowSequence( + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + , LongFunction converter) { + // region chunkDecl + final WritableObjectChunk chunk = dest.asWritableObjectChunk(); + // endregion chunkDecl + for (int ii = 0; ii < keys.size(); ) { + final long firstKey = keys.get(ii); + if (firstKey == RowSequence.NULL_ROW_KEY) { + chunk.set(ii++, null); + continue; + } + final long masked = firstKey & ~INDEX_MASK; + int lastII = ii; + while (lastII + 1 < keys.size()) { + final int nextII = lastII + 1; + final long nextKey = keys.get(nextII); + final long nextMasked = nextKey & ~INDEX_MASK; + if (nextMasked != masked) { + break; + } + lastII = nextII; + } + + final long [] block = blocks.getInnermostBlockByKeyOrNull(firstKey); + if (block == null) { + chunk.fillWithNullValue(ii, lastII - ii + 1); + ii = lastII + 1; + continue; + } + + final long [] prevInUse = (prevFlusher == null || this.prevInUse == null) ? null : this.prevInUse.getInnermostBlockByKeyOrNull(firstKey); + final long [] prevBlock = prevInUse == null ? null : prevBlocks.getInnermostBlockByKeyOrNull(firstKey); + while (ii <= lastII) { + final int indexWithinBlock = (int) (keys.get(ii) & INDEX_MASK); + final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; + final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); + + final long[] blockToUse = (prevInUse != null && (prevInUse[indexWithinInUse] & maskWithinInUse) != 0) ? prevBlock : block; + // region conversion + chunk.set(ii++,converter.apply( blockToUse == null ? 
null : blockToUse[indexWithinBlock])); + // endregion conversion + } + } + dest.setSize(keys.size()); + } + // endregion fillByUnRowSequence + + // region fillFromChunkByRanges + @Override + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final RowSequence rowSequence, + @NotNull final Chunk src + /* CONVERTER */) { + if (rowSequence.isEmpty()) { + return; + } + // region chunkDecl + final LongChunk chunk = src.asLongChunk(); + // endregion chunkDecl + final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); + + final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + + if (trackPrevious) { + prevFlusher.maybeActivate(); + } + + int offset = 0; + // This helps us reduce the number of calls to Chunk.isAlias + long[] knownUnaliasedBlock = null; + for (int ii = 0; ii < ranges.size(); ii += 2) { + long firstKey = ranges.get(ii); + final long lastKey = ranges.get(ii + 1); + + while (firstKey <= lastKey) { + final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; + final long lastKeyToUse = Math.min(maxKeyInCurrentBlock, lastKey); + final int length = (int) (lastKeyToUse - firstKey + 1); + + final int block0 = (int) (firstKey >> BLOCK0_SHIFT) & BLOCK0_MASK; + final int block1 = (int) (firstKey >> BLOCK1_SHIFT) & BLOCK1_MASK; + final int block2 = (int) (firstKey >> BLOCK2_SHIFT) & BLOCK2_MASK; + final long [] block = ensureBlock(block0, block1, block2); + + if (block != knownUnaliasedBlock && chunk.isAlias(block)) { + throw new UnsupportedOperationException("Source chunk is an alias for target data"); + } + knownUnaliasedBlock = block; + + final int sIndexWithinBlock = (int) (firstKey & INDEX_MASK); + // This 'if' with its constant condition should be very friendly to the branch predictor. 
+ if (trackPrevious) { + final long[] prevBlock = ensurePrevBlock(firstKey, block0, block1, block2); + final long[] inUse = prevInUse.get(block0).get(block1).get(block2); + + assert inUse != null; + assert prevBlock != null; + + for (int jj = 0; jj < length; ++jj) { + final int indexWithinBlock = sIndexWithinBlock + jj; + final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; + final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); + + if ((inUse[indexWithinInUse] & maskWithinInUse) == 0) { + prevBlock[indexWithinBlock] = block[indexWithinBlock]; + inUse[indexWithinInUse] |= maskWithinInUse; + } + } + } + + // region copyToTypedArray + chunk.copyToTypedArray(offset, block, sIndexWithinBlock, length); + // endregion copyToTypedArray + + firstKey += length; + offset += length; + } + } + } + + void fillFromChunkByRanges( + @NotNull final RowSequence rowSequence, + @NotNull final Chunk src + , ToLongFunction converter) { + if (rowSequence.isEmpty()) { + return; + } + // region chunkDecl + final ObjectChunk chunk = src.asObjectChunk(); + // endregion chunkDecl + final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); + + final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + + if (trackPrevious) { + prevFlusher.maybeActivate(); + } + + int offset = 0; + // This helps us reduce the number of calls to Chunk.isAlias + long[] knownUnaliasedBlock = null; + for (int ii = 0; ii < ranges.size(); ii += 2) { + long firstKey = ranges.get(ii); + final long lastKey = ranges.get(ii + 1); + + while (firstKey <= lastKey) { + final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; + final long lastKeyToUse = Math.min(maxKeyInCurrentBlock, lastKey); + final int length = (int) (lastKeyToUse - firstKey + 1); + + final int block0 = (int) (firstKey >> BLOCK0_SHIFT) & BLOCK0_MASK; + final int block1 = (int) (firstKey >> BLOCK1_SHIFT) & BLOCK1_MASK; + final int block2 = (int) (firstKey >> 
BLOCK2_SHIFT) & BLOCK2_MASK; + final long [] block = ensureBlock(block0, block1, block2); + + if (block != knownUnaliasedBlock && chunk.isAlias(block)) { + throw new UnsupportedOperationException("Source chunk is an alias for target data"); + } + knownUnaliasedBlock = block; + + final int sIndexWithinBlock = (int) (firstKey & INDEX_MASK); + // This 'if' with its constant condition should be very friendly to the branch predictor. + if (trackPrevious) { + final long[] prevBlock = ensurePrevBlock(firstKey, block0, block1, block2); + final long[] inUse = prevInUse.get(block0).get(block1).get(block2); + + assert inUse != null; + assert prevBlock != null; + + for (int jj = 0; jj < length; ++jj) { + final int indexWithinBlock = sIndexWithinBlock + jj; + final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; + final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); + + if ((inUse[indexWithinInUse] & maskWithinInUse) == 0) { + prevBlock[indexWithinBlock] = block[indexWithinBlock]; + inUse[indexWithinInUse] |= maskWithinInUse; + } + } + } + + // region copyToTypedArray + for (int jj = 0; jj < length; jj++) { + block[jj + sIndexWithinBlock] = converter.applyAsLong(chunk.get(offset + jj)); + } + // endregion copyToTypedArray + + firstKey += length; + offset += length; + } + } + } + // endregion fillFromChunkByRanges + + // region fillFromChunkByKeys + @Override + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final RowSequence rowSequence, + @NotNull final Chunk src + /* CONVERTER */) { + if (rowSequence.isEmpty()) { + return; + } + // region chunkDecl + final LongChunk chunk = src.asLongChunk(); + // endregion chunkDecl + final LongChunk keys = rowSequence.asRowKeyChunk(); + + final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep();; + + if (trackPrevious) { + prevFlusher.maybeActivate(); + } + + for (int ii = 0; ii < keys.size(); ) { + final long firstKey = keys.get(ii); + final 
long maxKeyInCurrentBlock = firstKey | INDEX_MASK; + int lastII = ii; + while (lastII + 1 < keys.size() && keys.get(lastII + 1) <= maxKeyInCurrentBlock) { + ++lastII; + } + + final int block0 = (int) (firstKey >> BLOCK0_SHIFT) & BLOCK0_MASK; + final int block1 = (int) (firstKey >> BLOCK1_SHIFT) & BLOCK1_MASK; + final int block2 = (int) (firstKey >> BLOCK2_SHIFT) & BLOCK2_MASK; + final long [] block = ensureBlock(block0, block1, block2); + + if (chunk.isAlias(block)) { + throw new UnsupportedOperationException("Source chunk is an alias for target data"); + } + + // This conditional with its constant condition should be very friendly to the branch predictor. + final long[] prevBlock = trackPrevious ? ensurePrevBlock(firstKey, block0, block1, block2) : null; + final long[] inUse = trackPrevious ? prevInUse.get(block0).get(block1).get(block2) : null; + + while (ii <= lastII) { + final int indexWithinBlock = (int) (keys.get(ii) & INDEX_MASK); + // This 'if' with its constant condition should be very friendly to the branch predictor. 
+ if (trackPrevious) { + assert inUse != null; + assert prevBlock != null; + + final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; + final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); + + if ((inUse[indexWithinInUse] & maskWithinInUse) == 0) { + prevBlock[indexWithinBlock] = block[indexWithinBlock]; + inUse[indexWithinInUse] |= maskWithinInUse; + } + } + // region conversion + block[indexWithinBlock] = chunk.get(ii); + // endregion conversion + ++ii; + } + } + } + + void fillFromChunkByKeys( + @NotNull final RowSequence rowSequence, + @NotNull final Chunk src + , ToLongFunction converter) { + if (rowSequence.isEmpty()) { + return; + } + // region chunkDecl + final ObjectChunk chunk = src.asObjectChunk(); + // endregion chunkDecl + final LongChunk keys = rowSequence.asRowKeyChunk(); + + final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep();; + + if (trackPrevious) { + prevFlusher.maybeActivate(); + } + + for (int ii = 0; ii < keys.size(); ) { + final long firstKey = keys.get(ii); + final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; + int lastII = ii; + while (lastII + 1 < keys.size() && keys.get(lastII + 1) <= maxKeyInCurrentBlock) { + ++lastII; + } + + final int block0 = (int) (firstKey >> BLOCK0_SHIFT) & BLOCK0_MASK; + final int block1 = (int) (firstKey >> BLOCK1_SHIFT) & BLOCK1_MASK; + final int block2 = (int) (firstKey >> BLOCK2_SHIFT) & BLOCK2_MASK; + final long [] block = ensureBlock(block0, block1, block2); + + if (chunk.isAlias(block)) { + throw new UnsupportedOperationException("Source chunk is an alias for target data"); + } + + // This conditional with its constant condition should be very friendly to the branch predictor. + final long[] prevBlock = trackPrevious ? ensurePrevBlock(firstKey, block0, block1, block2) : null; + final long[] inUse = trackPrevious ? 
prevInUse.get(block0).get(block1).get(block2) : null; + + while (ii <= lastII) { + final int indexWithinBlock = (int) (keys.get(ii) & INDEX_MASK); + // This 'if' with its constant condition should be very friendly to the branch predictor. + if (trackPrevious) { + assert inUse != null; + assert prevBlock != null; + + final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; + final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); + + if ((inUse[indexWithinInUse] & maskWithinInUse) == 0) { + prevBlock[indexWithinBlock] = block[indexWithinBlock]; + inUse[indexWithinInUse] |= maskWithinInUse; + } + } + // region conversion + block[indexWithinBlock] = converter.applyAsLong(chunk.get(ii)); + // endregion conversion + ++ii; + } + } + } + // endregion fillFromChunkByKeys + + // region nullByRanges + @Override + void nullByRanges(@NotNull final RowSequence rowSequence) { + if (rowSequence.isEmpty()) { + return; + } + + final boolean hasPrev = prevFlusher != null; + + if (hasPrev) { + prevFlusher.maybeActivate(); + } + + try (RowSequence.Iterator okIt = rowSequence.getRowSequenceIterator()) { + while (okIt.hasMore()) { + final long firstKey = okIt.peekNextKey(); + final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; + final RowSequence blockOk = okIt.getNextRowSequenceThrough(maxKeyInCurrentBlock); + + final int block0 = (int) (firstKey >> BLOCK0_SHIFT) & BLOCK0_MASK; + final int block1 = (int) (firstKey >> BLOCK1_SHIFT) & BLOCK1_MASK; + final int block2 = (int) (firstKey >> BLOCK2_SHIFT) & BLOCK2_MASK; + final long [] block = blocks.getInnermostBlockByKeyOrNull(firstKey); + + if (block == null) { + continue; + } + + blockOk.forAllRowKeyRanges((s, e) -> { + final int length = (int)((e - s) + 1); + + final int sIndexWithinBlock = (int) (s & INDEX_MASK); + // This 'if' with its constant condition should be very friendly to the branch predictor. 
+ if (hasPrev) { + boolean prevRequired = false; + for (int jj = 0; jj < length; ++jj) { + final int indexWithinBlock = sIndexWithinBlock + jj; + if (block[indexWithinBlock] != NULL_LONG) { + prevRequired = true; + break; + } + } + + if (prevRequired) { + final long[] prevBlock = ensurePrevBlock(firstKey, block0, block1, block2); + final long[] inUse = prevInUse.get(block0).get(block1).get(block2); + + assert inUse != null; + assert prevBlock != null; + + for (int jj = 0; jj < length; ++jj) { + final int indexWithinBlock = sIndexWithinBlock + jj; + final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; + final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); + + if ((inUse[indexWithinInUse] & maskWithinInUse) == 0) { + prevBlock[indexWithinBlock] = block[indexWithinBlock]; + inUse[indexWithinInUse] |= maskWithinInUse; + } + } + + Arrays.fill(block, sIndexWithinBlock, sIndexWithinBlock + length, NULL_LONG); + } + } else { + Arrays.fill(block, sIndexWithinBlock, sIndexWithinBlock + length, NULL_LONG); + } + }); + } + } + } + // endregion nullByRanges + + // region nullByKeys + @Override + void nullByKeys(@NotNull final RowSequence rowSequence) { + if (rowSequence.isEmpty()) { + return; + } + + final boolean hasPrev = prevFlusher != null; + + if (hasPrev) { + prevFlusher.maybeActivate(); + } + + try (RowSequence.Iterator okIt = rowSequence.getRowSequenceIterator()) { + while (okIt.hasMore()) { + final long firstKey = okIt.peekNextKey(); + final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; + final RowSequence blockOk = okIt.getNextRowSequenceThrough(maxKeyInCurrentBlock); + + final int block0 = (int) (firstKey >> BLOCK0_SHIFT) & BLOCK0_MASK; + final int block1 = (int) (firstKey >> BLOCK1_SHIFT) & BLOCK1_MASK; + final int block2 = (int) (firstKey >> BLOCK2_SHIFT) & BLOCK2_MASK; + final long[] block = blocks.getInnermostBlockByKeyOrNull(firstKey); + if (block == null) { + continue; + } + + MutableObject prevBlock = new 
MutableObject<>(); + MutableObject inUse = new MutableObject<>(); + + blockOk.forAllRowKeys(key -> { + + final int indexWithinBlock = (int) (key & INDEX_MASK); + // This 'if' with its constant condition should be very friendly to the branch predictor. + if (hasPrev) { + + final long oldValue = block[indexWithinBlock]; + if (oldValue != NULL_LONG) { + if (prevBlock.getValue() == null) { + prevBlock.setValue(ensurePrevBlock(firstKey, block0, block1, block2)); + inUse.setValue(prevInUse.get(block0).get(block1).get(block2)); + } + + final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; + final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); + + if ((inUse.getValue()[indexWithinInUse] & maskWithinInUse) == 0) { + prevBlock.getValue()[indexWithinBlock] = oldValue; + inUse.getValue()[indexWithinInUse] |= maskWithinInUse; + } + } + } + block[indexWithinBlock] = NULL_LONG; + }); + } + } + } + // endregion nullByKeys + + // region fillFromChunkUnordered + @Override + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { + if (keys.size() == 0) { + return; + } + // region chunkDecl + final LongChunk chunk = src.asLongChunk(); + // endregion chunkDecl + + final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep();; + + if (trackPrevious) { + prevFlusher.maybeActivate(); + } + + for (int ii = 0; ii < keys.size(); ) { + final long firstKey = keys.get(ii); + final long minKeyInCurrentBlock = firstKey & ~INDEX_MASK; + final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; + + final int block0 = (int) (firstKey >> BLOCK0_SHIFT) & BLOCK0_MASK; + final int block1 = (int) (firstKey >> BLOCK1_SHIFT) & BLOCK1_MASK; + final int block2 = (int) (firstKey >> BLOCK2_SHIFT) & BLOCK2_MASK; + final long [] block = ensureBlock(block0, block1, block2); + + if (chunk.isAlias(block)) { + 
throw new UnsupportedOperationException("Source chunk is an alias for target data"); + } + + // This conditional with its constant condition should be very friendly to the branch predictor. + final long[] prevBlock = trackPrevious ? ensurePrevBlock(firstKey, block0, block1, block2) : null; + final long[] inUse = trackPrevious ? prevInUse.get(block0).get(block1).get(block2) : null; + + long key = keys.get(ii); + do { + final int indexWithinBlock = (int) (key & INDEX_MASK); + + if (trackPrevious) { + assert inUse != null; + + final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; + final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); + + if ((inUse[indexWithinInUse] & maskWithinInUse) == 0) { + prevBlock[indexWithinBlock] = block[indexWithinBlock]; + inUse[indexWithinInUse] |= maskWithinInUse; + } + } + // region conversion + block[indexWithinBlock] = chunk.get(ii); + // endregion conversion + ++ii; + } while (ii < keys.size() && (key = keys.get(ii)) >= minKeyInCurrentBlock && key <= maxKeyInCurrentBlock); + } + } + + public void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + , ToLongFunction converter) { + if (keys.size() == 0) { + return; + } + // region chunkDecl + final ObjectChunk chunk = src.asObjectChunk(); + // endregion chunkDecl + + final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep();; + + if (trackPrevious) { + prevFlusher.maybeActivate(); + } + + for (int ii = 0; ii < keys.size(); ) { + final long firstKey = keys.get(ii); + final long minKeyInCurrentBlock = firstKey & ~INDEX_MASK; + final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; + + final int block0 = (int) (firstKey >> BLOCK0_SHIFT) & BLOCK0_MASK; + final int block1 = (int) (firstKey >> BLOCK1_SHIFT) & BLOCK1_MASK; + final int block2 = (int) (firstKey >> BLOCK2_SHIFT) & BLOCK2_MASK; + final long [] block = 
ensureBlock(block0, block1, block2); + + if (chunk.isAlias(block)) { + throw new UnsupportedOperationException("Source chunk is an alias for target data"); + } + + // This conditional with its constant condition should be very friendly to the branch predictor. + final long[] prevBlock = trackPrevious ? ensurePrevBlock(firstKey, block0, block1, block2) : null; + final long[] inUse = trackPrevious ? prevInUse.get(block0).get(block1).get(block2) : null; + + long key = keys.get(ii); + do { + final int indexWithinBlock = (int) (key & INDEX_MASK); + + if (trackPrevious) { + assert inUse != null; + + final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; + final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); + + if ((inUse[indexWithinInUse] & maskWithinInUse) == 0) { + prevBlock[indexWithinBlock] = block[indexWithinBlock]; + inUse[indexWithinInUse] |= maskWithinInUse; + } + } + // region conversion + block[indexWithinBlock] = converter.applyAsLong(chunk.get(ii)); + // endregion conversion + ++ii; + } while (ii < keys.size() && (key = keys.get(ii)) >= minKeyInCurrentBlock && key <= maxKeyInCurrentBlock); + } + } + // endregion fillFromChunkUnordered + + @Override + public void fillPrevChunk( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence) { + if (prevFlusher == null) { + fillChunk(context, dest, rowSequence); + return; + } + defaultFillPrevChunk(context, dest, rowSequence); + } + + // region getChunk + @Override + public LongChunk getChunk(@NotNull final GetContext context, @NotNull final RowSequence rowSequence) { + if (rowSequence.isEmpty()) { + return LongChunk.getEmptyChunk(); + } + final long firstKey = rowSequence.firstRowKey(); + final long lastKey = rowSequence.lastRowKey(); + if ((lastKey - firstKey + 1) == rowSequence.size() && (firstKey >> BLOCK2_SHIFT == lastKey >> BLOCK2_SHIFT)) { + // it's a contiguous range, in a single block + return 
DefaultGetContext.resetChunkFromArray(context, + blocks.getInnermostBlockByKeyOrNull(firstKey), + (int) (firstKey & INDEX_MASK), + (int) rowSequence.size()); + } + return getChunkByFilling(context, rowSequence).asLongChunk(); + } + // endregion getChunk + + // region getPrevChunk + @Override + public LongChunk getPrevChunk(@NotNull final GetContext context, @NotNull final RowSequence rowSequence) { + if (prevFlusher == null) { + return getChunk(context, rowSequence); + } + return getPrevChunkByFilling(context, rowSequence).asLongChunk(); + } + // endregion getPrevChunk + + // region reinterpretation + @Override + public boolean allowsReinterpret(@NotNull final Class alternateDataType) { + return alternateDataType == long.class || alternateDataType == Instant.class || alternateDataType == DateTime.class; + } + + @SuppressWarnings("unchecked") + @Override + protected ColumnSource doReinterpret(@NotNull Class alternateDataType) { + if (alternateDataType == this.getType()) { + return (ColumnSource) this; + } else if(alternateDataType == DateTime.class) { + return (ColumnSource) toDateTime(); + } else if (alternateDataType == Instant.class) { + return (ColumnSource) toInstant(); + } + + throw new IllegalArgumentException("Cannot reinterpret `" + getType().getName() + "` to `" + alternateDataType.getName() + "`"); + } + + @Override + public boolean supportsTimeConversion() { + return true; + } + + @Override + public ColumnSource toZonedDateTime(final @NotNull ZoneId zone) { + return new ZonedDateTimeSparseArraySource(Require.neqNull(zone, "zone"), this); + } + + @Override + public ColumnSource toLocalDate(final @NotNull ZoneId zone) { + return new LocalDateWrapperSource(toZonedDateTime(zone), zone); + } + + @Override + public ColumnSource toLocalTime(final @NotNull ZoneId zone) { + return new LocalTimeWrapperSource(toZonedDateTime(zone), zone); + } + + @Override + public ColumnSource toDateTime() { + return new DateTimeSparseArraySource(this); + } + + @Override + public 
ColumnSource toInstant() { + return new InstantSparseArraySource(this); + } + + @Override + public ColumnSource toEpochNano() { + return this; + } + // endregion reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeArraySource.java new file mode 100644 index 00000000000..3ad9829dba5 --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeArraySource.java @@ -0,0 +1,286 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.sources; + +import io.deephaven.base.verify.Require; +import io.deephaven.chunk.Chunk; +import io.deephaven.chunk.LongChunk; +import io.deephaven.chunk.ResettableWritableChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.rowset.RowSequence; +import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.engine.table.ChunkSource; +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.WritableColumnSource; +import io.deephaven.time.DateTime; +import io.deephaven.util.SoftRecycler; +import org.jetbrains.annotations.NotNull; + +import java.time.*; + +public abstract class NanosBasedTimeArraySource extends ArraySourceHelper + implements WritableColumnSource, ConvertableTimeSource { + + protected final LongArraySource nanoSource; + + public NanosBasedTimeArraySource(final @NotNull Class type) { + this(type, new LongArraySource()); + } + + public NanosBasedTimeArraySource(final @NotNull Class type, final @NotNull LongArraySource nanoSource) { + super(type); + this.nanoSource = nanoSource; + } + + // region Getters & Setters + protected abstract TIME_TYPE makeValue(final long nanos); + + protected abstract long toNanos(final TIME_TYPE value); + + @Override + public void set(long 
key, TIME_TYPE value) { + nanoSource.set(key, toNanos(value)); + } + + @Override + public void set(long key, long value) { + nanoSource.set(key, value); + } + + @Override + public void setNull(long key) { + nanoSource.setNull(key); + } + + @Override + public TIME_TYPE get(long rowKey) { + return makeValue(getLong(rowKey)); + } + + @Override + public TIME_TYPE getPrev(long rowKey) { + return makeValue(getPrevLong(rowKey)); + } + + @Override + public long getLong(long rowKey) { + return nanoSource.getLong(rowKey); + } + + @Override + public long getPrevLong(long rowKey) { + return nanoSource.getPrevLong(rowKey); + } + + public final long getAndSetUnsafe(long index, long newValue) { + return nanoSource.getAndSetUnsafe(index, newValue); + } + + @Override + public void shift(long start, long end, long offset) { + nanoSource.shift(start, end, offset); + } + // endregion + + // region ArraySource impl + @Override + public void startTrackingPrevValues() { + nanoSource.startTrackingPrevValues(); + } + + @Override + long[] allocateNullFilledBlock(int size) { + return nanoSource.allocateNullFilledBlock(size); + } + + @Override + long[] allocateBlock(int size) { + return nanoSource.allocateBlock(size); + } + + @Override + void resetBlocks(long[][] newBlocks, long[][] newPrev) { + nanoSource.resetBlocks(newBlocks, newPrev); + } + + @Override + long[][] getPrevBlocks() { + return nanoSource.getPrevBlocks(); + } + + @Override + SoftRecycler getRecycler() { + return nanoSource.getRecycler(); + } + + @Override + public void ensureCapacity(long size, boolean nullFill) { + nanoSource.ensureCapacity(size, nullFill); + } + + @Override + Object getBlock(int blockIndex) { + return nanoSource.getBlock(blockIndex); + } + + @Override + Object getPrevBlock(int blockIndex) { + return nanoSource.getPrevBlock(blockIndex); + } + + @Override + public boolean exposesChunkedBackingStore() { + return false; + } + + @Override + public long resetWritableChunkToBackingStore(@NotNull 
ResettableWritableChunk chunk, long position) { + throw new UnsupportedOperationException(); + } + + @Override + public long resetWritableChunkToBackingStoreSlice(@NotNull ResettableWritableChunk chunk, long position) { + throw new UnsupportedOperationException(); + } + // endregion + + // region Chunking + @Override + public void fillChunk(@NotNull ChunkSource.FillContext context, @NotNull WritableChunk dest, + @NotNull RowSequence rowSequence) { + nanoSource.fillChunk(context, dest, rowSequence, this::makeValue); + } + + @Override + public void fillPrevChunk( + @NotNull ColumnSource.FillContext context, + @NotNull WritableChunk destination, + @NotNull RowSequence rowSequence) { + if (rowSequence.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { + nanoSource.fillSparsePrevChunk(destination, rowSequence, this::makeValue); + } else { + nanoSource.fillPrevChunk(context, destination, rowSequence, this::makeValue); + } + } + + @Override + public Chunk getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { + return getChunkByFilling(context, rowSequence); + } + + @Override + public Chunk getPrevChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { + return getPrevChunkByFilling(context, rowSequence); + } + + @Override + protected void fillSparseChunk( + @NotNull final WritableChunk destGeneric, + @NotNull final RowSequence indices) { + nanoSource.fillSparseChunk(destGeneric, indices, this::makeValue); + } + + @Override + protected void fillSparsePrevChunk( + @NotNull final WritableChunk destGeneric, + @NotNull final RowSequence indices) { + nanoSource.fillSparsePrevChunk(destGeneric, indices, this::makeValue); + } + + @Override + protected void fillSparseChunkUnordered( + @NotNull final WritableChunk destGeneric, + @NotNull final LongChunk indices) { + nanoSource.fillSparseChunkUnordered(destGeneric, indices, this::makeValue); + } + + @Override + protected void fillSparsePrevChunkUnordered( + @NotNull final 
WritableChunk destGeneric, + @NotNull final LongChunk indices) { + nanoSource.fillSparsePrevChunkUnordered(destGeneric, indices, this::makeValue); + } + + @Override + public void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk src) { + nanoSource.fillFromChunkByRanges(rowSequence, src, this::toNanos); + } + + @Override + public void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src) { + nanoSource.fillFromChunkByKeys(rowSequence, src, this::toNanos); + } + + @Override + public void fillFromChunkUnordered( + @NotNull FillFromContext context, + @NotNull Chunk src, + @NotNull LongChunk keys) { + nanoSource.fillFromChunkUnordered(context, src, keys, this::toNanos); + } + // endregion + + // region Reinterpretation + @Override + public boolean allowsReinterpret( + @NotNull final Class alternateDataType) { + return alternateDataType == long.class || alternateDataType == Instant.class + || alternateDataType == DateTime.class; + } + + @SuppressWarnings("unchecked") + @Override + protected ColumnSource doReinterpret( + @NotNull Class alternateDataType) { + if (alternateDataType == this.getType()) { + return (ColumnSource) this; + } else if (alternateDataType == DateTime.class) { + return (ColumnSource) toDateTime(); + } else if (alternateDataType == long.class || alternateDataType == Long.class) { + return (ColumnSource) toEpochNano(); + } else if (alternateDataType == Instant.class) { + return (ColumnSource) toInstant(); + } + + throw new IllegalArgumentException( + "Cannot reinterpret `" + getType().getName() + "` to `" + alternateDataType.getName() + "`"); + } + + @Override + public boolean supportsTimeConversion() { + return true; + } + + @Override + public ColumnSource toZonedDateTime(final @NotNull ZoneId zone) { + return new ZonedDateTimeArraySource(Require.neqNull(zone, "zone"), nanoSource); + } + + @Override + public ColumnSource toLocalDate(final @NotNull ZoneId zone) { + return new LocalDateWrapperSource(toZonedDateTime(zone), zone); + 
} + + @Override + public ColumnSource toLocalTime(final @NotNull ZoneId zone) { + return new LocalTimeWrapperSource(toZonedDateTime(zone), zone); + } + + @Override + public ColumnSource toDateTime() { + return new DateTimeArraySource(nanoSource); + } + + @Override + public ColumnSource toInstant() { + return new InstantArraySource(nanoSource); + } + + @Override + public LongArraySource toEpochNano() { + return nanoSource; + } + // endregion +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeSparseArraySource.java new file mode 100644 index 00000000000..f231335870d --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeSparseArraySource.java @@ -0,0 +1,228 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.sources; + +import io.deephaven.base.verify.Require; +import io.deephaven.chunk.Chunk; +import io.deephaven.chunk.LongChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.rowset.RowSequence; +import io.deephaven.engine.rowset.RowSet; +import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.impl.DefaultChunkSource; +import io.deephaven.engine.table.impl.chunkfillers.ChunkFiller; +import io.deephaven.time.DateTime; +import org.jetbrains.annotations.NotNull; + +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.ZonedDateTime; + +/** + * Array-backed ColumnSource for TIME_TYPEs. Allows reinterpret as long. 
+ */ +public abstract class NanosBasedTimeSparseArraySource extends SparseArrayColumnSource + implements DefaultChunkSource, ConvertableTimeSource { + + protected final LongSparseArraySource nanoSource; + + public NanosBasedTimeSparseArraySource(final @NotNull Class type) { + this(type, new LongSparseArraySource()); + } + + public NanosBasedTimeSparseArraySource(final @NotNull Class type, + final @NotNull LongSparseArraySource nanoSource) { + super(type); + this.nanoSource = nanoSource; + } + + @Override + public void ensureCapacity(final long capacity, final boolean nullFilled) { + nanoSource.ensureCapacity(capacity, nullFilled); + } + + @Override + public void prepareForParallelPopulation(final RowSet rowSet) { + nanoSource.prepareForParallelPopulation(rowSet); + } + + // region Getters & Setters + protected abstract TIME_TYPE makeValue(final long nanos); + + protected abstract long toNanos(final TIME_TYPE value); + + @Override + public void set(long key, TIME_TYPE value) { + nanoSource.set(key, toNanos(value)); + } + + @Override + public void set(long key, long value) { + nanoSource.set(key, value); + } + + @Override + public void setNull(long key) { + nanoSource.setNull(key); + } + + @Override + public TIME_TYPE get(long rowKey) { + return makeValue(getLong(rowKey)); + } + + @Override + public TIME_TYPE getPrev(long rowKey) { + return makeValue(getPrevLong(rowKey)); + } + + @Override + public long getLong(long rowKey) { + return nanoSource.getLong(rowKey); + } + + @Override + public long getPrevLong(long rowKey) { + return nanoSource.getPrevLong(rowKey); + } + + @Override + public void shift(final RowSet keysToShift, long shiftDelta) { + nanoSource.shift(keysToShift, shiftDelta); + } + // endregion + + // region SparseArraySource impl + @Override + public void startTrackingPrevValues() { + nanoSource.startTrackingPrevValues(); + } + // endregion + + // region Chunking + @Override + public Chunk getChunk(@NotNull GetContext context, @NotNull RowSequence 
rowSequence) { + return getChunkByFilling(context, rowSequence); + } + + @Override + public Chunk getPrevChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { + return getPrevChunkByFilling(context, rowSequence); + } + + @Override + void fillByUnRowSequence(@NotNull WritableChunk dest, + @NotNull LongChunk keys) { + nanoSource.fillByUnRowSequence(dest, keys, this::makeValue); + } + + @Override + void fillPrevByUnRowSequence(@NotNull WritableChunk dest, + @NotNull LongChunk keys) { + nanoSource.fillPrevByUnRowSequence(dest, keys, this::makeValue); + } + + @Override + public void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk src) { + nanoSource.fillFromChunkByRanges(rowSequence, src, this::toNanos); + } + + @Override + public void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src) { + nanoSource.fillFromChunkByKeys(rowSequence, src, this::toNanos); + } + + @Override + public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, + @NotNull LongChunk keys) { + nanoSource.fillFromChunkUnordered(context, src, keys, this::toNanos); + } + + @Override + void fillByRanges(@NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { + nanoSource.fillByRanges(dest, rowSequence, this::makeValue); + } + + @Override + void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { + nanoSource.fillByKeys(dest, rowSequence, this::makeValue); + } + + @Override + void nullByRanges(@NotNull RowSequence rowSequence) { + nanoSource.nullByRanges(rowSequence); + } + + @Override + void nullByKeys(@NotNull RowSequence rowSequence) { + nanoSource.nullByKeys(rowSequence); + } + // endregion + + // region Reinterpretation + @Override + public boolean allowsReinterpret( + @NotNull final Class alternateDataType) { + return alternateDataType == long.class || alternateDataType == Instant.class + || alternateDataType == DateTime.class; + } + + @SuppressWarnings("unchecked") + @Override + protected 
ColumnSource doReinterpret( + @NotNull Class alternateDataType) { + if (alternateDataType == this.getType()) { + return (ColumnSource) this; + } else if (alternateDataType == DateTime.class) { + return (ColumnSource) toDateTime(); + } else if (alternateDataType == long.class || alternateDataType == Long.class) { + return (ColumnSource) toEpochNano(); + } else if (alternateDataType == Instant.class) { + return (ColumnSource) toInstant(); + } + + throw new IllegalArgumentException( + "Cannot reinterpret `" + getType().getName() + "` to `" + alternateDataType.getName() + "`"); + } + + @Override + public boolean supportsTimeConversion() { + return true; + } + + @Override + public ColumnSource toZonedDateTime(final @NotNull ZoneId zone) { + return new ZonedDateTimeSparseArraySource(Require.neqNull(zone, "zone"), nanoSource); + } + + @Override + public ColumnSource toLocalDate(final @NotNull ZoneId zone) { + return new LocalDateWrapperSource(toZonedDateTime(zone), zone); + } + + @Override + public ColumnSource toLocalTime(final @NotNull ZoneId zone) { + return new LocalTimeWrapperSource(toZonedDateTime(zone), zone); + } + + @Override + public ColumnSource toDateTime() { + return new DateTimeSparseArraySource(nanoSource); + } + + @Override + public ColumnSource toInstant() { + return new InstantSparseArraySource(nanoSource); + } + + @Override + public ColumnSource toEpochNano() { + return nanoSource; + } + // endregion +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ObjectSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ObjectSparseArraySource.java index 14914d5a5f7..39d763ad72c 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ObjectSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ObjectSparseArraySource.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.sources; +import io.deephaven.engine.table.ChunkSource; 
import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; @@ -42,7 +43,8 @@ * * (C-haracter is deliberately spelled that way in order to prevent Replicate from altering this very comment). */ -public class ObjectSparseArraySource extends SparseArrayColumnSource implements MutableColumnSourceGetDefaults.ForObject { +public class ObjectSparseArraySource extends SparseArrayColumnSource + implements MutableColumnSourceGetDefaults.ForObject /* MIXIN_IMPLS */ { // region recyclers private static final SoftRecycler recycler = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, () -> new Object[BLOCK_SIZE], block -> Arrays.fill(block, null)); // we'll hold onto previous values, fix that @@ -395,7 +397,7 @@ private void commitUpdates() { } @Override - public void prepareForParallelPopulation(RowSet changedRows) { + public void prepareForParallelPopulation(final RowSet changedRows) { final long currentStep = LogicalClock.DEFAULT.currentStep(); if (prepareForParallelPopulationClockCycle == currentStep) { throw new IllegalStateException("May not call prepareForParallelPopulation twice on one clock cycle!"); @@ -470,8 +472,13 @@ private boolean shouldUsePrevious(final long index) { // region fillByRanges @Override - void fillByRanges(@NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { + /* TYPE_MIXIN */ void fillByRanges( + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl final WritableObjectChunk chunk = dest.asWritableObjectChunk(); + // endregion chunkDecl final FillByContext ctx = new FillByContext<>(); rowSequence.forAllRowKeyRanges((long firstKey, final long lastKey) -> { if (firstKey > ctx.maxKeyInCurrentBlock) { @@ -507,8 +514,13 @@ void fillByRanges(@NotNull WritableChunk dest, @NotNull RowSeque // region fillByKeys @Override - void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequence 
rowSequence) { + /* TYPE_MIXIN */ void fillByKeys( + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl final WritableObjectChunk chunk = dest.asWritableObjectChunk(); + // endregion chunkDecl final FillByContext ctx = new FillByContext<>(); rowSequence.forEachRowKey((final long v) -> { if (v > ctx.maxKeyInCurrentBlock) { @@ -518,7 +530,9 @@ void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequenc if (ctx.block == null) { chunk.fillWithNullValue(ctx.offset, 1); } else { + // region conversion chunk.set(ctx.offset, ctx.block[(int) (v & INDEX_MASK)]); + // endregion conversion } ++ctx.offset; return true; @@ -529,12 +543,17 @@ void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequenc // region fillByUnRowSequence @Override - void fillByUnRowSequence(@NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableObjectChunk ObjectChunk = dest.asWritableObjectChunk(); + /* TYPE_MIXIN */ void fillByUnRowSequence( + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableObjectChunk chunk = dest.asWritableObjectChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ) { final long firstKey = keys.get(ii); if (firstKey == RowSequence.NULL_ROW_KEY) { - ObjectChunk.set(ii++, null); + chunk.set(ii++, null); continue; } final long masked = firstKey & ~INDEX_MASK; @@ -550,25 +569,32 @@ void fillByUnRowSequence(@NotNull WritableChunk dest, @NotNull L } final T [] block = blocks.getInnermostBlockByKeyOrNull(firstKey); if (block == null) { - ObjectChunk.fillWithNullValue(ii, lastII - ii + 1); + chunk.fillWithNullValue(ii, lastII - ii + 1); ii = lastII + 1; continue; } while (ii <= lastII) { final int indexWithinBlock = (int) (keys.get(ii) & INDEX_MASK); - ObjectChunk.set(ii++, block[indexWithinBlock]); + // region conversion + chunk.set(ii++, block[indexWithinBlock]); + // endregion conversion } } 
dest.setSize(keys.size()); } @Override - void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableObjectChunk ObjectChunk = dest.asWritableObjectChunk(); + /* TYPE_MIXIN */ void fillPrevByUnRowSequence( + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableObjectChunk chunk = dest.asWritableObjectChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ) { final long firstKey = keys.get(ii); if (firstKey == RowSequence.NULL_ROW_KEY) { - ObjectChunk.set(ii++, null); + chunk.set(ii++, null); continue; } final long masked = firstKey & ~INDEX_MASK; @@ -585,7 +611,7 @@ void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNu final T [] block = blocks.getInnermostBlockByKeyOrNull(firstKey); if (block == null) { - ObjectChunk.fillWithNullValue(ii, lastII - ii + 1); + chunk.fillWithNullValue(ii, lastII - ii + 1); ii = lastII + 1; continue; } @@ -598,7 +624,9 @@ void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNu final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); final T [] blockToUse = (prevInUse != null && (prevInUse[indexWithinInUse] & maskWithinInUse) != 0) ? prevBlock : block; - ObjectChunk.set(ii++, blockToUse == null ? null : blockToUse[indexWithinBlock]); + // region conversion + chunk.set(ii++, blockToUse == null ? 
null : blockToUse[indexWithinBlock]); + // endregion conversion } } dest.setSize(keys.size()); @@ -607,11 +635,16 @@ void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNu // region fillFromChunkByRanges @Override - void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final RowSequence rowSequence, + @NotNull final Chunk src + /* CONVERTER */) { if (rowSequence.isEmpty()) { return; } + // region chunkDecl final ObjectChunk chunk = src.asObjectChunk(); + // endregion chunkDecl final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); final boolean trackPrevious = shouldTrackPrevious(); @@ -683,11 +716,16 @@ private boolean shouldTrackPrevious() { // region fillFromChunkByKeys @Override - void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final RowSequence rowSequence, + @NotNull final Chunk src + /* CONVERTER */) { if (rowSequence.isEmpty()) { return; } + // region chunkDecl final ObjectChunk chunk = src.asObjectChunk(); + // endregion chunkDecl final LongChunk keys = rowSequence.asRowKeyChunk(); final boolean trackPrevious = shouldTrackPrevious();; @@ -732,7 +770,9 @@ void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src, @NotNull LongChunk keys) { + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { if (keys.size() == 0) { return; } + // region chunkDecl final ObjectChunk chunk = src.asObjectChunk(); + // endregion chunkDecl final boolean trackPrevious = shouldTrackPrevious();; @@ -917,7 +963,9 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch inUse[indexWithinInUse] |= maskWithinInUse; } } + // region conversion block[indexWithinBlock] = chunk.get(ii); + // endregion conversion ++ii; } while (ii < keys.size() && (key = 
keys.get(ii)) >= minKeyInCurrentBlock && key <= maxKeyInCurrentBlock); } @@ -925,7 +973,10 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch // endregion fillFromChunkUnordered @Override - public void fillPrevChunk(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { + public void fillPrevChunk( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence) { if (prevFlusher == null) { fillChunk(context, dest, rowSequence); return; @@ -935,7 +986,7 @@ public void fillPrevChunk(@NotNull FillContext context, @NotNull WritableChunk getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { + public ObjectChunk getChunk(@NotNull final GetContext context, @NotNull final RowSequence rowSequence) { if (rowSequence.isEmpty()) { return ObjectChunk.getEmptyChunk(); } @@ -954,7 +1005,7 @@ public ObjectChunk getChunk(@NotNull GetContext context, @NotNull Row // region getPrevChunk @Override - public ObjectChunk getPrevChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { + public ObjectChunk getPrevChunk(@NotNull final GetContext context, @NotNull final RowSequence rowSequence) { if (prevFlusher == null) { return getChunk(context, rowSequence); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/RedirectedColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/RedirectedColumnSource.java index 1454a3a98ac..f2a286710f4 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/RedirectedColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/RedirectedColumnSource.java @@ -3,12 +3,14 @@ */ package io.deephaven.engine.table.impl.sources; +import io.deephaven.base.text.Convert; import io.deephaven.base.verify.Assert; import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.SharedContext; 
import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.util.RowRedirection; +import io.deephaven.time.DateTime; import io.deephaven.util.BooleanUtils; import io.deephaven.engine.table.impl.join.dupexpand.DupExpandKernel; import io.deephaven.engine.table.impl.sort.permute.PermuteKernel; @@ -25,6 +27,13 @@ import io.deephaven.engine.rowset.chunkattributes.RowKeys; import io.deephaven.chunk.attributes.Values; + +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.ZonedDateTime; + import static io.deephaven.util.QueryConstants.*; /** @@ -34,7 +43,7 @@ * @param */ public class RedirectedColumnSource extends AbstractDeferredGroupingColumnSource - implements UngroupableColumnSource { + implements UngroupableColumnSource, ConvertableTimeSource { /** * Redirect the innerSource if it is not agnostic to redirection. Otherwise, return the innerSource. * @@ -366,17 +375,77 @@ public void releaseCachedResources() { @Override public boolean allowsReinterpret( @NotNull final Class alternateDataType) { + if ((alternateDataType == long.class + || alternateDataType == Long.class + || alternateDataType == DateTime.class + || alternateDataType == Instant.class) + && supportsTimeConversion()) { + return true; + } + return innerSource.allowsReinterpret(alternateDataType); } + @Override + public boolean supportsTimeConversion() { + return innerSource instanceof ConvertableTimeSource + && ((ConvertableTimeSource) innerSource).supportsTimeConversion(); + } + + @Override + public ColumnSource toEpochNano() { + return new RedirectedColumnSource<>(this.rowRedirection, ((ConvertableTimeSource) innerSource) + .toEpochNano()); + } + + @Override + public ColumnSource toDateTime() { + return new RedirectedColumnSource<>(this.rowRedirection, ((ConvertableTimeSource) innerSource) + .toDateTime()); + } + + @Override + public ColumnSource toInstant() { + 
return new RedirectedColumnSource<>(this.rowRedirection, ((ConvertableTimeSource) innerSource) + .toInstant()); + } + + @Override + public ColumnSource toZonedDateTime(ZoneId zone) { + return new RedirectedColumnSource<>(this.rowRedirection, ((ConvertableTimeSource) innerSource) + .toZonedDateTime(zone)); + } + + @Override + public ColumnSource toLocalDate(ZoneId zone) { + return new RedirectedColumnSource<>(this.rowRedirection, ((ConvertableTimeSource) innerSource) + .toLocalDate(zone)); + } + + @Override + public ColumnSource toLocalTime(ZoneId zone) { + return new RedirectedColumnSource<>(this.rowRedirection, ((ConvertableTimeSource) innerSource) + .toLocalTime(zone)); + } + + @SuppressWarnings("unchecked") @Override protected ColumnSource doReinterpret( @NotNull Class alternateDataType) { if (TypeUtils.getUnboxedTypeIfBoxed(alternateDataType) == byte.class && getType() == Boolean.class) { return new ReinterpretToOriginalForBoolean<>(alternateDataType); } - // noinspection unchecked - return new ReinterpretToOriginal(alternateDataType); + + if (supportsTimeConversion()) { + if (alternateDataType == long.class || alternateDataType == Long.class) { + return (ColumnSource) toEpochNano(); + } else if (alternateDataType == DateTime.class) { + return (ColumnSource) toDateTime(); + } else if (alternateDataType == Instant.class) { + return (ColumnSource) toInstant(); + } + } + return new ReinterpretToOriginal<>(alternateDataType); } private class ReinterpretToOriginal diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ReinterpretUtils.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ReinterpretUtils.java index aa3fcfb0fe0..a454abd08cb 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ReinterpretUtils.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ReinterpretUtils.java @@ -8,6 +8,10 @@ import io.deephaven.time.DateTime; import org.jetbrains.annotations.NotNull; 
+import java.time.Instant; +import java.time.ZonedDateTime; +import java.util.function.Consumer; + public class ReinterpretUtils { /** @@ -58,6 +62,42 @@ public static ColumnSource booleanToByteSource(ColumnSource source) { } } + /** + * Given an {@link Instant} column source turn it into a long column source, either via reinterpretation or + * wrapping. + * + * @param source the source to turn into a long source + * + * @return the long source + */ + @NotNull + public static ColumnSource instantToLongSource(final @NotNull ColumnSource source) { + if (source.allowsReinterpret(long.class)) { + return source.reinterpret(long.class); + } else { + // noinspection unchecked + return new InstantAsLongColumnSource((ColumnSource) source); + } + } + + /** + * Given a {@link ZonedDateTime} column source turn it into a long column source, either via reinterpretation or + * wrapping. + * + * @param source the source to turn into a long source + * + * @return the long source + */ + @NotNull + public static ColumnSource zonedDateTimeToLongSource(final @NotNull ColumnSource source) { + if (source.allowsReinterpret(long.class)) { + return source.reinterpret(long.class); + } else { + // noinspection unchecked + return new ZonedDateTimeAsLongSource((ColumnSource) source); + } + } + /** * If source is something that we prefer to handle as a primitive, do the appropriate conversion. 
* @@ -71,6 +111,12 @@ public static ColumnSource maybeConvertToPrimitive(ColumnSource source) { if (source.getType() == DateTime.class) { return dateTimeToLongSource(source); } + if (source.getType() == Instant.class) { + return instantToLongSource(source); + } + if (source.getType() == ZonedDateTime.class) { + return zonedDateTimeToLongSource(source); + } return source; } @@ -85,7 +131,7 @@ public static ChunkType maybeConvertToPrimitiveChunkType(@NotNull final Class if (dataType == Boolean.class || dataType == boolean.class) { return ChunkType.Byte; } - if (dataType == DateTime.class) { + if (dataType == DateTime.class || dataType == Instant.class || dataType == ZonedDateTime.class) { return ChunkType.Long; } return ChunkType.fromElementType(dataType); @@ -102,7 +148,7 @@ public static Class maybeConvertToPrimitiveDataType(@NotNull final Class d if (dataType == Boolean.class || dataType == boolean.class) { return byte.class; } - if (dataType == DateTime.class) { + if (dataType == DateTime.class || dataType == Instant.class || dataType == ZonedDateTime.class) { return long.class; } return dataType; @@ -115,26 +161,35 @@ public static Class maybeConvertToPrimitiveDataType(@NotNull final Class d * @param source The source to convert * @return Reinterpret or box source back to the original type if possible */ - public static ColumnSource convertToOriginal(@NotNull final Class originalType, + public static ColumnSource convertToOriginal( + @NotNull final Class originalType, @NotNull final ColumnSource source) { - if (originalType == Boolean.class) { - if (source.getType() != byte.class) { + + final Consumer> validateSourceType = expectedType -> { + if (source.getType() != expectedType) { throw new UnsupportedOperationException( - "Cannot convert column of type " + source.getType() + " to Boolean"); + "Cannot convert column of type " + source.getType() + " to " + originalType); } + }; + + if (originalType == Boolean.class) { + validateSourceType.accept(byte.class); // 
noinspection unchecked return source.allowsReinterpret(Boolean.class) ? source.reinterpret(Boolean.class) : new BoxedColumnSource.OfBoolean((ColumnSource) source); } if (originalType == DateTime.class) { - if (source.getType() != long.class) { - throw new UnsupportedOperationException( - "Cannot convert column of type " + source.getType() + " to DateTime"); - } + validateSourceType.accept(long.class); // noinspection unchecked return source.allowsReinterpret(DateTime.class) ? source.reinterpret(DateTime.class) : new BoxedColumnSource.OfDateTime((ColumnSource) source); } + if (originalType == Instant.class) { + validateSourceType.accept(long.class); + // noinspection unchecked + return source.allowsReinterpret(Instant.class) ? source.reinterpret(Instant.class) + : new BoxedColumnSource.OfInstant((ColumnSource) source); + } throw new UnsupportedOperationException("Unsupported original type " + originalType); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortArraySource.java index aff545b28fb..195a2dd71d6 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortArraySource.java @@ -17,10 +17,14 @@ import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.engine.table.ChunkSource; +import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.util.SoftRecycler; import io.deephaven.util.compare.ShortComparisons; +import io.deephaven.util.datastructures.LongSizedDataStructure; +import org.apache.commons.lang3.mutable.MutableInt; import 
org.jetbrains.annotations.NotNull; import java.util.Arrays; @@ -37,7 +41,8 @@ * * (C-haracter is deliberately spelled that way in order to prevent Replicate from altering this very comment). */ -public class ShortArraySource extends ArraySourceHelper implements MutableColumnSourceGetDefaults.ForShort { +public class ShortArraySource extends ArraySourceHelper + implements MutableColumnSourceGetDefaults.ForShort /* MIXIN_IMPLS */ { private static final SoftRecycler recycler = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, () -> new short[BLOCK_SIZE], null); @@ -79,7 +84,7 @@ public void prepareForParallelPopulation(RowSet changedRows) { return; } - // ensure that this source will have sufficient capacity to store these indices, does not need to be + // ensure that this source will have sufficient capacity to store these rows, does not need to be // null-filled as the values will be immediately written ensureCapacity(changedRows.lastRowKey() + 1, false); @@ -168,6 +173,9 @@ public final short getAndSetUnsafe(long index, short newValue) { return oldValue; } + // region getAndAddUnsafe + // endregion getAndAddUnsafe + @Override public Short getPrev(long rowKey) { return box(getPrevShort(rowKey)); @@ -208,7 +216,7 @@ public void move(long source, long dest, long length) { return; } if (((source - dest) & INDEX_MASK) == 0 && (source & INDEX_MASK) == 0) { - // TODO: we can move full blocks! + // TODO (#3359): we can move full blocks! 
} if (source < dest && source + length >= dest) { for (long ii = length - 1; ii >= 0; ) { @@ -304,13 +312,133 @@ public long resetWritableChunkToBackingStoreSlice(@NotNull ResettableWritableChu return capacity; } + // region fillChunk + @Override + public /* TYPE_MIXIN */ void fillChunk( + @NotNull final ChunkSource.FillContext context, + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + if (rowSequence.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { + fillSparseChunk(destination, rowSequence /* CONVERTER_ARG */); + return; + } + // region chunkDecl + final WritableShortChunk chunk = destination.asWritableShortChunk(); + // endregion chunkDecl + MutableInt destOffset = new MutableInt(0); + rowSequence.forAllRowKeyRanges((final long from, final long to) -> { + final int fromBlock = getBlockNo(from); + final int toBlock = getBlockNo(to); + final int fromOffsetInBlock = (int) (from & INDEX_MASK); + if (fromBlock == toBlock) { + final int sz = LongSizedDataStructure.intSize("int cast", to - from + 1); + // region copyFromArray + destination.copyFromArray(getBlock(fromBlock), fromOffsetInBlock, destOffset.intValue(), sz); + // endregion copyFromArray + destOffset.add(sz); + } else { + final int sz = BLOCK_SIZE - fromOffsetInBlock; + // region copyFromArray + destination.copyFromArray(getBlock(fromBlock), fromOffsetInBlock, destOffset.intValue(), sz); + // endregion copyFromArray + destOffset.add(sz); + for (int blockNo = fromBlock + 1; blockNo < toBlock; ++blockNo) { + // region copyFromArray + destination.copyFromArray(getBlock(blockNo), 0, destOffset.intValue(), BLOCK_SIZE); + // endregion copyFromArray + destOffset.add(BLOCK_SIZE); + } + int restSz = (int) (to & INDEX_MASK) + 1; + // region copyFromArray + destination.copyFromArray(getBlock(toBlock), 0, destOffset.intValue(), restSz); + // endregion copyFromArray + destOffset.add(restSz); + } + }); + destination.setSize(destOffset.intValue()); + 
} + // endregion fillChunk + + private interface CopyFromBlockFunctor { + void copy(int blockNo, int srcOffset, int length); + } + + // region fillPrevChunk + @Override + public /* TYPE_MIXIN */ void fillPrevChunk( + @NotNull final ColumnSource.FillContext context, + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + if (prevFlusher == null) { + fillChunk(context, destination, rowSequence /* CONVERTER_ARG */); + return; + } + + if (rowSequence.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { + fillSparsePrevChunk(destination, rowSequence /* CONVERTER_ARG */); + return; + } + + final ArraySourceHelper.FillContext effectiveContext = (ArraySourceHelper.FillContext) context; + final MutableInt destOffset = new MutableInt(0); + + // region chunkDecl + final WritableShortChunk chunk = destination.asWritableShortChunk(); + // endregion chunkDecl + + CopyFromBlockFunctor lambda = (blockNo, srcOffset, length) -> { + final long[] inUse = prevInUse[blockNo]; + if (inUse != null) { + // region conditionalCopy + effectiveContext.copyKernel.conditionalCopy(destination, getBlock(blockNo), getPrevBlock(blockNo), + inUse, srcOffset, destOffset.intValue(), length); + // endregion conditionalCopy + } else { + // region copyFromArray + destination.copyFromArray(getBlock(blockNo), srcOffset, destOffset.intValue(), length); + // endregion copyFromArray + } + destOffset.add(length); + }; + + rowSequence.forAllRowKeyRanges((final long from, final long to) -> { + final int fromBlock = getBlockNo(from); + final int toBlock = getBlockNo(to); + final int fromOffsetInBlock = (int) (from & INDEX_MASK); + if (fromBlock == toBlock) { + final int sz = LongSizedDataStructure.intSize("int cast", to - from + 1); + lambda.copy(fromBlock, fromOffsetInBlock, sz); + } else { + final int sz = BLOCK_SIZE - fromOffsetInBlock; + lambda.copy(fromBlock, fromOffsetInBlock, sz); + + for (int blockNo = fromBlock + 1; blockNo < toBlock; 
++blockNo) { + lambda.copy(blockNo, 0, BLOCK_SIZE); + } + + int restSz = (int) (to & INDEX_MASK) + 1; + lambda.copy(toBlock, 0, restSz); + } + }); + destination.setSize(destOffset.intValue()); + } + // endregion fillPrevChunk + + // region fillSparseChunk @Override - protected void fillSparseChunk(@NotNull final WritableChunk destGeneric, @NotNull final RowSequence rows) { + protected /* TYPE_MIXIN */ void fillSparseChunk( + @NotNull final WritableChunk destGeneric, + @NotNull final RowSequence rows + /* CONVERTER */) { if (rows.size() == 0) { destGeneric.setSize(0); return; } - final WritableShortChunk dest = destGeneric.asWritableShortChunk(); + // region chunkDecl + final WritableShortChunk chunk = destGeneric.asWritableShortChunk(); + // endregion chunkDecl final FillSparseChunkContext ctx = new FillSparseChunkContext<>(); rows.forAllRowKeys((final long v) -> { if (v >= ctx.capForCurrentBlock) { @@ -318,13 +446,20 @@ protected void fillSparseChunk(@NotNull final WritableChunk dest ctx.capForCurrentBlock = (ctx.currentBlockNo + 1L) << LOG_BLOCK_SIZE; ctx.currentBlock = blocks[ctx.currentBlockNo]; } - dest.set(ctx.offset++, ctx.currentBlock[(int) (v & INDEX_MASK)]); + // region conversion + chunk.set(ctx.offset++, ctx.currentBlock[(int) (v & INDEX_MASK)]); + // endregion conversion }); - dest.setSize(ctx.offset); + chunk.setSize(ctx.offset); } + // endregion fillSparseChunk + // region fillSparsePrevChunk @Override - protected void fillSparsePrevChunk(@NotNull final WritableChunk destGeneric, @NotNull final RowSequence rows) { + protected /* TYPE_MIXIN */ void fillSparsePrevChunk( + @NotNull final WritableChunk destGeneric, + @NotNull final RowSequence rows + /* CONVERTER */) { final long sz = rows.size(); if (sz == 0) { destGeneric.setSize(0); @@ -332,11 +467,13 @@ protected void fillSparsePrevChunk(@NotNull final WritableChunk } if (prevFlusher == null) { - fillSparseChunk(destGeneric, rows); + fillSparseChunk(destGeneric, rows /* CONVERTER_ARG */); return; } - 
final WritableShortChunk dest = destGeneric.asWritableShortChunk(); + // region chunkDecl + final WritableShortChunk chunk = destGeneric.asWritableShortChunk(); + // endregion chunkDecl final FillSparseChunkContext ctx = new FillSparseChunkContext<>(); rows.forAllRowKeys((final long v) -> { if (v >= ctx.capForCurrentBlock) { @@ -351,59 +488,86 @@ protected void fillSparsePrevChunk(@NotNull final WritableChunk final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); final boolean usePrev = ctx.prevInUseBlock != null && (ctx.prevInUseBlock[indexWithinInUse] & maskWithinInUse) != 0; - dest.set(ctx.offset++, usePrev ? ctx.currentPrevBlock[indexWithinBlock] : ctx.currentBlock[indexWithinBlock]); + // region conversion + chunk.set(ctx.offset++, usePrev ? ctx.currentPrevBlock[indexWithinBlock] : ctx.currentBlock[indexWithinBlock]); + // endregion conversion }); - dest.setSize(ctx.offset); + chunk.setSize(ctx.offset); } + // endregion fillSparsePrevChunk + // region fillSparseChunkUnordered @Override - protected void fillSparseChunkUnordered(@NotNull final WritableChunk destGeneric, @NotNull final LongChunk rows) { - final WritableShortChunk dest = destGeneric.asWritableShortChunk(); + protected /* TYPE_MIXIN */ void fillSparseChunkUnordered( + @NotNull final WritableChunk destGeneric, + @NotNull final LongChunk rows + /* CONVERTER */) { + // region chunkDecl + final WritableShortChunk chunk = destGeneric.asWritableShortChunk(); + // endregion chunkDecl final int sz = rows.size(); for (int ii = 0; ii < sz; ++ii) { final long fromIndex = rows.get(ii); if (fromIndex == RowSequence.NULL_ROW_KEY) { - dest.set(ii, NULL_SHORT); + chunk.set(ii, NULL_SHORT); continue; } final int blockNo = getBlockNo(fromIndex); if (blockNo >= blocks.length) { - dest.set(ii, NULL_SHORT); + chunk.set(ii, NULL_SHORT); } else { final short[] currentBlock = blocks[blockNo]; - dest.set(ii, currentBlock[(int) (fromIndex 
& INDEX_MASK)]); + // region conversion + chunk.set(ii, currentBlock[(int) (fromIndex & INDEX_MASK)]); + // endregion conversion } } - dest.setSize(sz); + chunk.setSize(sz); } + // endregion fillSparseChunkUnordered + // region fillSparsePrevChunkUnordered @Override - protected void fillSparsePrevChunkUnordered(@NotNull final WritableChunk destGeneric, @NotNull final LongChunk rows) { - final WritableShortChunk dest = destGeneric.asWritableShortChunk(); + protected /* TYPE_MIXIN */ void fillSparsePrevChunkUnordered( + @NotNull final WritableChunk destGeneric, + @NotNull final LongChunk rows + /* CONVERTER */) { + // region chunkDecl + final WritableShortChunk chunk = destGeneric.asWritableShortChunk(); + // endregion chunkDecl final int sz = rows.size(); for (int ii = 0; ii < sz; ++ii) { final long fromIndex = rows.get(ii); if (fromIndex == RowSequence.NULL_ROW_KEY) { - dest.set(ii, NULL_SHORT); + chunk.set(ii, NULL_SHORT); continue; } final int blockNo = getBlockNo(fromIndex); if (blockNo >= blocks.length) { - dest.set(ii, NULL_SHORT); + chunk.set(ii, NULL_SHORT); continue; } final short[] currentBlock = shouldUsePrevious(fromIndex) ? 
prevBlocks[blockNo] : blocks[blockNo]; - dest.set(ii, currentBlock[(int) (fromIndex & INDEX_MASK)]); + // region conversion + chunk.set(ii, currentBlock[(int) (fromIndex & INDEX_MASK)]); + // endregion conversion } - dest.setSize(sz); + chunk.setSize(sz); } + // endregion fillSparsePrevChunkUnordered + // region fillFromChunkByRanges @Override - void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final RowSequence rowSequence, + final Chunk src + /* CONVERTER */) { if (rowSequence.size() == 0) { return; } + // region chunkDecl final ShortChunk chunk = src.asShortChunk(); + // endregion chunkDecl final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); @@ -424,33 +588,36 @@ void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk> LOG_BLOCK_SIZE); + final int block0 = (int) (firstKey >> LOG_BLOCK_SIZE); final int sIndexWithinBlock = (int) (firstKey & INDEX_MASK); - final short[] inner = blocks[block]; + final short[] block = blocks[block0]; - if (inner != knownUnaliasedBlock && chunk.isAlias(inner)) { + if (block != knownUnaliasedBlock && chunk.isAlias(block)) { throw new UnsupportedOperationException("Source chunk is an alias for target data"); } - knownUnaliasedBlock = inner; + knownUnaliasedBlock = block; // This 'if' with its constant condition should be very friendly to the branch predictor. 
if (trackPrevious) { // this should be vectorized for (int jj = 0; jj < length; ++jj) { if (shouldRecordPrevious(firstKey + jj, prevBlocks, recycler)) { - prevBlocks[block][sIndexWithinBlock + jj] = inner[sIndexWithinBlock + jj]; + prevBlocks[block0][sIndexWithinBlock + jj] = block[sIndexWithinBlock + jj]; } } } - chunk.copyToTypedArray(offset, inner, sIndexWithinBlock, length); + // region copyToTypedArray + chunk.copyToTypedArray(offset, block, sIndexWithinBlock, length); + // endregion copyToTypedArray firstKey += length; offset += length; } } } + // endregion fillFromChunkByRanges - public void copyFromChunk(long firstKey, long totalLength, Chunk src, int offset) { + public void copyFromChunk(long firstKey, final long totalLength, final Chunk src, int offset) { if (totalLength == 0) { return; } @@ -463,22 +630,28 @@ public void copyFromChunk(long firstKey, long totalLength, Chunk> LOG_BLOCK_SIZE); + final int block0 = (int) (firstKey >> LOG_BLOCK_SIZE); final int sIndexWithinBlock = (int) (firstKey & INDEX_MASK); - final short[] inner = blocks[block]; + final short[] block = blocks[block0]; - chunk.copyToTypedArray(offset, inner, sIndexWithinBlock, length); + chunk.copyToTypedArray(offset, block, sIndexWithinBlock, length); firstKey += length; offset += length; } } + // region fillFromChunkByKeys @Override - void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final RowSequence rowSequence, + final Chunk src + /* CONVERTER */) { if (rowSequence.size() == 0) { return; } + // region chunkDecl final ShortChunk chunk = src.asShortChunk(); + // endregion chunkDecl final LongChunk keys = rowSequence.asRowKeyChunk(); final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); @@ -495,10 +668,10 @@ void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk> LOG_BLOCK_SIZE); - final short[] inner = blocks[block]; + final int block0 = 
(int) (firstKey >> LOG_BLOCK_SIZE); + final short[] block = blocks[block0]; - if (chunk.isAlias(inner)) { + if (chunk.isAlias(block)) { throw new UnsupportedOperationException("Source chunk is an alias for target data"); } @@ -508,21 +681,31 @@ void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src, @NotNull LongChunk keys) { + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { if (keys.size() == 0) { return; } + // region chunkDecl final ShortChunk chunk = src.asShortChunk(); + // endregion chunkDecl final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); @@ -535,10 +718,10 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch final long minKeyInCurrentBlock = firstKey & ~INDEX_MASK; final long maxKeyInCurrentBlock = firstKey | INDEX_MASK; - final int block = (int) (firstKey >> LOG_BLOCK_SIZE); - final short[] inner = blocks[block]; + final int block0 = (int) (firstKey >> LOG_BLOCK_SIZE); + final short[] block = blocks[block0]; - if (chunk.isAlias(inner)) { + if (chunk.isAlias(block)) { throw new UnsupportedOperationException("Source chunk is an alias for target data"); } @@ -548,12 +731,18 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch if (trackPrevious) { if (shouldRecordPrevious(key, prevBlocks, recycler)) { - prevBlocks[block][indexWithinBlock] = inner[indexWithinBlock]; + prevBlocks[block0][indexWithinBlock] = block[indexWithinBlock]; } } - inner[indexWithinBlock] = chunk.get(ii); + // region conversion + block[indexWithinBlock] = chunk.get(ii); + // endregion conversion ++ii; } while (ii < keys.size() && (key = keys.get(ii)) >= minKeyInCurrentBlock && key <= maxKeyInCurrentBlock); } } + // endregion fillFromChunkUnordered + + // region reinterpretation + // endregion reinterpretation } diff --git 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortSparseArraySource.java index 30d4bb40a15..a4c62a276a3 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortSparseArraySource.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.sources; +import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; @@ -45,7 +46,8 @@ * * (C-haracter is deliberately spelled that way in order to prevent Replicate from altering this very comment). */ -public class ShortSparseArraySource extends SparseArrayColumnSource implements MutableColumnSourceGetDefaults.ForShort { +public class ShortSparseArraySource extends SparseArrayColumnSource + implements MutableColumnSourceGetDefaults.ForShort /* MIXIN_IMPLS */ { // region recyclers private static final SoftRecycler recycler = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, () -> new short[BLOCK_SIZE], null); @@ -408,7 +410,7 @@ private void commitUpdates() { } @Override - public void prepareForParallelPopulation(RowSet changedRows) { + public void prepareForParallelPopulation(final RowSet changedRows) { final long currentStep = LogicalClock.DEFAULT.currentStep(); if (prepareForParallelPopulationClockCycle == currentStep) { throw new IllegalStateException("May not call prepareForParallelPopulation twice on one clock cycle!"); @@ -483,8 +485,13 @@ private boolean shouldUsePrevious(final long index) { // region fillByRanges @Override - void fillByRanges(@NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { + /* TYPE_MIXIN */ void fillByRanges( + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence + /* CONVERTER */) 
{ + // region chunkDecl final WritableShortChunk chunk = dest.asWritableShortChunk(); + // endregion chunkDecl final FillByContext ctx = new FillByContext<>(); rowSequence.forAllRowKeyRanges((long firstKey, final long lastKey) -> { if (firstKey > ctx.maxKeyInCurrentBlock) { @@ -520,8 +527,13 @@ void fillByRanges(@NotNull WritableChunk dest, @NotNull RowSeque // region fillByKeys @Override - void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { + /* TYPE_MIXIN */ void fillByKeys( + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl final WritableShortChunk chunk = dest.asWritableShortChunk(); + // endregion chunkDecl final FillByContext ctx = new FillByContext<>(); rowSequence.forEachRowKey((final long v) -> { if (v > ctx.maxKeyInCurrentBlock) { @@ -531,7 +543,9 @@ void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequenc if (ctx.block == null) { chunk.fillWithNullValue(ctx.offset, 1); } else { + // region conversion chunk.set(ctx.offset, ctx.block[(int) (v & INDEX_MASK)]); + // endregion conversion } ++ctx.offset; return true; @@ -542,12 +556,17 @@ void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequenc // region fillByUnRowSequence @Override - void fillByUnRowSequence(@NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableShortChunk shortChunk = dest.asWritableShortChunk(); + /* TYPE_MIXIN */ void fillByUnRowSequence( + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableShortChunk chunk = dest.asWritableShortChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ) { final long firstKey = keys.get(ii); if (firstKey == RowSequence.NULL_ROW_KEY) { - shortChunk.set(ii++, NULL_SHORT); + chunk.set(ii++, NULL_SHORT); continue; } final long masked = firstKey & ~INDEX_MASK; @@ -563,25 +582,32 @@ void fillByUnRowSequence(@NotNull WritableChunk dest, @NotNull L 
} final short [] block = blocks.getInnermostBlockByKeyOrNull(firstKey); if (block == null) { - shortChunk.fillWithNullValue(ii, lastII - ii + 1); + chunk.fillWithNullValue(ii, lastII - ii + 1); ii = lastII + 1; continue; } while (ii <= lastII) { final int indexWithinBlock = (int) (keys.get(ii) & INDEX_MASK); - shortChunk.set(ii++, block[indexWithinBlock]); + // region conversion + chunk.set(ii++, block[indexWithinBlock]); + // endregion conversion } } dest.setSize(keys.size()); } @Override - void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableShortChunk shortChunk = dest.asWritableShortChunk(); + /* TYPE_MIXIN */ void fillPrevByUnRowSequence( + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableShortChunk chunk = dest.asWritableShortChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ) { final long firstKey = keys.get(ii); if (firstKey == RowSequence.NULL_ROW_KEY) { - shortChunk.set(ii++, NULL_SHORT); + chunk.set(ii++, NULL_SHORT); continue; } final long masked = firstKey & ~INDEX_MASK; @@ -598,7 +624,7 @@ void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNu final short [] block = blocks.getInnermostBlockByKeyOrNull(firstKey); if (block == null) { - shortChunk.fillWithNullValue(ii, lastII - ii + 1); + chunk.fillWithNullValue(ii, lastII - ii + 1); ii = lastII + 1; continue; } @@ -611,7 +637,9 @@ void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNu final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); final short[] blockToUse = (prevInUse != null && (prevInUse[indexWithinInUse] & maskWithinInUse) != 0) ? prevBlock : block; - shortChunk.set(ii++, blockToUse == null ? NULL_SHORT : blockToUse[indexWithinBlock]); + // region conversion + chunk.set(ii++, blockToUse == null ? 
NULL_SHORT : blockToUse[indexWithinBlock]); + // endregion conversion } } dest.setSize(keys.size()); @@ -620,11 +648,16 @@ void fillPrevByUnRowSequence(@NotNull WritableChunk dest, @NotNu // region fillFromChunkByRanges @Override - void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final RowSequence rowSequence, + @NotNull final Chunk src + /* CONVERTER */) { if (rowSequence.isEmpty()) { return; } + // region chunkDecl final ShortChunk chunk = src.asShortChunk(); + // endregion chunkDecl final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); final boolean trackPrevious = shouldTrackPrevious(); @@ -696,11 +729,16 @@ private boolean shouldTrackPrevious() { // region fillFromChunkByKeys @Override - void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src) { + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final RowSequence rowSequence, + @NotNull final Chunk src + /* CONVERTER */) { if (rowSequence.isEmpty()) { return; } + // region chunkDecl final ShortChunk chunk = src.asShortChunk(); + // endregion chunkDecl final LongChunk keys = rowSequence.asRowKeyChunk(); final boolean trackPrevious = shouldTrackPrevious();; @@ -745,7 +783,9 @@ void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src, @NotNull LongChunk keys) { + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { if (keys.size() == 0) { return; } + // region chunkDecl final ShortChunk chunk = src.asShortChunk(); + // endregion chunkDecl final boolean trackPrevious = shouldTrackPrevious();; @@ -930,7 +976,9 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch inUse[indexWithinInUse] |= maskWithinInUse; } } + // region conversion block[indexWithinBlock] = chunk.get(ii); + // endregion conversion ++ii; } while (ii < keys.size() && (key = 
keys.get(ii)) >= minKeyInCurrentBlock && key <= maxKeyInCurrentBlock); } @@ -938,7 +986,10 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch // endregion fillFromChunkUnordered @Override - public void fillPrevChunk(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { + public void fillPrevChunk( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final RowSequence rowSequence) { if (prevFlusher == null) { fillChunk(context, dest, rowSequence); return; @@ -948,7 +999,7 @@ public void fillPrevChunk(@NotNull FillContext context, @NotNull WritableChunk getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { + public ShortChunk getChunk(@NotNull final GetContext context, @NotNull final RowSequence rowSequence) { if (rowSequence.isEmpty()) { return ShortChunk.getEmptyChunk(); } @@ -967,7 +1018,7 @@ public ShortChunk getChunk(@NotNull GetContext context, @NotNull RowSequ // region getPrevChunk @Override - public ShortChunk getPrevChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { + public ShortChunk getPrevChunk(@NotNull final GetContext context, @NotNull final RowSequence rowSequence) { if (prevFlusher == null) { return getChunk(context, rowSequence); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/SparseArrayColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/SparseArrayColumnSource.java index b0eb689cd11..105731a5ca0 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/SparseArrayColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/SparseArrayColumnSource.java @@ -23,6 +23,8 @@ import static io.deephaven.engine.table.impl.sources.sparse.SparseConstants.*; +import java.time.Instant; +import java.time.ZonedDateTime; import java.util.Arrays; import java.util.Collection; @@ -329,6 +331,8 @@ public 
static SparseArrayColumnSource getSparseMemoryColumnSource(long si result = new BooleanSparseArraySource(); } else if (type == DateTime.class) { result = new DateTimeSparseArraySource(); + } else if (type == Instant.class) { + result = new InstantSparseArraySource(); } else { if (componentType != null) { result = new ObjectSparseArraySource<>(type, componentType); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/UnboxedTimeBackedColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/UnboxedTimeBackedColumnSource.java new file mode 100644 index 00000000000..de9f295679e --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/UnboxedTimeBackedColumnSource.java @@ -0,0 +1,111 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.sources; + +import io.deephaven.chunk.ObjectChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.rowset.RowSequence; +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.SharedContext; +import io.deephaven.engine.table.impl.AbstractColumnSource; +import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; +import org.jetbrains.annotations.NotNull; + +/** + * Reinterpret result {@link ColumnSource} implementations that translates various Time sources to {@code long} values. 
+ */ +public abstract class UnboxedTimeBackedColumnSource extends AbstractColumnSource + implements MutableColumnSourceGetDefaults.ForLong { + private final ColumnSource alternateColumnSource; + + private class UnboxingFillContext implements FillContext { + final FillContext alternateFillContext; + final WritableObjectChunk innerChunk; + + private UnboxingFillContext(final int chunkCapacity, final SharedContext sharedContext) { + alternateFillContext = alternateColumnSource.makeFillContext(chunkCapacity, sharedContext); + innerChunk = WritableObjectChunk.makeWritableChunk(chunkCapacity); + } + + @Override + public void close() { + alternateFillContext.close(); + innerChunk.close(); + } + } + + public UnboxedTimeBackedColumnSource(ColumnSource alternateColumnSource) { + super(long.class); + this.alternateColumnSource = alternateColumnSource; + } + + protected abstract long toEpochNano(TIME_TYPE val); + + @Override + public long getLong(long index) { + return toEpochNano(alternateColumnSource.get(index)); + } + + @Override + public long getPrevLong(long index) { + return toEpochNano(alternateColumnSource.getPrev(index)); + } + + @Override + public boolean isImmutable() { + return alternateColumnSource.isImmutable(); + } + + @Override + public boolean allowsReinterpret( + @NotNull final Class alternateDataType) { + return alternateDataType == alternateColumnSource.getType(); + } + + @Override + public ColumnSource doReinterpret( + @NotNull final Class alternateDataType) throws IllegalArgumentException { + // noinspection unchecked + return (ColumnSource) alternateColumnSource; + } + + @Override + public FillContext makeFillContext(final int chunkCapacity, final SharedContext sharedContext) { + return new UnboxingFillContext(chunkCapacity, sharedContext); + } + + @Override + public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk destination, + @NotNull RowSequence rowSequence) { + // noinspection unchecked + final UnboxingFillContext 
unboxingFillContext = (UnboxingFillContext) context; + final WritableObjectChunk innerChunk = unboxingFillContext.innerChunk; + alternateColumnSource.fillChunk(unboxingFillContext.alternateFillContext, innerChunk, rowSequence); + convertToLong(destination, innerChunk); + innerChunk.fillWithNullValue(0, innerChunk.size()); + } + + @Override + public void fillPrevChunk(@NotNull FillContext context, @NotNull WritableChunk destination, + @NotNull RowSequence rowSequence) { + // noinspection unchecked + final UnboxingFillContext unboxingFillContext = (UnboxingFillContext) context; + final WritableObjectChunk innerChunk = unboxingFillContext.innerChunk; + alternateColumnSource.fillPrevChunk(unboxingFillContext.alternateFillContext, innerChunk, rowSequence); + convertToLong(destination, innerChunk); + innerChunk.fillWithNullValue(0, innerChunk.size()); + } + + private void convertToLong(@NotNull WritableChunk destination, + ObjectChunk innerChunk) { + final WritableLongChunk longDestination = destination.asWritableLongChunk(); + for (int ii = 0; ii < innerChunk.size(); ++ii) { + longDestination.set(ii, toEpochNano(innerChunk.get(ii))); + } + longDestination.setSize(innerChunk.size()); + } +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ZonedDateTimeArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ZonedDateTimeArraySource.java new file mode 100644 index 00000000000..5ed1b6d7c68 --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ZonedDateTimeArraySource.java @@ -0,0 +1,62 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.sources; + +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; +import io.deephaven.time.DateTimeUtils; +import io.deephaven.time.TimeZone; +import org.jetbrains.annotations.NotNull; + +import java.time.ZoneId; +import 
java.time.ZonedDateTime; + +/** + * Array-backed ColumnSource for {@link ZonedDateTime}s. Allows reinterpretation as long. + */ +public class ZonedDateTimeArraySource extends NanosBasedTimeArraySource + implements MutableColumnSourceGetDefaults.ForObject, ConvertableTimeSource.Zoned { + private final ZoneId zone; + + public ZonedDateTimeArraySource(final @NotNull String zone) { + this(ZoneId.of(zone)); + } + + public ZonedDateTimeArraySource(final @NotNull TimeZone zone) { + this(zone.getZoneId()); + } + + public ZonedDateTimeArraySource(final @NotNull ZoneId zone) { + this(zone, new LongArraySource()); + } + + public ZonedDateTimeArraySource(final @NotNull ZoneId zone, final @NotNull LongArraySource nanoSource) { + super(ZonedDateTime.class, nanoSource); + this.zone = zone; + } + + @Override + protected ZonedDateTime makeValue(long nanos) { + return DateTimeUtils.makeZonedDateTime(nanos, zone); + } + + @Override + protected long toNanos(ZonedDateTime value) { + return DateTimeUtils.toEpochNano(value); + } + + @Override + public ColumnSource toZonedDateTime(final @NotNull ZoneId zone) { + if (this.zone.equals(zone)) { + return this; + } + + return new ZonedDateTimeArraySource(zone, this.nanoSource); + } + + @Override + public ZoneId getZone() { + return zone; + } +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ZonedDateTimeAsLongSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ZonedDateTimeAsLongSource.java new file mode 100644 index 00000000000..d225b12dad2 --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ZonedDateTimeAsLongSource.java @@ -0,0 +1,24 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.sources; + +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.time.DateTimeUtils; + +import java.time.ZonedDateTime; + +/** + * Reinterpret result {@link ColumnSource} 
implementations that translates {@link ZonedDateTime} to {@code long} values. + */ +public class ZonedDateTimeAsLongSource extends UnboxedTimeBackedColumnSource { + + public ZonedDateTimeAsLongSource(ColumnSource alternateColumnSource) { + super(alternateColumnSource); + } + + @Override + protected long toEpochNano(ZonedDateTime val) { + return DateTimeUtils.toEpochNano(val); + } +} \ No newline at end of file diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ZonedDateTimeSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ZonedDateTimeSparseArraySource.java new file mode 100644 index 00000000000..1ab98177c9f --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ZonedDateTimeSparseArraySource.java @@ -0,0 +1,62 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.sources; + +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; +import io.deephaven.time.DateTimeUtils; +import io.deephaven.time.TimeZone; +import org.jetbrains.annotations.NotNull; + +import java.time.ZoneId; +import java.time.ZonedDateTime; + +/** + * Array-backed ColumnSource for {@link ZonedDateTime}s. Allows reinterpretation as long. 
+ */ +public class ZonedDateTimeSparseArraySource extends NanosBasedTimeSparseArraySource + implements MutableColumnSourceGetDefaults.ForObject, ConvertableTimeSource.Zoned { + private final ZoneId zone; + + public ZonedDateTimeSparseArraySource(final @NotNull String zone) { + this(ZoneId.of(zone)); + } + + public ZonedDateTimeSparseArraySource(final @NotNull TimeZone zone) { + this(zone.getZoneId()); + } + + public ZonedDateTimeSparseArraySource(final @NotNull ZoneId zone) { + this(zone, new LongSparseArraySource()); + } + + public ZonedDateTimeSparseArraySource(final @NotNull ZoneId zone, final @NotNull LongSparseArraySource nanoSource) { + super(ZonedDateTime.class, nanoSource); + this.zone = zone; + } + + @Override + protected ZonedDateTime makeValue(long nanos) { + return DateTimeUtils.makeZonedDateTime(nanos, zone); + } + + @Override + protected long toNanos(ZonedDateTime value) { + return DateTimeUtils.toEpochNano(value); + } + + @Override + public ColumnSource toZonedDateTime(final @NotNull ZoneId zone) { + if (this.zone.equals(zone)) { + return this; + } + + return new ZonedDateTimeSparseArraySource(zone, this.nanoSource); + } + + @Override + public ZoneId getZone() { + return zone; + } +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DByteArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DByteArraySource.java index da81f2712db..bc449e38d15 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DByteArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DByteArraySource.java @@ -40,7 +40,10 @@ * * If your size is smaller than the maximum array size, prefer {@link ImmutableByteArraySource}. 
*/ -public class Immutable2DByteArraySource extends AbstractDeferredGroupingColumnSource implements ImmutableColumnSourceGetDefaults.ForByte, WritableColumnSource, FillUnordered, InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation { +public class Immutable2DByteArraySource extends AbstractDeferredGroupingColumnSource + implements ImmutableColumnSourceGetDefaults.ForByte, WritableColumnSource, FillUnordered, + InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation + /* MIXIN_IMPLS */ { private static final int DEFAULT_SEGMENT_SHIFT = 30; private final long segmentShift; private final int segmentMask; @@ -148,27 +151,47 @@ public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk destination, RowSequence rowSequence) { - final WritableByteChunk asByteChunk = destination.asWritableByteChunk(); - final MutableInt destPos = new MutableInt(0); + // region fillChunkByRanges + /* TYPE_MIXIN */ void fillChunkByRanges( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableByteChunk chunk = destination.asWritableByteChunk(); + // endregion chunkDecl + final MutableInt destPosition = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { while (start < end) { final int segment = keyToSegment(start); final int offset = keyToOffset(start); final long segmentEnd = start | segmentMask; final long realEnd = Math.min(segmentEnd, end); - final int rangeLength = (int)(realEnd - start + 1); - asByteChunk.copyFromTypedArray(data[segment], offset, destPos.getAndAdd(rangeLength), rangeLength); - start += rangeLength; + final int length = (int)(realEnd - start + 1); + // region copyFromTypedArrayImmutable2D + chunk.copyFromTypedArray(data[segment], offset, destPosition.getAndAdd(length), length); + // endregion copyFromTypedArrayImmutable2D + start 
+= length; } }); } - - private void fillChunkByKeys(WritableChunk destination, RowSequence rowSequence) { - final WritableByteChunk asByteChunk = destination.asWritableByteChunk(); + // endregion fillChunkByRanges + + // region fillChunkByKeys + /* TYPE_MIXIN */ void fillChunkByKeys( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableByteChunk chunk = destination.asWritableByteChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> asByteChunk.set(srcPos.getAndIncrement(), getUnsafe(key))); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + chunk.set(srcPos.getAndIncrement(), getUnsafe(key)); + // endregion conversion + }); } + // endregion fillChunkByKeys @Override public Chunk getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { @@ -206,14 +229,31 @@ public void fillFromChunk(@NotNull FillFromContext context, @NotNull Chunk src, RowSequence rowSequence) { - final ByteChunk asByteChunk = src.asByteChunk(); + // region fillFromChunkByKeys + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final ByteChunk chunk = src.asByteChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> set(key, asByteChunk.get(srcPos.getAndIncrement()))); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + set(key, chunk.get(srcPos.getAndIncrement())); + // endregion conversion + }); } - - private void fillFromChunkByRanges(Chunk src, RowSequence rowSequence) { - final ByteChunk asByteChunk = src.asByteChunk(); + // endregion fillFromChunkByKeys + + // region fillFromChunkByRanges + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER 
*/) { + // region chunkDecl + final ByteChunk chunk = src.asByteChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { while (start < end) { @@ -221,33 +261,56 @@ private void fillFromChunkByRanges(Chunk src, RowSequence rowS final int destOffset = keyToOffset(start); final long segmentEnd = start | segmentMask; final long realEnd = Math.min(segmentEnd, end); - final int rangeLength = (int)(realEnd - start + 1); - asByteChunk.copyToTypedArray(srcPos.getAndAdd(rangeLength), data[segment], destOffset, rangeLength); - start += rangeLength; + final int length = (int)(realEnd - start + 1); + // region copyToTypedArrayImmutable2D + chunk.copyToTypedArray(srcPos.getAndAdd(length), data[segment], destOffset, length); + // endregion copyToTypedArrayImmutable2D + start += length; } }); } + // endregion fillFromChunkByRanges + // region fillFromChunkUnordered @Override - public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, @NotNull LongChunk keys) { - final ByteChunk asByteChunk = src.asByteChunk(); + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final ByteChunk chunk = src.asByteChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { - set(keys.get(ii), asByteChunk.get(ii)); + // region conversion + set(keys.get(ii), chunk.get(ii)); + // endregion conversion } } + // endregion fillFromChunkUnordered + // region fillChunkUnordered @Override - public void fillChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableByteChunk byteDest = dest.asWritableByteChunk(); + public /* TYPE_MIXIN */ void fillChunkUnordered( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* 
CONVERTER */) { + // region chunkDecl + final WritableByteChunk chunk = dest.asWritableByteChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { final long rowKey = keys.get(ii); if (rowKey == RowSequence.NULL_ROW_KEY) { - byteDest.set(ii, NULL_BYTE); + chunk.set(ii, NULL_BYTE); } else { - byteDest.set(ii, getUnsafe((int)(rowKey))); + // region conversion + chunk.set(ii, getUnsafe((int)(rowKey))); + // endregion conversion } } } + // endregion fillChunkUnordered @Override public void fillPrevChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { @@ -279,7 +342,7 @@ public void prepareForParallelPopulation(RowSet rowSet) { // nothing to do } - // region reinterpret + // region reinterpretation @Override public boolean allowsReinterpret( @NotNull final Class alternateDataType) { @@ -291,5 +354,5 @@ protected ColumnSource doReinterpret( //noinspection unchecked return (ColumnSource) new ByteAsBooleanColumnSource(this); } - // endregion reinterpret + // endregion reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DCharArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DCharArraySource.java index 061b132a66f..4d1aed9783f 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DCharArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DCharArraySource.java @@ -33,7 +33,10 @@ * * If your size is smaller than the maximum array size, prefer {@link ImmutableCharArraySource}. 
*/ -public class Immutable2DCharArraySource extends AbstractDeferredGroupingColumnSource implements ImmutableColumnSourceGetDefaults.ForChar, WritableColumnSource, FillUnordered, InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation { +public class Immutable2DCharArraySource extends AbstractDeferredGroupingColumnSource + implements ImmutableColumnSourceGetDefaults.ForChar, WritableColumnSource, FillUnordered, + InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation + /* MIXIN_IMPLS */ { private static final int DEFAULT_SEGMENT_SHIFT = 30; private final long segmentShift; private final int segmentMask; @@ -141,27 +144,47 @@ public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk destination, RowSequence rowSequence) { - final WritableCharChunk asCharChunk = destination.asWritableCharChunk(); - final MutableInt destPos = new MutableInt(0); + // region fillChunkByRanges + /* TYPE_MIXIN */ void fillChunkByRanges( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableCharChunk chunk = destination.asWritableCharChunk(); + // endregion chunkDecl + final MutableInt destPosition = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { while (start < end) { final int segment = keyToSegment(start); final int offset = keyToOffset(start); final long segmentEnd = start | segmentMask; final long realEnd = Math.min(segmentEnd, end); - final int rangeLength = (int)(realEnd - start + 1); - asCharChunk.copyFromTypedArray(data[segment], offset, destPos.getAndAdd(rangeLength), rangeLength); - start += rangeLength; + final int length = (int)(realEnd - start + 1); + // region copyFromTypedArrayImmutable2D + chunk.copyFromTypedArray(data[segment], offset, destPosition.getAndAdd(length), length); + // endregion copyFromTypedArrayImmutable2D + start 
+= length; } }); } - - private void fillChunkByKeys(WritableChunk destination, RowSequence rowSequence) { - final WritableCharChunk asCharChunk = destination.asWritableCharChunk(); + // endregion fillChunkByRanges + + // region fillChunkByKeys + /* TYPE_MIXIN */ void fillChunkByKeys( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableCharChunk chunk = destination.asWritableCharChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> asCharChunk.set(srcPos.getAndIncrement(), getUnsafe(key))); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + chunk.set(srcPos.getAndIncrement(), getUnsafe(key)); + // endregion conversion + }); } + // endregion fillChunkByKeys @Override public Chunk getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { @@ -199,14 +222,31 @@ public void fillFromChunk(@NotNull FillFromContext context, @NotNull Chunk src, RowSequence rowSequence) { - final CharChunk asCharChunk = src.asCharChunk(); + // region fillFromChunkByKeys + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final CharChunk chunk = src.asCharChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> set(key, asCharChunk.get(srcPos.getAndIncrement()))); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + set(key, chunk.get(srcPos.getAndIncrement())); + // endregion conversion + }); } - - private void fillFromChunkByRanges(Chunk src, RowSequence rowSequence) { - final CharChunk asCharChunk = src.asCharChunk(); + // endregion fillFromChunkByKeys + + // region fillFromChunkByRanges + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER 
*/) { + // region chunkDecl + final CharChunk chunk = src.asCharChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { while (start < end) { @@ -214,33 +254,56 @@ private void fillFromChunkByRanges(Chunk src, RowSequence rowS final int destOffset = keyToOffset(start); final long segmentEnd = start | segmentMask; final long realEnd = Math.min(segmentEnd, end); - final int rangeLength = (int)(realEnd - start + 1); - asCharChunk.copyToTypedArray(srcPos.getAndAdd(rangeLength), data[segment], destOffset, rangeLength); - start += rangeLength; + final int length = (int)(realEnd - start + 1); + // region copyToTypedArrayImmutable2D + chunk.copyToTypedArray(srcPos.getAndAdd(length), data[segment], destOffset, length); + // endregion copyToTypedArrayImmutable2D + start += length; } }); } + // endregion fillFromChunkByRanges + // region fillFromChunkUnordered @Override - public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, @NotNull LongChunk keys) { - final CharChunk asCharChunk = src.asCharChunk(); + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final CharChunk chunk = src.asCharChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { - set(keys.get(ii), asCharChunk.get(ii)); + // region conversion + set(keys.get(ii), chunk.get(ii)); + // endregion conversion } } + // endregion fillFromChunkUnordered + // region fillChunkUnordered @Override - public void fillChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableCharChunk charDest = dest.asWritableCharChunk(); + public /* TYPE_MIXIN */ void fillChunkUnordered( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* 
CONVERTER */) { + // region chunkDecl + final WritableCharChunk chunk = dest.asWritableCharChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { final long rowKey = keys.get(ii); if (rowKey == RowSequence.NULL_ROW_KEY) { - charDest.set(ii, NULL_CHAR); + chunk.set(ii, NULL_CHAR); } else { - charDest.set(ii, getUnsafe((int)(rowKey))); + // region conversion + chunk.set(ii, getUnsafe((int)(rowKey))); + // endregion conversion } } } + // endregion fillChunkUnordered @Override public void fillPrevChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { @@ -272,6 +335,6 @@ public void prepareForParallelPopulation(RowSet rowSet) { // nothing to do } - // region reinterpret - // endregion reinterpret + // region reinterpretation + // endregion reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DDateTimeArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DDateTimeArraySource.java new file mode 100644 index 00000000000..17de37f878f --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DDateTimeArraySource.java @@ -0,0 +1,41 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.sources.immutable; + +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.impl.ImmutableColumnSourceGetDefaults; +import io.deephaven.time.DateTime; +import io.deephaven.time.DateTimeUtils; +import org.jetbrains.annotations.NotNull; + +/** + * Immutable2DArraySource for {@link DateTime}s. Allows reinterpretation as long. 
+ */ +public class Immutable2DDateTimeArraySource extends Immutable2DNanosBasedTimeArraySource + implements ImmutableColumnSourceGetDefaults.ForLongAsDateTime { + + public Immutable2DDateTimeArraySource() { + super(DateTime.class); + } + + public Immutable2DDateTimeArraySource(final @NotNull Immutable2DLongArraySource nanoSource) { + super(DateTime.class, nanoSource); + } + + @Override + protected DateTime makeValue(long nanos) { + return DateTimeUtils.nanosToTime(nanos); + } + + @Override + protected long toNanos(DateTime value) { + return DateTimeUtils.nanos(value); + } + + @Override + public ColumnSource toDateTime() { + return this; + } +} + diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DDoubleArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DDoubleArraySource.java index 584a77fd18f..588407cbdef 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DDoubleArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DDoubleArraySource.java @@ -38,7 +38,10 @@ * * If your size is smaller than the maximum array size, prefer {@link ImmutableDoubleArraySource}. 
*/ -public class Immutable2DDoubleArraySource extends AbstractDeferredGroupingColumnSource implements ImmutableColumnSourceGetDefaults.ForDouble, WritableColumnSource, FillUnordered, InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation { +public class Immutable2DDoubleArraySource extends AbstractDeferredGroupingColumnSource + implements ImmutableColumnSourceGetDefaults.ForDouble, WritableColumnSource, FillUnordered, + InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation + /* MIXIN_IMPLS */ { private static final int DEFAULT_SEGMENT_SHIFT = 30; private final long segmentShift; private final int segmentMask; @@ -146,27 +149,47 @@ public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk destination, RowSequence rowSequence) { - final WritableDoubleChunk asDoubleChunk = destination.asWritableDoubleChunk(); - final MutableInt destPos = new MutableInt(0); + // region fillChunkByRanges + /* TYPE_MIXIN */ void fillChunkByRanges( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableDoubleChunk chunk = destination.asWritableDoubleChunk(); + // endregion chunkDecl + final MutableInt destPosition = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { while (start < end) { final int segment = keyToSegment(start); final int offset = keyToOffset(start); final long segmentEnd = start | segmentMask; final long realEnd = Math.min(segmentEnd, end); - final int rangeLength = (int)(realEnd - start + 1); - asDoubleChunk.copyFromTypedArray(data[segment], offset, destPos.getAndAdd(rangeLength), rangeLength); - start += rangeLength; + final int length = (int)(realEnd - start + 1); + // region copyFromTypedArrayImmutable2D + chunk.copyFromTypedArray(data[segment], offset, destPosition.getAndAdd(length), length); + // endregion 
copyFromTypedArrayImmutable2D + start += length; } }); } - - private void fillChunkByKeys(WritableChunk destination, RowSequence rowSequence) { - final WritableDoubleChunk asDoubleChunk = destination.asWritableDoubleChunk(); + // endregion fillChunkByRanges + + // region fillChunkByKeys + /* TYPE_MIXIN */ void fillChunkByKeys( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableDoubleChunk chunk = destination.asWritableDoubleChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> asDoubleChunk.set(srcPos.getAndIncrement(), getUnsafe(key))); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + chunk.set(srcPos.getAndIncrement(), getUnsafe(key)); + // endregion conversion + }); } + // endregion fillChunkByKeys @Override public Chunk getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { @@ -204,14 +227,31 @@ public void fillFromChunk(@NotNull FillFromContext context, @NotNull Chunk src, RowSequence rowSequence) { - final DoubleChunk asDoubleChunk = src.asDoubleChunk(); + // region fillFromChunkByKeys + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final DoubleChunk chunk = src.asDoubleChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> set(key, asDoubleChunk.get(srcPos.getAndIncrement()))); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + set(key, chunk.get(srcPos.getAndIncrement())); + // endregion conversion + }); } - - private void fillFromChunkByRanges(Chunk src, RowSequence rowSequence) { - final DoubleChunk asDoubleChunk = src.asDoubleChunk(); + // endregion fillFromChunkByKeys + + // region fillFromChunkByRanges + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final 
Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final DoubleChunk chunk = src.asDoubleChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { while (start < end) { @@ -219,33 +259,56 @@ private void fillFromChunkByRanges(Chunk src, RowSequence rowS final int destOffset = keyToOffset(start); final long segmentEnd = start | segmentMask; final long realEnd = Math.min(segmentEnd, end); - final int rangeLength = (int)(realEnd - start + 1); - asDoubleChunk.copyToTypedArray(srcPos.getAndAdd(rangeLength), data[segment], destOffset, rangeLength); - start += rangeLength; + final int length = (int)(realEnd - start + 1); + // region copyToTypedArrayImmutable2D + chunk.copyToTypedArray(srcPos.getAndAdd(length), data[segment], destOffset, length); + // endregion copyToTypedArrayImmutable2D + start += length; } }); } + // endregion fillFromChunkByRanges + // region fillFromChunkUnordered @Override - public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, @NotNull LongChunk keys) { - final DoubleChunk asDoubleChunk = src.asDoubleChunk(); + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final DoubleChunk chunk = src.asDoubleChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { - set(keys.get(ii), asDoubleChunk.get(ii)); + // region conversion + set(keys.get(ii), chunk.get(ii)); + // endregion conversion } } + // endregion fillFromChunkUnordered + // region fillChunkUnordered @Override - public void fillChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableDoubleChunk doubleDest = dest.asWritableDoubleChunk(); + public /* TYPE_MIXIN */ void fillChunkUnordered( + @NotNull final 
FillContext context, + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableDoubleChunk chunk = dest.asWritableDoubleChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { final long rowKey = keys.get(ii); if (rowKey == RowSequence.NULL_ROW_KEY) { - doubleDest.set(ii, NULL_DOUBLE); + chunk.set(ii, NULL_DOUBLE); } else { - doubleDest.set(ii, getUnsafe((int)(rowKey))); + // region conversion + chunk.set(ii, getUnsafe((int)(rowKey))); + // endregion conversion } } } + // endregion fillChunkUnordered @Override public void fillPrevChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { @@ -277,6 +340,6 @@ public void prepareForParallelPopulation(RowSet rowSet) { // nothing to do } - // region reinterpret - // endregion reinterpret + // region reinterpretation + // endregion reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DFloatArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DFloatArraySource.java index e29ecaa2225..eff951eabad 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DFloatArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DFloatArraySource.java @@ -38,7 +38,10 @@ * * If your size is smaller than the maximum array size, prefer {@link ImmutableFloatArraySource}. 
*/ -public class Immutable2DFloatArraySource extends AbstractDeferredGroupingColumnSource implements ImmutableColumnSourceGetDefaults.ForFloat, WritableColumnSource, FillUnordered, InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation { +public class Immutable2DFloatArraySource extends AbstractDeferredGroupingColumnSource + implements ImmutableColumnSourceGetDefaults.ForFloat, WritableColumnSource, FillUnordered, + InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation + /* MIXIN_IMPLS */ { private static final int DEFAULT_SEGMENT_SHIFT = 30; private final long segmentShift; private final int segmentMask; @@ -146,27 +149,47 @@ public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk destination, RowSequence rowSequence) { - final WritableFloatChunk asFloatChunk = destination.asWritableFloatChunk(); - final MutableInt destPos = new MutableInt(0); + // region fillChunkByRanges + /* TYPE_MIXIN */ void fillChunkByRanges( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableFloatChunk chunk = destination.asWritableFloatChunk(); + // endregion chunkDecl + final MutableInt destPosition = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { while (start < end) { final int segment = keyToSegment(start); final int offset = keyToOffset(start); final long segmentEnd = start | segmentMask; final long realEnd = Math.min(segmentEnd, end); - final int rangeLength = (int)(realEnd - start + 1); - asFloatChunk.copyFromTypedArray(data[segment], offset, destPos.getAndAdd(rangeLength), rangeLength); - start += rangeLength; + final int length = (int)(realEnd - start + 1); + // region copyFromTypedArrayImmutable2D + chunk.copyFromTypedArray(data[segment], offset, destPosition.getAndAdd(length), length); + // endregion 
copyFromTypedArrayImmutable2D + start += length; } }); } - - private void fillChunkByKeys(WritableChunk destination, RowSequence rowSequence) { - final WritableFloatChunk asFloatChunk = destination.asWritableFloatChunk(); + // endregion fillChunkByRanges + + // region fillChunkByKeys + /* TYPE_MIXIN */ void fillChunkByKeys( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableFloatChunk chunk = destination.asWritableFloatChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> asFloatChunk.set(srcPos.getAndIncrement(), getUnsafe(key))); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + chunk.set(srcPos.getAndIncrement(), getUnsafe(key)); + // endregion conversion + }); } + // endregion fillChunkByKeys @Override public Chunk getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { @@ -204,14 +227,31 @@ public void fillFromChunk(@NotNull FillFromContext context, @NotNull Chunk src, RowSequence rowSequence) { - final FloatChunk asFloatChunk = src.asFloatChunk(); + // region fillFromChunkByKeys + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final FloatChunk chunk = src.asFloatChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> set(key, asFloatChunk.get(srcPos.getAndIncrement()))); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + set(key, chunk.get(srcPos.getAndIncrement())); + // endregion conversion + }); } - - private void fillFromChunkByRanges(Chunk src, RowSequence rowSequence) { - final FloatChunk asFloatChunk = src.asFloatChunk(); + // endregion fillFromChunkByKeys + + // region fillFromChunkByRanges + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final Chunk src, + 
@NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final FloatChunk chunk = src.asFloatChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { while (start < end) { @@ -219,33 +259,56 @@ private void fillFromChunkByRanges(Chunk src, RowSequence rowS final int destOffset = keyToOffset(start); final long segmentEnd = start | segmentMask; final long realEnd = Math.min(segmentEnd, end); - final int rangeLength = (int)(realEnd - start + 1); - asFloatChunk.copyToTypedArray(srcPos.getAndAdd(rangeLength), data[segment], destOffset, rangeLength); - start += rangeLength; + final int length = (int)(realEnd - start + 1); + // region copyToTypedArrayImmutable2D + chunk.copyToTypedArray(srcPos.getAndAdd(length), data[segment], destOffset, length); + // endregion copyToTypedArrayImmutable2D + start += length; } }); } + // endregion fillFromChunkByRanges + // region fillFromChunkUnordered @Override - public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, @NotNull LongChunk keys) { - final FloatChunk asFloatChunk = src.asFloatChunk(); + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final FloatChunk chunk = src.asFloatChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { - set(keys.get(ii), asFloatChunk.get(ii)); + // region conversion + set(keys.get(ii), chunk.get(ii)); + // endregion conversion } } + // endregion fillFromChunkUnordered + // region fillChunkUnordered @Override - public void fillChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableFloatChunk floatDest = dest.asWritableFloatChunk(); + public /* TYPE_MIXIN */ void fillChunkUnordered( + @NotNull final FillContext context, + @NotNull 
final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableFloatChunk chunk = dest.asWritableFloatChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { final long rowKey = keys.get(ii); if (rowKey == RowSequence.NULL_ROW_KEY) { - floatDest.set(ii, NULL_FLOAT); + chunk.set(ii, NULL_FLOAT); } else { - floatDest.set(ii, getUnsafe((int)(rowKey))); + // region conversion + chunk.set(ii, getUnsafe((int)(rowKey))); + // endregion conversion } } } + // endregion fillChunkUnordered @Override public void fillPrevChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { @@ -277,6 +340,6 @@ public void prepareForParallelPopulation(RowSet rowSet) { // nothing to do } - // region reinterpret - // endregion reinterpret + // region reinterpretation + // endregion reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DInstantArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DInstantArraySource.java new file mode 100644 index 00000000000..35311b09e84 --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DInstantArraySource.java @@ -0,0 +1,41 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.sources.immutable; + +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.impl.ImmutableColumnSourceGetDefaults; +import io.deephaven.time.DateTimeUtils; +import org.jetbrains.annotations.NotNull; + +import java.time.Instant; + +/** + * Immutable2DArraySource for {@link Instant}s. Allows reinterpretation as long. 
+ */ +public class Immutable2DInstantArraySource extends Immutable2DNanosBasedTimeArraySource + implements ImmutableColumnSourceGetDefaults.ForLongAsInstant { + + public Immutable2DInstantArraySource() { + super(Instant.class); + } + + public Immutable2DInstantArraySource(final @NotNull Immutable2DLongArraySource nanoSource) { + super(Instant.class, nanoSource); + } + + @Override + protected Instant makeValue(long nanos) { + return DateTimeUtils.makeInstant(nanos); + } + + @Override + protected long toNanos(Instant value) { + return DateTimeUtils.toEpochNano(value); + } + + @Override + public ColumnSource toInstant() { + return this; + } +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DIntArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DIntArraySource.java index c8cd871b11d..ee21b03d956 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DIntArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DIntArraySource.java @@ -38,7 +38,10 @@ * * If your size is smaller than the maximum array size, prefer {@link ImmutableIntArraySource}. 
*/ -public class Immutable2DIntArraySource extends AbstractDeferredGroupingColumnSource implements ImmutableColumnSourceGetDefaults.ForInt, WritableColumnSource, FillUnordered, InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation { +public class Immutable2DIntArraySource extends AbstractDeferredGroupingColumnSource + implements ImmutableColumnSourceGetDefaults.ForInt, WritableColumnSource, FillUnordered, + InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation + /* MIXIN_IMPLS */ { private static final int DEFAULT_SEGMENT_SHIFT = 30; private final long segmentShift; private final int segmentMask; @@ -146,27 +149,47 @@ public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk destination, RowSequence rowSequence) { - final WritableIntChunk asIntChunk = destination.asWritableIntChunk(); - final MutableInt destPos = new MutableInt(0); + // region fillChunkByRanges + /* TYPE_MIXIN */ void fillChunkByRanges( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableIntChunk chunk = destination.asWritableIntChunk(); + // endregion chunkDecl + final MutableInt destPosition = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { while (start < end) { final int segment = keyToSegment(start); final int offset = keyToOffset(start); final long segmentEnd = start | segmentMask; final long realEnd = Math.min(segmentEnd, end); - final int rangeLength = (int)(realEnd - start + 1); - asIntChunk.copyFromTypedArray(data[segment], offset, destPos.getAndAdd(rangeLength), rangeLength); - start += rangeLength; + final int length = (int)(realEnd - start + 1); + // region copyFromTypedArrayImmutable2D + chunk.copyFromTypedArray(data[segment], offset, destPosition.getAndAdd(length), length); + // endregion copyFromTypedArrayImmutable2D + start += length; 
} }); } - - private void fillChunkByKeys(WritableChunk destination, RowSequence rowSequence) { - final WritableIntChunk asIntChunk = destination.asWritableIntChunk(); + // endregion fillChunkByRanges + + // region fillChunkByKeys + /* TYPE_MIXIN */ void fillChunkByKeys( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableIntChunk chunk = destination.asWritableIntChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> asIntChunk.set(srcPos.getAndIncrement(), getUnsafe(key))); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + chunk.set(srcPos.getAndIncrement(), getUnsafe(key)); + // endregion conversion + }); } + // endregion fillChunkByKeys @Override public Chunk getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { @@ -204,14 +227,31 @@ public void fillFromChunk(@NotNull FillFromContext context, @NotNull Chunk src, RowSequence rowSequence) { - final IntChunk asIntChunk = src.asIntChunk(); + // region fillFromChunkByKeys + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final IntChunk chunk = src.asIntChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> set(key, asIntChunk.get(srcPos.getAndIncrement()))); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + set(key, chunk.get(srcPos.getAndIncrement())); + // endregion conversion + }); } - - private void fillFromChunkByRanges(Chunk src, RowSequence rowSequence) { - final IntChunk asIntChunk = src.asIntChunk(); + // endregion fillFromChunkByKeys + + // region fillFromChunkByRanges + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl 
+ final IntChunk chunk = src.asIntChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { while (start < end) { @@ -219,33 +259,56 @@ private void fillFromChunkByRanges(Chunk src, RowSequence rowS final int destOffset = keyToOffset(start); final long segmentEnd = start | segmentMask; final long realEnd = Math.min(segmentEnd, end); - final int rangeLength = (int)(realEnd - start + 1); - asIntChunk.copyToTypedArray(srcPos.getAndAdd(rangeLength), data[segment], destOffset, rangeLength); - start += rangeLength; + final int length = (int)(realEnd - start + 1); + // region copyToTypedArrayImmutable2D + chunk.copyToTypedArray(srcPos.getAndAdd(length), data[segment], destOffset, length); + // endregion copyToTypedArrayImmutable2D + start += length; } }); } + // endregion fillFromChunkByRanges + // region fillFromChunkUnordered @Override - public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, @NotNull LongChunk keys) { - final IntChunk asIntChunk = src.asIntChunk(); + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final IntChunk chunk = src.asIntChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { - set(keys.get(ii), asIntChunk.get(ii)); + // region conversion + set(keys.get(ii), chunk.get(ii)); + // endregion conversion } } + // endregion fillFromChunkUnordered + // region fillChunkUnordered @Override - public void fillChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableIntChunk intDest = dest.asWritableIntChunk(); + public /* TYPE_MIXIN */ void fillChunkUnordered( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final 
WritableIntChunk chunk = dest.asWritableIntChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { final long rowKey = keys.get(ii); if (rowKey == RowSequence.NULL_ROW_KEY) { - intDest.set(ii, NULL_INT); + chunk.set(ii, NULL_INT); } else { - intDest.set(ii, getUnsafe((int)(rowKey))); + // region conversion + chunk.set(ii, getUnsafe((int)(rowKey))); + // endregion conversion } } } + // endregion fillChunkUnordered @Override public void fillPrevChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { @@ -277,6 +340,6 @@ public void prepareForParallelPopulation(RowSet rowSet) { // nothing to do } - // region reinterpret - // endregion reinterpret + // region reinterpretation + // endregion reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DLongArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DLongArraySource.java index 6b1190a8cc4..33a123aafb9 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DLongArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DLongArraySource.java @@ -8,6 +8,15 @@ */ package io.deephaven.engine.table.impl.sources.immutable; +import java.util.function.LongFunction; +import java.util.function.ToLongFunction; +import java.time.Instant; +import java.time.ZonedDateTime; +import java.time.LocalDate; +import java.time.LocalTime; +import io.deephaven.base.verify.Require; +import java.time.ZoneId; + import io.deephaven.engine.table.ColumnSource; import io.deephaven.time.DateTime; @@ -42,7 +51,10 @@ * * If your size is smaller than the maximum array size, prefer {@link ImmutableLongArraySource}. 
*/ -public class Immutable2DLongArraySource extends AbstractDeferredGroupingColumnSource implements ImmutableColumnSourceGetDefaults.ForLong, WritableColumnSource, FillUnordered, InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation { +public class Immutable2DLongArraySource extends AbstractDeferredGroupingColumnSource + implements ImmutableColumnSourceGetDefaults.ForLong, WritableColumnSource, FillUnordered, + InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation + , ConvertableTimeSource { private static final int DEFAULT_SEGMENT_SHIFT = 30; private final long segmentShift; private final int segmentMask; @@ -150,27 +162,86 @@ public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk destination, RowSequence rowSequence) { - final WritableLongChunk asLongChunk = destination.asWritableLongChunk(); - final MutableInt destPos = new MutableInt(0); + // region fillChunkByRanges + /* TYPE_MIXIN */ void fillChunkByRanges( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableLongChunk chunk = destination.asWritableLongChunk(); + // endregion chunkDecl + final MutableInt destPosition = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { while (start < end) { final int segment = keyToSegment(start); final int offset = keyToOffset(start); final long segmentEnd = start | segmentMask; final long realEnd = Math.min(segmentEnd, end); - final int rangeLength = (int)(realEnd - start + 1); - asLongChunk.copyFromTypedArray(data[segment], offset, destPos.getAndAdd(rangeLength), rangeLength); - start += rangeLength; + final int length = (int)(realEnd - start + 1); + // region copyFromTypedArrayImmutable2D + chunk.copyFromTypedArray(data[segment], offset, destPosition.getAndAdd(length), length); + // endregion copyFromTypedArrayImmutable2D + 
start += length; } }); } - - private void fillChunkByKeys(WritableChunk destination, RowSequence rowSequence) { - final WritableLongChunk asLongChunk = destination.asWritableLongChunk(); + void fillChunkByRanges( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + , LongFunction converter) { + // region chunkDecl + final WritableObjectChunk chunk = destination.asWritableObjectChunk(); + // endregion chunkDecl + final MutableInt destPosition = new MutableInt(0); + rowSequence.forAllRowKeyRanges((long start, long end) -> { + while (start < end) { + final int segment = keyToSegment(start); + final int offset = keyToOffset(start); + final long segmentEnd = start | segmentMask; + final long realEnd = Math.min(segmentEnd, end); + final int length = (int)(realEnd - start + 1); + // region copyFromTypedArrayImmutable2D + final int destOffset = destPosition.getAndAdd(length); + for (int ii = 0; ii < length; ii++) { + chunk.set(destOffset + ii, converter.apply(data[segment][offset + ii])); + } + // endregion copyFromTypedArrayImmutable2D + start += length; + } + }); + } + // endregion fillChunkByRanges + + // region fillChunkByKeys + /* TYPE_MIXIN */ void fillChunkByKeys( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableLongChunk chunk = destination.asWritableLongChunk(); + // endregion chunkDecl + final MutableInt srcPos = new MutableInt(0); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + chunk.set(srcPos.getAndIncrement(), getUnsafe(key)); + // endregion conversion + }); + } + void fillChunkByKeys( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + , LongFunction converter) { + // region chunkDecl + final WritableObjectChunk chunk = destination.asWritableObjectChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> 
asLongChunk.set(srcPos.getAndIncrement(), getUnsafe(key))); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + chunk.set(srcPos.getAndIncrement(),converter.apply( getUnsafe(key))); + // endregion conversion + }); } + // endregion fillChunkByKeys @Override public Chunk getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { @@ -208,14 +279,67 @@ public void fillFromChunk(@NotNull FillFromContext context, @NotNull Chunk src, RowSequence rowSequence) { - final LongChunk asLongChunk = src.asLongChunk(); + // region fillFromChunkByKeys + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final LongChunk chunk = src.asLongChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> set(key, asLongChunk.get(srcPos.getAndIncrement()))); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + set(key, chunk.get(srcPos.getAndIncrement())); + // endregion conversion + }); } - - private void fillFromChunkByRanges(Chunk src, RowSequence rowSequence) { - final LongChunk asLongChunk = src.asLongChunk(); + void fillFromChunkByKeys( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + , ToLongFunction converter) { + // region chunkDecl + final ObjectChunk chunk = src.asObjectChunk(); + // endregion chunkDecl + final MutableInt srcPos = new MutableInt(0); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + set(key,converter.applyAsLong( chunk.get(srcPos.getAndIncrement()))); + // endregion conversion + }); + } + // endregion fillFromChunkByKeys + + // region fillFromChunkByRanges + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final LongChunk chunk = src.asLongChunk(); + // endregion chunkDecl + final MutableInt srcPos = new 
MutableInt(0); + rowSequence.forAllRowKeyRanges((long start, long end) -> { + while (start < end) { + final int segment = keyToSegment(start); + final int destOffset = keyToOffset(start); + final long segmentEnd = start | segmentMask; + final long realEnd = Math.min(segmentEnd, end); + final int length = (int)(realEnd - start + 1); + // region copyToTypedArrayImmutable2D + chunk.copyToTypedArray(srcPos.getAndAdd(length), data[segment], destOffset, length); + // endregion copyToTypedArrayImmutable2D + start += length; + } + }); + } + void fillFromChunkByRanges( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + , ToLongFunction converter) { + // region chunkDecl + final ObjectChunk chunk = src.asObjectChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { while (start < end) { @@ -223,33 +347,94 @@ private void fillFromChunkByRanges(Chunk src, RowSequence rowS final int destOffset = keyToOffset(start); final long segmentEnd = start | segmentMask; final long realEnd = Math.min(segmentEnd, end); - final int rangeLength = (int)(realEnd - start + 1); - asLongChunk.copyToTypedArray(srcPos.getAndAdd(rangeLength), data[segment], destOffset, rangeLength); - start += rangeLength; + final int length = (int)(realEnd - start + 1); + // region copyToTypedArrayImmutable2D + final int offset = srcPos.getAndAdd(length); + for (int jj = 0; jj < length; jj++) { + data[segment][destOffset + jj] = converter.applyAsLong(chunk.get(offset + jj)); + } + // endregion copyToTypedArrayImmutable2D + start += length; } }); } + // endregion fillFromChunkByRanges + // region fillFromChunkUnordered @Override - public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, @NotNull LongChunk keys) { - final LongChunk asLongChunk = src.asLongChunk(); + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, 
+ @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final LongChunk chunk = src.asLongChunk(); + // endregion chunkDecl + for (int ii = 0; ii < keys.size(); ++ii) { + // region conversion + set(keys.get(ii), chunk.get(ii)); + // endregion conversion + } + } + + public void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + , ToLongFunction converter) { + // region chunkDecl + final ObjectChunk chunk = src.asObjectChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { - set(keys.get(ii), asLongChunk.get(ii)); + // region conversion + set(keys.get(ii),converter.applyAsLong( chunk.get(ii))); + // endregion conversion } } + // endregion fillFromChunkUnordered + // region fillChunkUnordered @Override - public void fillChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableLongChunk longDest = dest.asWritableLongChunk(); + public /* TYPE_MIXIN */ void fillChunkUnordered( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableLongChunk chunk = dest.asWritableLongChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { final long rowKey = keys.get(ii); if (rowKey == RowSequence.NULL_ROW_KEY) { - longDest.set(ii, NULL_LONG); + chunk.set(ii, NULL_LONG); } else { - longDest.set(ii, getUnsafe((int)(rowKey))); + // region conversion + chunk.set(ii, getUnsafe((int)(rowKey))); + // endregion conversion } } } + + public void fillChunkUnordered( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + , LongFunction converter) { + // region chunkDecl + final WritableObjectChunk chunk = dest.asWritableObjectChunk(); + // endregion chunkDecl + for (int ii = 0; ii < keys.size(); ++ii) { + final long rowKey = keys.get(ii); 
+ if (rowKey == RowSequence.NULL_ROW_KEY) { + chunk.set(ii, null); + } else { + // region conversion + chunk.set(ii,converter.apply( getUnsafe((int)(rowKey)))); + // endregion conversion + } + } + } + // endregion fillChunkUnordered @Override public void fillPrevChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { @@ -281,17 +466,59 @@ public void prepareForParallelPopulation(RowSet rowSet) { // nothing to do } - // region reinterpret + // region reinterpretation + @Override + public boolean allowsReinterpret(@NotNull final Class alternateDataType) { + return alternateDataType == long.class || alternateDataType == Instant.class || alternateDataType == DateTime.class; + } + + @SuppressWarnings("unchecked") + @Override + protected ColumnSource doReinterpret(@NotNull Class alternateDataType) { + if (alternateDataType == this.getType()) { + return (ColumnSource) this; + } else if(alternateDataType == DateTime.class) { + return (ColumnSource) toDateTime(); + } else if (alternateDataType == Instant.class) { + return (ColumnSource) toInstant(); + } + + throw new IllegalArgumentException("Cannot reinterpret `" + getType().getName() + "` to `" + alternateDataType.getName() + "`"); + } + + @Override + public boolean supportsTimeConversion() { + return true; + } + + @Override + public ColumnSource toZonedDateTime(final @NotNull ZoneId zone) { + return new Immutable2DZonedDateTimeArraySource(Require.neqNull(zone, "zone"), this); + } + + @Override + public ColumnSource toLocalDate(final @NotNull ZoneId zone) { + return new LocalDateWrapperSource(toZonedDateTime(zone), zone); + } + + @Override + public ColumnSource toLocalTime(final @NotNull ZoneId zone) { + return new LocalTimeWrapperSource(toZonedDateTime(zone), zone); + } + @Override - public boolean allowsReinterpret( - @NotNull final Class alternateDataType) { - return alternateDataType == DateTime.class; + public ColumnSource toDateTime() { + return new 
Immutable2DDateTimeArraySource(this); } - protected ColumnSource doReinterpret( - @NotNull Class alternateDataType) { - //noinspection unchecked - return (ColumnSource) new LongAsDateTimeColumnSource(this); + @Override + public ColumnSource toInstant() { + return new Immutable2DInstantArraySource(this); + } + + @Override + public ColumnSource toEpochNano() { + return this; } - // endregion reinterpret + // endregion reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DNanosBasedTimeArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DNanosBasedTimeArraySource.java new file mode 100644 index 00000000000..4fee1b01869 --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DNanosBasedTimeArraySource.java @@ -0,0 +1,264 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.sources.immutable; + +import io.deephaven.base.verify.Require; +import io.deephaven.chunk.*; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.rowset.RowSequence; +import io.deephaven.engine.rowset.RowSequenceFactory; +import io.deephaven.engine.rowset.RowSet; +import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.SharedContext; +import io.deephaven.engine.table.WritableColumnSource; +import io.deephaven.engine.table.WritableSourceWithPrepareForParallelPopulation; +import io.deephaven.engine.table.impl.DefaultGetContext; +import io.deephaven.engine.table.impl.ImmutableColumnSourceGetDefaults; +import io.deephaven.engine.table.impl.sources.*; +import io.deephaven.time.DateTime; +import org.apache.commons.lang3.mutable.MutableInt; +import org.jetbrains.annotations.NotNull; + +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalTime; 
+import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.util.Arrays; + +// region boxing imports +import static io.deephaven.util.QueryConstants.NULL_LONG; +// endregion boxing imports + +public abstract class Immutable2DNanosBasedTimeArraySource + extends AbstractDeferredGroupingColumnSource + implements WritableColumnSource, FillUnordered, InMemoryColumnSource, ConvertableTimeSource, + WritableSourceWithPrepareForParallelPopulation { + + protected final Immutable2DLongArraySource nanoSource; + + // region constructor + public Immutable2DNanosBasedTimeArraySource( + final @NotNull Class type) { + super(type); + this.nanoSource = new Immutable2DLongArraySource(); + } + + public Immutable2DNanosBasedTimeArraySource( + final @NotNull Class type, + final Immutable2DLongArraySource nanoSource) { + super(type); + this.nanoSource = nanoSource; + } + // endregion constructor + + // region Getters & Setters + protected abstract TIME_TYPE makeValue(final long nanos); + + protected abstract long toNanos(final TIME_TYPE value); + + @Override + public TIME_TYPE get(long rowKey) { + return makeValue(getLong(rowKey)); + } + + @Override + public TIME_TYPE getPrev(long rowKey) { + return makeValue(getPrevLong(rowKey)); + } + + @Override + public final long getLong(long rowKey) { + return nanoSource.getLong(rowKey); + } + + @Override + public final void setNull(long key) { + nanoSource.setNull(key); + } + + @Override + public final void set(long key, long value) { + nanoSource.set(key, value); + } + // endregion Getters & Setters + + @Override + public void ensureCapacity(long capacity, boolean nullFilled) { + nanoSource.ensureCapacity(capacity, nullFilled); + } + + @Override + public FillFromContext makeFillFromContext(int chunkCapacity) { + return nanoSource.makeFillFromContext(chunkCapacity); + } + + @Override + public FillContext makeFillContext(int chunkCapacity, SharedContext sharedContext) { + return nanoSource.makeFillContext(chunkCapacity, sharedContext); + 
} + + @Override + public void fillChunk( + @NotNull final FillContext context, + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence) { + if (rowSequence.getAverageRunLengthEstimate() >= ArrayBackedColumnSource.USE_RANGES_AVERAGE_RUN_LENGTH) { + fillChunkByRanges(destination, rowSequence); + } else { + fillChunkByKeys(destination, rowSequence); + } + } + + private void fillChunkByRanges( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence) { + nanoSource.fillChunkByRanges(destination, rowSequence, this::makeValue); + } + + private void fillChunkByKeys( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence) { + nanoSource.fillChunkByKeys(destination, rowSequence, this::makeValue); + } + + @Override + public void fillFromChunk( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence) { + if (rowSequence.getAverageRunLengthEstimate() >= ArrayBackedColumnSource.USE_RANGES_AVERAGE_RUN_LENGTH) { + fillFromChunkByRanges(src, rowSequence); + } else { + fillFromChunkByKeys(src, rowSequence); + } + } + + private void fillFromChunkByKeys( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence) { + nanoSource.fillFromChunkByKeys(src, rowSequence, this::toNanos); + } + + private void fillFromChunkByRanges( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence) { + nanoSource.fillFromChunkByRanges(src, rowSequence, this::toNanos); + } + + @Override + public void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys) { + nanoSource.fillFromChunkUnordered(context, src, keys, this::toNanos); + } + + @Override + public void fillChunkUnordered( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys) { + nanoSource.fillChunkUnordered(context, dest, keys, this::makeValue); + } + + 
@Override + public void fillPrevChunkUnordered( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys) { + fillChunkUnordered(context, dest, keys); + } + + @Override + public void fillPrevChunk( + @NotNull final FillContext context, + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence) { + fillChunk(context, destination, rowSequence); + } + + @Override + public Chunk getPrevChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { + return getChunk(context, rowSequence); + } + + @Override + public Chunk getPrevChunk(@NotNull GetContext context, long firstKey, long lastKey) { + return getChunk(context, firstKey, lastKey); + } + + @Override + public boolean providesFillUnordered() { + return true; + } + + @Override + public void prepareForParallelPopulation(RowSet rowSet) { + nanoSource.prepareForParallelPopulation(rowSet); + } + + // region reinterpretation + @Override + public boolean allowsReinterpret( + @NotNull final Class alternateDataType) { + return alternateDataType == long.class || alternateDataType == Instant.class + || alternateDataType == DateTime.class; + } + + @SuppressWarnings("unchecked") + @Override + protected ColumnSource doReinterpret( + @NotNull Class alternateDataType) { + if (alternateDataType == this.getType()) { + return (ColumnSource) this; + } else if (alternateDataType == DateTime.class) { + return (ColumnSource) toDateTime(); + } else if (alternateDataType == long.class || alternateDataType == Long.class) { + return (ColumnSource) toEpochNano(); + } else if (alternateDataType == Instant.class) { + return (ColumnSource) toInstant(); + } + + throw new IllegalArgumentException( + "Cannot reinterpret `" + getType().getName() + "` to `" + alternateDataType.getName() + "`"); + } + + @Override + public boolean supportsTimeConversion() { + return true; + } + + @Override + public ColumnSource toZonedDateTime(final @NotNull ZoneId zone) { + return new 
Immutable2DZonedDateTimeArraySource(Require.neqNull(zone, "zone"), nanoSource); + } + + @Override + public ColumnSource toLocalDate(final @NotNull ZoneId zone) { + return new LocalDateWrapperSource(toZonedDateTime(zone), zone); + } + + @Override + public ColumnSource toLocalTime(final @NotNull ZoneId zone) { + return new LocalTimeWrapperSource(toZonedDateTime(zone), zone); + } + + @Override + public ColumnSource toDateTime() { + return new Immutable2DDateTimeArraySource(nanoSource); + } + + @Override + public ColumnSource toInstant() { + return new Immutable2DInstantArraySource(nanoSource); + } + + @Override + public ColumnSource toEpochNano() { + return nanoSource; + } + // endregion reinterpretation +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DObjectArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DObjectArraySource.java index d8a25580271..9b91f4d9089 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DObjectArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DObjectArraySource.java @@ -37,7 +37,10 @@ * * If your size is smaller than the maximum array size, prefer {@link ImmutableObjectArraySource}. 
*/ -public class Immutable2DObjectArraySource extends AbstractDeferredGroupingColumnSource implements ImmutableColumnSourceGetDefaults.ForObject, WritableColumnSource, FillUnordered, InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation { +public class Immutable2DObjectArraySource extends AbstractDeferredGroupingColumnSource + implements ImmutableColumnSourceGetDefaults.ForObject, WritableColumnSource, FillUnordered, + InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation + /* MIXIN_IMPLS */ { private static final int DEFAULT_SEGMENT_SHIFT = 30; private final long segmentShift; private final int segmentMask; @@ -145,27 +148,47 @@ public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk destination, RowSequence rowSequence) { - final WritableObjectChunk asObjectChunk = destination.asWritableObjectChunk(); - final MutableInt destPos = new MutableInt(0); + // region fillChunkByRanges + /* TYPE_MIXIN */ void fillChunkByRanges( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableObjectChunk chunk = destination.asWritableObjectChunk(); + // endregion chunkDecl + final MutableInt destPosition = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { while (start < end) { final int segment = keyToSegment(start); final int offset = keyToOffset(start); final long segmentEnd = start | segmentMask; final long realEnd = Math.min(segmentEnd, end); - final int rangeLength = (int)(realEnd - start + 1); - asObjectChunk.copyFromTypedArray((T[])data[segment], offset, destPos.getAndAdd(rangeLength), rangeLength); - start += rangeLength; + final int length = (int)(realEnd - start + 1); + // region copyFromTypedArrayImmutable2D + chunk.copyFromTypedArray((T[])data[segment], offset, destPosition.getAndAdd(length), length); + // endregion 
copyFromTypedArrayImmutable2D + start += length; } }); } - - private void fillChunkByKeys(WritableChunk destination, RowSequence rowSequence) { - final WritableObjectChunk asObjectChunk = destination.asWritableObjectChunk(); + // endregion fillChunkByRanges + + // region fillChunkByKeys + /* TYPE_MIXIN */ void fillChunkByKeys( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableObjectChunk chunk = destination.asWritableObjectChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> asObjectChunk.set(srcPos.getAndIncrement(), getUnsafe(key))); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + chunk.set(srcPos.getAndIncrement(), getUnsafe(key)); + // endregion conversion + }); } + // endregion fillChunkByKeys @Override public Chunk getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { @@ -203,14 +226,31 @@ public void fillFromChunk(@NotNull FillFromContext context, @NotNull Chunk src, RowSequence rowSequence) { - final ObjectChunk asObjectChunk = src.asObjectChunk(); + // region fillFromChunkByKeys + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final ObjectChunk chunk = src.asObjectChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> set(key, asObjectChunk.get(srcPos.getAndIncrement()))); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + set(key, chunk.get(srcPos.getAndIncrement())); + // endregion conversion + }); } - - private void fillFromChunkByRanges(Chunk src, RowSequence rowSequence) { - final ObjectChunk asObjectChunk = src.asObjectChunk(); + // endregion fillFromChunkByKeys + + // region fillFromChunkByRanges + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final 
Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final ObjectChunk chunk = src.asObjectChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { while (start < end) { @@ -218,33 +258,56 @@ private void fillFromChunkByRanges(Chunk src, RowSequence rowS final int destOffset = keyToOffset(start); final long segmentEnd = start | segmentMask; final long realEnd = Math.min(segmentEnd, end); - final int rangeLength = (int)(realEnd - start + 1); - asObjectChunk.copyToTypedArray(srcPos.getAndAdd(rangeLength), (T[])data[segment], destOffset, rangeLength); - start += rangeLength; + final int length = (int)(realEnd - start + 1); + // region copyToTypedArrayImmutable2D + chunk.copyToTypedArray(srcPos.getAndAdd(length), (T[])data[segment], destOffset, length); + // endregion copyToTypedArrayImmutable2D + start += length; } }); } + // endregion fillFromChunkByRanges + // region fillFromChunkUnordered @Override - public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, @NotNull LongChunk keys) { - final ObjectChunk asObjectChunk = src.asObjectChunk(); + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final ObjectChunk chunk = src.asObjectChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { - set(keys.get(ii), asObjectChunk.get(ii)); + // region conversion + set(keys.get(ii), chunk.get(ii)); + // endregion conversion } } + // endregion fillFromChunkUnordered + // region fillChunkUnordered @Override - public void fillChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableObjectChunk ObjectDest = dest.asWritableObjectChunk(); + public /* TYPE_MIXIN */ void fillChunkUnordered( + @NotNull final 
FillContext context, + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableObjectChunk chunk = dest.asWritableObjectChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { final long rowKey = keys.get(ii); if (rowKey == RowSequence.NULL_ROW_KEY) { - ObjectDest.set(ii, null); + chunk.set(ii, null); } else { - ObjectDest.set(ii, getUnsafe((int)(rowKey))); + // region conversion + chunk.set(ii, getUnsafe((int)(rowKey))); + // endregion conversion } } } + // endregion fillChunkUnordered @Override public void fillPrevChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { @@ -276,6 +339,6 @@ public void prepareForParallelPopulation(RowSet rowSet) { // nothing to do } - // region reinterpret - // endregion reinterpret + // region reinterpretation + // endregion reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DShortArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DShortArraySource.java index 922f9c141b9..b32fe62ae1a 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DShortArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DShortArraySource.java @@ -38,7 +38,10 @@ * * If your size is smaller than the maximum array size, prefer {@link ImmutableShortArraySource}. 
*/ -public class Immutable2DShortArraySource extends AbstractDeferredGroupingColumnSource implements ImmutableColumnSourceGetDefaults.ForShort, WritableColumnSource, FillUnordered, InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation { +public class Immutable2DShortArraySource extends AbstractDeferredGroupingColumnSource + implements ImmutableColumnSourceGetDefaults.ForShort, WritableColumnSource, FillUnordered, + InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation + /* MIXIN_IMPLS */ { private static final int DEFAULT_SEGMENT_SHIFT = 30; private final long segmentShift; private final int segmentMask; @@ -146,27 +149,47 @@ public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk destination, RowSequence rowSequence) { - final WritableShortChunk asShortChunk = destination.asWritableShortChunk(); - final MutableInt destPos = new MutableInt(0); + // region fillChunkByRanges + /* TYPE_MIXIN */ void fillChunkByRanges( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableShortChunk chunk = destination.asWritableShortChunk(); + // endregion chunkDecl + final MutableInt destPosition = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { while (start < end) { final int segment = keyToSegment(start); final int offset = keyToOffset(start); final long segmentEnd = start | segmentMask; final long realEnd = Math.min(segmentEnd, end); - final int rangeLength = (int)(realEnd - start + 1); - asShortChunk.copyFromTypedArray(data[segment], offset, destPos.getAndAdd(rangeLength), rangeLength); - start += rangeLength; + final int length = (int)(realEnd - start + 1); + // region copyFromTypedArrayImmutable2D + chunk.copyFromTypedArray(data[segment], offset, destPosition.getAndAdd(length), length); + // endregion 
copyFromTypedArrayImmutable2D + start += length; } }); } - - private void fillChunkByKeys(WritableChunk destination, RowSequence rowSequence) { - final WritableShortChunk asShortChunk = destination.asWritableShortChunk(); + // endregion fillChunkByRanges + + // region fillChunkByKeys + /* TYPE_MIXIN */ void fillChunkByKeys( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableShortChunk chunk = destination.asWritableShortChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> asShortChunk.set(srcPos.getAndIncrement(), getUnsafe(key))); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + chunk.set(srcPos.getAndIncrement(), getUnsafe(key)); + // endregion conversion + }); } + // endregion fillChunkByKeys @Override public Chunk getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { @@ -204,14 +227,31 @@ public void fillFromChunk(@NotNull FillFromContext context, @NotNull Chunk src, RowSequence rowSequence) { - final ShortChunk asShortChunk = src.asShortChunk(); + // region fillFromChunkByKeys + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final ShortChunk chunk = src.asShortChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> set(key, asShortChunk.get(srcPos.getAndIncrement()))); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + set(key, chunk.get(srcPos.getAndIncrement())); + // endregion conversion + }); } - - private void fillFromChunkByRanges(Chunk src, RowSequence rowSequence) { - final ShortChunk asShortChunk = src.asShortChunk(); + // endregion fillFromChunkByKeys + + // region fillFromChunkByRanges + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final Chunk src, + 
@NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final ShortChunk chunk = src.asShortChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { while (start < end) { @@ -219,33 +259,56 @@ private void fillFromChunkByRanges(Chunk src, RowSequence rowS final int destOffset = keyToOffset(start); final long segmentEnd = start | segmentMask; final long realEnd = Math.min(segmentEnd, end); - final int rangeLength = (int)(realEnd - start + 1); - asShortChunk.copyToTypedArray(srcPos.getAndAdd(rangeLength), data[segment], destOffset, rangeLength); - start += rangeLength; + final int length = (int)(realEnd - start + 1); + // region copyToTypedArrayImmutable2D + chunk.copyToTypedArray(srcPos.getAndAdd(length), data[segment], destOffset, length); + // endregion copyToTypedArrayImmutable2D + start += length; } }); } + // endregion fillFromChunkByRanges + // region fillFromChunkUnordered @Override - public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, @NotNull LongChunk keys) { - final ShortChunk asShortChunk = src.asShortChunk(); + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final ShortChunk chunk = src.asShortChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { - set(keys.get(ii), asShortChunk.get(ii)); + // region conversion + set(keys.get(ii), chunk.get(ii)); + // endregion conversion } } + // endregion fillFromChunkUnordered + // region fillChunkUnordered @Override - public void fillChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableShortChunk shortDest = dest.asWritableShortChunk(); + public /* TYPE_MIXIN */ void fillChunkUnordered( + @NotNull final FillContext context, + @NotNull 
final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableShortChunk chunk = dest.asWritableShortChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { final long rowKey = keys.get(ii); if (rowKey == RowSequence.NULL_ROW_KEY) { - shortDest.set(ii, NULL_SHORT); + chunk.set(ii, NULL_SHORT); } else { - shortDest.set(ii, getUnsafe((int)(rowKey))); + // region conversion + chunk.set(ii, getUnsafe((int)(rowKey))); + // endregion conversion } } } + // endregion fillChunkUnordered @Override public void fillPrevChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { @@ -277,6 +340,6 @@ public void prepareForParallelPopulation(RowSet rowSet) { // nothing to do } - // region reinterpret - // endregion reinterpret + // region reinterpretation + // endregion reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DZonedDateTimeArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DZonedDateTimeArraySource.java new file mode 100644 index 00000000000..1abb613c590 --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DZonedDateTimeArraySource.java @@ -0,0 +1,58 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.sources.immutable; + +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.impl.ImmutableColumnSourceGetDefaults; +import io.deephaven.engine.table.impl.sources.ConvertableTimeSource; +import io.deephaven.time.DateTimeUtils; +import org.jetbrains.annotations.NotNull; + +import java.time.ZoneId; +import java.time.ZonedDateTime; + +/** + * ImmutableArraySource for {@link ZonedDateTime}s. Allows reinterpretation as long. 
+ */ +public class Immutable2DZonedDateTimeArraySource extends Immutable2DNanosBasedTimeArraySource + implements ImmutableColumnSourceGetDefaults.ForObject, ConvertableTimeSource.Zoned { + private final ZoneId zone; + + public Immutable2DZonedDateTimeArraySource( + final @NotNull ZoneId zone) { + super(ZonedDateTime.class); + this.zone = zone; + } + + public Immutable2DZonedDateTimeArraySource( + final @NotNull ZoneId zone, + final @NotNull Immutable2DLongArraySource nanoSource) { + super(ZonedDateTime.class, nanoSource); + this.zone = zone; + } + + @Override + protected ZonedDateTime makeValue(long nanos) { + return DateTimeUtils.makeZonedDateTime(nanos, zone); + } + + @Override + protected long toNanos(ZonedDateTime value) { + return DateTimeUtils.toEpochNano(value); + } + + @Override + public ColumnSource toZonedDateTime(final @NotNull ZoneId zone) { + if (this.zone.equals(zone)) { + return this; + } + + return new Immutable2DZonedDateTimeArraySource(zone, this.nanoSource); + } + + @Override + public ZoneId getZone() { + return zone; + } +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableByteArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableByteArraySource.java index 6e91cbb683a..7d51b501ae5 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableByteArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableByteArraySource.java @@ -39,7 +39,10 @@ * * If your size is greater than the maximum capacity of an array, prefer {@link Immutable2DByteArraySource}. 
*/ -public class ImmutableByteArraySource extends AbstractDeferredGroupingColumnSource implements ImmutableColumnSourceGetDefaults.ForByte, WritableColumnSource, FillUnordered, InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation { +public class ImmutableByteArraySource extends AbstractDeferredGroupingColumnSource + implements ImmutableColumnSourceGetDefaults.ForByte, WritableColumnSource, FillUnordered, + InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation + /* MIXIN_IMPLS */ { private byte[] data; // region constructor @@ -113,22 +116,42 @@ public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk destination, RowSequence rowSequence) { - final WritableByteChunk asByteChunk = destination.asWritableByteChunk(); + // region fillChunkByRanges + /* TYPE_MIXIN */ void fillChunkByRanges( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableByteChunk chunk = destination.asWritableByteChunk(); + // endregion chunkDecl final MutableInt destPosition = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { - final int rangeLength = (int)(end - start + 1); - asByteChunk.copyFromTypedArray(data, (int)start, destPosition.getAndAdd(rangeLength), rangeLength); + final int length = (int)(end - start + 1); + // region copyFromTypedArrayImmutable + chunk.copyFromTypedArray(data, (int)start, destPosition.getAndAdd(length), length); + // endregion copyFromTypedArrayImmutable }); - asByteChunk.setSize(destPosition.intValue()); + chunk.setSize(destPosition.intValue()); } - - private void fillChunkByKeys(WritableChunk destination, RowSequence rowSequence) { - final WritableByteChunk asByteChunk = destination.asWritableByteChunk(); + // endregion fillChunkByRanges + + // region fillChunkByKeys + /* TYPE_MIXIN */ void fillChunkByKeys( + @NotNull 
final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableByteChunk chunk = destination.asWritableByteChunk(); + // endregion chunkDecl final MutableInt destPosition = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> asByteChunk.set(destPosition.getAndIncrement(), getUnsafe(key))); - asByteChunk.setSize(destPosition.intValue()); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + chunk.set(destPosition.getAndIncrement(), getUnsafe(key)); + // endregion conversion + }); + chunk.setSize(destPosition.intValue()); } + // endregion fillChunkByKeys @Override public Chunk getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { @@ -172,42 +195,82 @@ public void fillFromChunk(@NotNull FillFromContext context, @NotNull Chunk src, RowSequence rowSequence) { - final ByteChunk asByteChunk = src.asByteChunk(); + // region fillFromChunkByKeys + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final ByteChunk chunk = src.asByteChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> set(key, asByteChunk.get(srcPos.getAndIncrement()))); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + set(key, chunk.get(srcPos.getAndIncrement())); + // endregion conversion + }); } - - private void fillFromChunkByRanges(Chunk src, RowSequence rowSequence) { - final ByteChunk asByteChunk = src.asByteChunk(); + // endregion fillFromChunkByKeys + + // region fillFromChunkByRanges + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final ByteChunk chunk = src.asByteChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, 
long end) -> { - final int rangeLength = (int)(end - start + 1); - asByteChunk.copyToTypedArray(srcPos.getAndAdd(rangeLength), data, (int)start, rangeLength); + final int length = (int)(end - start + 1); + // region copyToTypedArrayImmutable + chunk.copyToTypedArray(srcPos.getAndAdd(length), data, (int)start, length); + // endregion copyToTypedArrayImmutable }); } + // endregion fillFromChunkByRanges + // region fillFromChunkUnordered @Override - public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, @NotNull LongChunk keys) { - final ByteChunk asByteChunk = src.asByteChunk(); + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final ByteChunk chunk = src.asByteChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { - set(keys.get(ii), asByteChunk.get(ii)); + // region conversion + set(keys.get(ii), chunk.get(ii)); + // endregion conversion } } + // endregion fillFromChunkUnordered + // region fillChunkUnordered @Override - public void fillChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableByteChunk byteDest = dest.asWritableByteChunk(); + public /* TYPE_MIXIN */ void fillChunkUnordered( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableByteChunk chunk = dest.asWritableByteChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { final long longKey = keys.get(ii); if (longKey == RowSet.NULL_ROW_KEY) { - byteDest.set(ii, NULL_BYTE); + chunk.set(ii, NULL_BYTE); } else { final int key = (int)longKey; - byteDest.set(ii, getUnsafe(key)); + // region conversion + chunk.set(ii, getUnsafe(key)); + // endregion conversion } } } + // endregion fillChunkUnordered @Override 
public void fillPrevChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { @@ -251,7 +314,7 @@ public void setArray(byte [] array) { } // endregion setArray - // region reinterpret + // region reinterpretation @Override public boolean allowsReinterpret( @NotNull final Class alternateDataType) { @@ -263,5 +326,5 @@ protected ColumnSource doReinterpret( //noinspection unchecked return (ColumnSource) new ByteAsBooleanColumnSource(this); } - // endregion reinterpret + // endregion reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableCharArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableCharArraySource.java index 4009454c905..0ab7c140525 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableCharArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableCharArraySource.java @@ -32,7 +32,10 @@ * * If your size is greater than the maximum capacity of an array, prefer {@link Immutable2DCharArraySource}. 
*/ -public class ImmutableCharArraySource extends AbstractDeferredGroupingColumnSource implements ImmutableColumnSourceGetDefaults.ForChar, WritableColumnSource, FillUnordered, InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation { +public class ImmutableCharArraySource extends AbstractDeferredGroupingColumnSource + implements ImmutableColumnSourceGetDefaults.ForChar, WritableColumnSource, FillUnordered, + InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation + /* MIXIN_IMPLS */ { private char[] data; // region constructor @@ -106,22 +109,42 @@ public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk destination, RowSequence rowSequence) { - final WritableCharChunk asCharChunk = destination.asWritableCharChunk(); + // region fillChunkByRanges + /* TYPE_MIXIN */ void fillChunkByRanges( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableCharChunk chunk = destination.asWritableCharChunk(); + // endregion chunkDecl final MutableInt destPosition = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { - final int rangeLength = (int)(end - start + 1); - asCharChunk.copyFromTypedArray(data, (int)start, destPosition.getAndAdd(rangeLength), rangeLength); + final int length = (int)(end - start + 1); + // region copyFromTypedArrayImmutable + chunk.copyFromTypedArray(data, (int)start, destPosition.getAndAdd(length), length); + // endregion copyFromTypedArrayImmutable }); - asCharChunk.setSize(destPosition.intValue()); + chunk.setSize(destPosition.intValue()); } - - private void fillChunkByKeys(WritableChunk destination, RowSequence rowSequence) { - final WritableCharChunk asCharChunk = destination.asWritableCharChunk(); + // endregion fillChunkByRanges + + // region fillChunkByKeys + /* TYPE_MIXIN */ void fillChunkByKeys( + @NotNull 
final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableCharChunk chunk = destination.asWritableCharChunk(); + // endregion chunkDecl final MutableInt destPosition = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> asCharChunk.set(destPosition.getAndIncrement(), getUnsafe(key))); - asCharChunk.setSize(destPosition.intValue()); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + chunk.set(destPosition.getAndIncrement(), getUnsafe(key)); + // endregion conversion + }); + chunk.setSize(destPosition.intValue()); } + // endregion fillChunkByKeys @Override public Chunk getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { @@ -165,42 +188,82 @@ public void fillFromChunk(@NotNull FillFromContext context, @NotNull Chunk src, RowSequence rowSequence) { - final CharChunk asCharChunk = src.asCharChunk(); + // region fillFromChunkByKeys + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final CharChunk chunk = src.asCharChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> set(key, asCharChunk.get(srcPos.getAndIncrement()))); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + set(key, chunk.get(srcPos.getAndIncrement())); + // endregion conversion + }); } - - private void fillFromChunkByRanges(Chunk src, RowSequence rowSequence) { - final CharChunk asCharChunk = src.asCharChunk(); + // endregion fillFromChunkByKeys + + // region fillFromChunkByRanges + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final CharChunk chunk = src.asCharChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, 
long end) -> { - final int rangeLength = (int)(end - start + 1); - asCharChunk.copyToTypedArray(srcPos.getAndAdd(rangeLength), data, (int)start, rangeLength); + final int length = (int)(end - start + 1); + // region copyToTypedArrayImmutable + chunk.copyToTypedArray(srcPos.getAndAdd(length), data, (int)start, length); + // endregion copyToTypedArrayImmutable }); } + // endregion fillFromChunkByRanges + // region fillFromChunkUnordered @Override - public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, @NotNull LongChunk keys) { - final CharChunk asCharChunk = src.asCharChunk(); + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final CharChunk chunk = src.asCharChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { - set(keys.get(ii), asCharChunk.get(ii)); + // region conversion + set(keys.get(ii), chunk.get(ii)); + // endregion conversion } } + // endregion fillFromChunkUnordered + // region fillChunkUnordered @Override - public void fillChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableCharChunk charDest = dest.asWritableCharChunk(); + public /* TYPE_MIXIN */ void fillChunkUnordered( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableCharChunk chunk = dest.asWritableCharChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { final long longKey = keys.get(ii); if (longKey == RowSet.NULL_ROW_KEY) { - charDest.set(ii, NULL_CHAR); + chunk.set(ii, NULL_CHAR); } else { final int key = (int)longKey; - charDest.set(ii, getUnsafe(key)); + // region conversion + chunk.set(ii, getUnsafe(key)); + // endregion conversion } } } + // endregion fillChunkUnordered @Override 
public void fillPrevChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { @@ -244,6 +307,6 @@ public void setArray(char [] array) { } // endregion setArray - // region reinterpret - // endregion reinterpret + // region reinterpretation + // endregion reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantByteSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantByteSource.java index 5fb13e6f403..540ad094615 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantByteSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantByteSource.java @@ -34,7 +34,7 @@ public class ImmutableConstantByteSource extends AbstractColumnSource implements ImmutableColumnSourceGetDefaults.ForByte, ShiftData.ShiftCallback, InMemoryColumnSource, - RowKeyAgnosticChunkSource { + RowKeyAgnosticChunkSource /* MIXIN_IMPLS */ { private final byte value; @@ -74,20 +74,6 @@ public final void fillPrevChunk( @Override public final void shift(final long start, final long end, final long offset) {} - // region reinterpret - @Override - public boolean allowsReinterpret( - @NotNull final Class alternateDataType) { - return alternateDataType == Boolean.class; - } - - protected ColumnSource doReinterpret( - @NotNull Class alternateDataType) { - //noinspection unchecked - return (ColumnSource) new ByteAsBooleanColumnSource(this); - } - // endregion reinterpret - @Override public void fillChunkUnordered( @NotNull FillContext context, @@ -112,4 +98,18 @@ public void fillPrevChunkUnordered( public boolean providesFillUnordered() { return true; } + + // region reinterpretation + @Override + public boolean allowsReinterpret( + @NotNull final Class alternateDataType) { + return alternateDataType == Boolean.class; + } + + protected 
ColumnSource doReinterpret( + @NotNull Class alternateDataType) { + //noinspection unchecked + return (ColumnSource) new ByteAsBooleanColumnSource(this); + } + // endregion reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantCharSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantCharSource.java index dee2302e1c8..dd9e2fbaac1 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantCharSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantCharSource.java @@ -27,7 +27,7 @@ public class ImmutableConstantCharSource extends AbstractColumnSource implements ImmutableColumnSourceGetDefaults.ForChar, ShiftData.ShiftCallback, InMemoryColumnSource, - RowKeyAgnosticChunkSource { + RowKeyAgnosticChunkSource /* MIXIN_IMPLS */ { private final char value; @@ -67,9 +67,6 @@ public final void fillPrevChunk( @Override public final void shift(final long start, final long end, final long offset) {} - // region reinterpret - // endregion reinterpret - @Override public void fillChunkUnordered( @NotNull FillContext context, @@ -94,4 +91,7 @@ public void fillPrevChunkUnordered( public boolean providesFillUnordered() { return true; } + + // region reinterpretation + // endregion reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantDateTimeSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantDateTimeSource.java new file mode 100644 index 00000000000..dca7c384c8a --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantDateTimeSource.java @@ -0,0 +1,40 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package 
io.deephaven.engine.table.impl.sources.immutable; + +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.impl.ImmutableColumnSourceGetDefaults; +import io.deephaven.time.DateTime; +import io.deephaven.time.DateTimeUtils; +import org.jetbrains.annotations.NotNull; + +/** + * Constant ImmutableColumnSource for {@link DateTime}s. Allows reinterpretation as long. + */ +public class ImmutableConstantDateTimeSource extends ImmutableConstantNanosBasedTimeSource + implements ImmutableColumnSourceGetDefaults.ForLongAsDateTime { + + public ImmutableConstantDateTimeSource(final long nanos) { + super(DateTime.class, new ImmutableConstantLongSource(nanos)); + } + + public ImmutableConstantDateTimeSource(final @NotNull ImmutableConstantLongSource nanoSource) { + super(DateTime.class, nanoSource); + } + + @Override + protected DateTime makeValue(long nanos) { + return DateTimeUtils.nanosToTime(nanos); + } + + @Override + protected long toNanos(DateTime value) { + return DateTimeUtils.nanos(value); + } + + @Override + public ColumnSource toDateTime() { + return this; + } +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantDoubleSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantDoubleSource.java index c8192949f45..c946099992d 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantDoubleSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantDoubleSource.java @@ -32,7 +32,7 @@ public class ImmutableConstantDoubleSource extends AbstractColumnSource implements ImmutableColumnSourceGetDefaults.ForDouble, ShiftData.ShiftCallback, InMemoryColumnSource, - RowKeyAgnosticChunkSource { + RowKeyAgnosticChunkSource /* MIXIN_IMPLS */ { private final double value; @@ -72,9 +72,6 @@ public final void fillPrevChunk( @Override public final void 
shift(final long start, final long end, final long offset) {} - // region reinterpret - // endregion reinterpret - @Override public void fillChunkUnordered( @NotNull FillContext context, @@ -99,4 +96,7 @@ public void fillPrevChunkUnordered( public boolean providesFillUnordered() { return true; } + + // region reinterpretation + // endregion reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantFloatSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantFloatSource.java index 67e063c645a..68e567aa62a 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantFloatSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantFloatSource.java @@ -32,7 +32,7 @@ public class ImmutableConstantFloatSource extends AbstractColumnSource implements ImmutableColumnSourceGetDefaults.ForFloat, ShiftData.ShiftCallback, InMemoryColumnSource, - RowKeyAgnosticChunkSource { + RowKeyAgnosticChunkSource /* MIXIN_IMPLS */ { private final float value; @@ -72,9 +72,6 @@ public final void fillPrevChunk( @Override public final void shift(final long start, final long end, final long offset) {} - // region reinterpret - // endregion reinterpret - @Override public void fillChunkUnordered( @NotNull FillContext context, @@ -99,4 +96,7 @@ public void fillPrevChunkUnordered( public boolean providesFillUnordered() { return true; } + + // region reinterpretation + // endregion reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantInstantSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantInstantSource.java new file mode 100644 index 00000000000..33a5f0832a7 --- /dev/null +++ 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantInstantSource.java @@ -0,0 +1,41 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.sources.immutable; + +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.impl.ImmutableColumnSourceGetDefaults; +import io.deephaven.time.DateTimeUtils; +import org.jetbrains.annotations.NotNull; + +import java.time.Instant; + +/** + * Constant ImmutableColumnSource for {@link Instant}s. Allows reinterpretation as long. + */ +public class ImmutableConstantInstantSource extends ImmutableConstantNanosBasedTimeSource + implements ImmutableColumnSourceGetDefaults.ForLongAsInstant { + + public ImmutableConstantInstantSource(final long nanos) { + super(Instant.class, new ImmutableConstantLongSource(nanos)); + } + + public ImmutableConstantInstantSource(final @NotNull ImmutableConstantLongSource nanoSource) { + super(Instant.class, nanoSource); + } + + @Override + protected Instant makeValue(long nanos) { + return DateTimeUtils.makeInstant(nanos); + } + + @Override + protected long toNanos(Instant value) { + return DateTimeUtils.toEpochNano(value); + } + + @Override + public ColumnSource toInstant() { + return this; + } +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantIntSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantIntSource.java index 8c9d8826f8a..59299fbe3d3 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantIntSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantIntSource.java @@ -32,7 +32,7 @@ public class ImmutableConstantIntSource extends AbstractColumnSource implements ImmutableColumnSourceGetDefaults.ForInt, ShiftData.ShiftCallback, InMemoryColumnSource, - 
RowKeyAgnosticChunkSource { + RowKeyAgnosticChunkSource /* MIXIN_IMPLS */ { private final int value; @@ -72,9 +72,6 @@ public final void fillPrevChunk( @Override public final void shift(final long start, final long end, final long offset) {} - // region reinterpret - // endregion reinterpret - @Override public void fillChunkUnordered( @NotNull FillContext context, @@ -99,4 +96,7 @@ public void fillPrevChunkUnordered( public boolean providesFillUnordered() { return true; } + + // region reinterpretation + // endregion reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantLongSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantLongSource.java index 361c45c9805..8e6996bbd9e 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantLongSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantLongSource.java @@ -8,6 +8,13 @@ */ package io.deephaven.engine.table.impl.sources.immutable; +import java.time.Instant; +import java.time.ZonedDateTime; +import java.time.LocalDate; +import java.time.LocalTime; +import io.deephaven.base.verify.Require; +import java.time.ZoneId; + import io.deephaven.engine.table.ColumnSource; import io.deephaven.time.DateTime; @@ -36,7 +43,7 @@ public class ImmutableConstantLongSource extends AbstractColumnSource implements ImmutableColumnSourceGetDefaults.ForLong, ShiftData.ShiftCallback, InMemoryColumnSource, - RowKeyAgnosticChunkSource { + RowKeyAgnosticChunkSource , ConvertableTimeSource { private final long value; @@ -76,20 +83,6 @@ public final void fillPrevChunk( @Override public final void shift(final long start, final long end, final long offset) {} - // region reinterpret - @Override - public boolean allowsReinterpret( - @NotNull final Class alternateDataType) { - return alternateDataType == DateTime.class; - } - 
- protected ColumnSource doReinterpret( - @NotNull Class alternateDataType) { - //noinspection unchecked - return (ColumnSource) new LongAsDateTimeColumnSource(this); - } - // endregion reinterpret - @Override public void fillChunkUnordered( @NotNull FillContext context, @@ -114,4 +107,60 @@ public void fillPrevChunkUnordered( public boolean providesFillUnordered() { return true; } + + // region reinterpretation + @Override + public boolean allowsReinterpret(@NotNull final Class alternateDataType) { + return alternateDataType == long.class || alternateDataType == Instant.class || alternateDataType == DateTime.class; + } + + @SuppressWarnings("unchecked") + @Override + protected ColumnSource doReinterpret(@NotNull Class alternateDataType) { + if (alternateDataType == this.getType()) { + return (ColumnSource) this; + } else if(alternateDataType == DateTime.class) { + return (ColumnSource) toDateTime(); + } else if (alternateDataType == Instant.class) { + return (ColumnSource) toInstant(); + } + + throw new IllegalArgumentException("Cannot reinterpret `" + getType().getName() + "` to `" + alternateDataType.getName() + "`"); + } + + @Override + public boolean supportsTimeConversion() { + return true; + } + + @Override + public ColumnSource toZonedDateTime(final @NotNull ZoneId zone) { + return new ImmutableConstantZonedDateTimeSource(Require.neqNull(zone, "zone"), this); + } + + @Override + public ColumnSource toLocalDate(final @NotNull ZoneId zone) { + return new LocalDateWrapperSource(toZonedDateTime(zone), zone); + } + + @Override + public ColumnSource toLocalTime(final @NotNull ZoneId zone) { + return new LocalTimeWrapperSource(toZonedDateTime(zone), zone); + } + + @Override + public ColumnSource toDateTime() { + return new ImmutableConstantDateTimeSource(this); + } + + @Override + public ColumnSource toInstant() { + return new ImmutableConstantInstantSource(this); + } + + @Override + public ColumnSource toEpochNano() { + return this; + } + // endregion 
reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantNanosBasedTimeSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantNanosBasedTimeSource.java new file mode 100644 index 00000000000..caf8b3213d0 --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantNanosBasedTimeSource.java @@ -0,0 +1,183 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.sources.immutable; + +import io.deephaven.base.verify.Require; +import io.deephaven.chunk.LongChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.rowset.RowSequence; +import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.impl.AbstractColumnSource; +import io.deephaven.engine.table.impl.sources.ConvertableTimeSource; +import io.deephaven.engine.table.impl.sources.InMemoryColumnSource; +import io.deephaven.engine.table.impl.sources.LocalDateWrapperSource; +import io.deephaven.engine.table.impl.sources.LocalTimeWrapperSource; +import io.deephaven.engine.table.impl.sources.RowKeyAgnosticChunkSource; +import io.deephaven.engine.table.impl.util.ShiftData; +import io.deephaven.time.DateTime; +import org.jetbrains.annotations.NotNull; + +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.ZonedDateTime; + +public abstract class ImmutableConstantNanosBasedTimeSource extends AbstractColumnSource + implements ShiftData.ShiftCallback, InMemoryColumnSource, RowKeyAgnosticChunkSource, + ConvertableTimeSource { + + protected final ImmutableConstantLongSource nanoSource; + + // region constructor + public 
ImmutableConstantNanosBasedTimeSource( + final @NotNull Class type, + final ImmutableConstantLongSource nanoSource) { + super(type); + this.nanoSource = nanoSource; + } + // endregion constructor + + // region Getters & Setters + protected abstract TIME_TYPE makeValue(final long nanos); + + protected abstract long toNanos(final TIME_TYPE value); + + @Override + public TIME_TYPE get(long rowKey) { + return makeValue(getLong(rowKey)); + } + + @Override + public TIME_TYPE getPrev(long rowKey) { + return makeValue(getPrevLong(rowKey)); + } + + @Override + public long getLong(long rowKey) { + return nanoSource.getLong(rowKey); + } + + @Override + public long getPrevLong(long rowKey) { + return nanoSource.getPrevLong(rowKey); + } + + @Override + public final void shift(final long start, final long end, final long offset) {} + // endregion + + // region Chunking + @Override + public final void fillChunk( + @NotNull final FillContext context, + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence) { + final int size = rowSequence.intSize(); + final TIME_TYPE value = get(0); + destination.setSize(size); + destination.asWritableObjectChunk().fillWithValue(0, size, value); + } + + @Override + public final void fillPrevChunk( + @NotNull final FillContext context, + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence) { + fillChunk(context, destination, rowSequence); + } + + @Override + public void fillChunkUnordered( + @NotNull FillContext context, + @NotNull WritableChunk dest, + @NotNull LongChunk keys) { + final WritableObjectChunk destChunk = dest.asWritableObjectChunk(); + final TIME_TYPE value = get(0); + for (int ii = 0; ii < keys.size(); ++ii) { + destChunk.set(ii, keys.get(ii) == RowSequence.NULL_ROW_KEY ? 
null : value); + } + destChunk.setSize(keys.size()); + } + + @Override + public void fillPrevChunkUnordered( + @NotNull FillContext context, + @NotNull WritableChunk dest, + @NotNull LongChunk keys) { + fillChunkUnordered(context, dest, keys); + } + + @Override + public boolean providesFillUnordered() { + return true; + } + // endregion Chunking + + + // region Reinterpretation + @Override + public boolean allowsReinterpret( + @NotNull final Class alternateDataType) { + return alternateDataType == long.class || alternateDataType == Instant.class + || alternateDataType == DateTime.class; + } + + @SuppressWarnings("unchecked") + @Override + protected ColumnSource doReinterpret( + @NotNull Class alternateDataType) { + if (alternateDataType == this.getType()) { + return (ColumnSource) this; + } else if (alternateDataType == DateTime.class) { + return (ColumnSource) toDateTime(); + } else if (alternateDataType == long.class || alternateDataType == Long.class) { + return (ColumnSource) toEpochNano(); + } else if (alternateDataType == Instant.class) { + return (ColumnSource) toInstant(); + } + + throw new IllegalArgumentException( + "Cannot reinterpret `" + getType().getName() + "` to `" + alternateDataType.getName() + "`"); + } + + @Override + public boolean supportsTimeConversion() { + return true; + } + + @Override + public ColumnSource toZonedDateTime(final @NotNull ZoneId zone) { + return new ImmutableConstantZonedDateTimeSource(Require.neqNull(zone, "zone"), nanoSource); + } + + @Override + public ColumnSource toLocalDate(final @NotNull ZoneId zone) { + return new LocalDateWrapperSource(toZonedDateTime(zone), zone); + } + + @Override + public ColumnSource toLocalTime(final @NotNull ZoneId zone) { + return new LocalTimeWrapperSource(toZonedDateTime(zone), zone); + } + + @Override + public ColumnSource toDateTime() { + return new ImmutableConstantDateTimeSource(nanoSource); + } + + @Override + public ColumnSource toInstant() { + return new 
ImmutableConstantInstantSource(nanoSource); + } + + @Override + public ColumnSource toEpochNano() { + return nanoSource; + } + // endregion Reinterpretation +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantObjectSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantObjectSource.java index 872874053b8..72a81b9a87d 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantObjectSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantObjectSource.java @@ -31,7 +31,7 @@ public class ImmutableConstantObjectSource extends AbstractColumnSource implements ImmutableColumnSourceGetDefaults.ForObject, ShiftData.ShiftCallback, InMemoryColumnSource, - RowKeyAgnosticChunkSource { + RowKeyAgnosticChunkSource /* MIXIN_IMPLS */ { private final T value; @@ -71,9 +71,6 @@ public final void fillPrevChunk( @Override public final void shift(final long start, final long end, final long offset) {} - // region reinterpret - // endregion reinterpret - @Override public void fillChunkUnordered( @NotNull FillContext context, @@ -98,4 +95,7 @@ public void fillPrevChunkUnordered( public boolean providesFillUnordered() { return true; } + + // region reinterpretation + // endregion reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantShortSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantShortSource.java index e0a9f3c5317..2a444754618 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantShortSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantShortSource.java @@ -32,7 +32,7 @@ public class ImmutableConstantShortSource extends AbstractColumnSource 
implements ImmutableColumnSourceGetDefaults.ForShort, ShiftData.ShiftCallback, InMemoryColumnSource, - RowKeyAgnosticChunkSource { + RowKeyAgnosticChunkSource /* MIXIN_IMPLS */ { private final short value; @@ -72,9 +72,6 @@ public final void fillPrevChunk( @Override public final void shift(final long start, final long end, final long offset) {} - // region reinterpret - // endregion reinterpret - @Override public void fillChunkUnordered( @NotNull FillContext context, @@ -99,4 +96,7 @@ public void fillPrevChunkUnordered( public boolean providesFillUnordered() { return true; } + + // region reinterpretation + // endregion reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantZonedDateTimeSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantZonedDateTimeSource.java new file mode 100644 index 00000000000..a9b4b67ddbb --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantZonedDateTimeSource.java @@ -0,0 +1,52 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.sources.immutable; + +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.impl.ImmutableColumnSourceGetDefaults; +import io.deephaven.engine.table.impl.sources.ConvertableTimeSource; +import io.deephaven.time.DateTimeUtils; +import org.jetbrains.annotations.NotNull; + +import java.time.ZoneId; +import java.time.ZonedDateTime; + +/** + * Constant ImmutableColumnSource for {@link ZonedDateTime}s. Allows reinterpretation as long. 
+ */ +public class ImmutableConstantZonedDateTimeSource extends ImmutableConstantNanosBasedTimeSource + implements ImmutableColumnSourceGetDefaults.ForObject, ConvertableTimeSource.Zoned { + private final ZoneId zone; + + public ImmutableConstantZonedDateTimeSource( + final @NotNull ZoneId zone, + final @NotNull ImmutableConstantLongSource nanoSource) { + super(ZonedDateTime.class, nanoSource); + this.zone = zone; + } + + @Override + protected ZonedDateTime makeValue(long nanos) { + return DateTimeUtils.makeZonedDateTime(nanos, zone); + } + + @Override + protected long toNanos(ZonedDateTime value) { + return DateTimeUtils.toEpochNano(value); + } + + @Override + public ColumnSource toZonedDateTime(final @NotNull ZoneId zone) { + if (this.zone.equals(zone)) { + return this; + } + + return new ImmutableConstantZonedDateTimeSource(zone, this.nanoSource); + } + + @Override + public ZoneId getZone() { + return zone; + } +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableDateTimeArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableDateTimeArraySource.java new file mode 100644 index 00000000000..ad3e8712fe3 --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableDateTimeArraySource.java @@ -0,0 +1,44 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.sources.immutable; + +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.impl.ImmutableColumnSourceGetDefaults; +import io.deephaven.time.DateTime; +import io.deephaven.time.DateTimeUtils; +import org.jetbrains.annotations.NotNull; + +/** + * ImmutableArraySource for {@link DateTime}s. Allows reinterpretation as long. 
+ */ +public class ImmutableDateTimeArraySource extends ImmutableNanosBasedTimeArraySource + implements ImmutableColumnSourceGetDefaults.ForLongAsDateTime { + + public ImmutableDateTimeArraySource() { + super(DateTime.class); + } + + public ImmutableDateTimeArraySource(final @NotNull long[] nanos) { + super(DateTime.class, new ImmutableLongArraySource(nanos)); + } + + public ImmutableDateTimeArraySource(final @NotNull ImmutableLongArraySource nanoSource) { + super(DateTime.class, nanoSource); + } + + @Override + protected DateTime makeValue(long nanos) { + return DateTimeUtils.nanosToTime(nanos); + } + + @Override + protected long toNanos(DateTime value) { + return DateTimeUtils.nanos(value); + } + + @Override + public ColumnSource toDateTime() { + return this; + } +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableDoubleArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableDoubleArraySource.java index 242a03ccbbf..aa4748b99fc 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableDoubleArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableDoubleArraySource.java @@ -37,7 +37,10 @@ * * If your size is greater than the maximum capacity of an array, prefer {@link Immutable2DDoubleArraySource}. 
*/ -public class ImmutableDoubleArraySource extends AbstractDeferredGroupingColumnSource implements ImmutableColumnSourceGetDefaults.ForDouble, WritableColumnSource, FillUnordered, InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation { +public class ImmutableDoubleArraySource extends AbstractDeferredGroupingColumnSource + implements ImmutableColumnSourceGetDefaults.ForDouble, WritableColumnSource, FillUnordered, + InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation + /* MIXIN_IMPLS */ { private double[] data; // region constructor @@ -111,22 +114,42 @@ public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk destination, RowSequence rowSequence) { - final WritableDoubleChunk asDoubleChunk = destination.asWritableDoubleChunk(); + // region fillChunkByRanges + /* TYPE_MIXIN */ void fillChunkByRanges( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableDoubleChunk chunk = destination.asWritableDoubleChunk(); + // endregion chunkDecl final MutableInt destPosition = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { - final int rangeLength = (int)(end - start + 1); - asDoubleChunk.copyFromTypedArray(data, (int)start, destPosition.getAndAdd(rangeLength), rangeLength); + final int length = (int)(end - start + 1); + // region copyFromTypedArrayImmutable + chunk.copyFromTypedArray(data, (int)start, destPosition.getAndAdd(length), length); + // endregion copyFromTypedArrayImmutable }); - asDoubleChunk.setSize(destPosition.intValue()); + chunk.setSize(destPosition.intValue()); } - - private void fillChunkByKeys(WritableChunk destination, RowSequence rowSequence) { - final WritableDoubleChunk asDoubleChunk = destination.asWritableDoubleChunk(); + // endregion fillChunkByRanges + + // region fillChunkByKeys + /* TYPE_MIXIN */ 
void fillChunkByKeys( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableDoubleChunk chunk = destination.asWritableDoubleChunk(); + // endregion chunkDecl final MutableInt destPosition = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> asDoubleChunk.set(destPosition.getAndIncrement(), getUnsafe(key))); - asDoubleChunk.setSize(destPosition.intValue()); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + chunk.set(destPosition.getAndIncrement(), getUnsafe(key)); + // endregion conversion + }); + chunk.setSize(destPosition.intValue()); } + // endregion fillChunkByKeys @Override public Chunk getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { @@ -170,42 +193,82 @@ public void fillFromChunk(@NotNull FillFromContext context, @NotNull Chunk src, RowSequence rowSequence) { - final DoubleChunk asDoubleChunk = src.asDoubleChunk(); + // region fillFromChunkByKeys + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final DoubleChunk chunk = src.asDoubleChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> set(key, asDoubleChunk.get(srcPos.getAndIncrement()))); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + set(key, chunk.get(srcPos.getAndIncrement())); + // endregion conversion + }); } - - private void fillFromChunkByRanges(Chunk src, RowSequence rowSequence) { - final DoubleChunk asDoubleChunk = src.asDoubleChunk(); + // endregion fillFromChunkByKeys + + // region fillFromChunkByRanges + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final DoubleChunk chunk = src.asDoubleChunk(); + // endregion chunkDecl final MutableInt srcPos = 
new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { - final int rangeLength = (int)(end - start + 1); - asDoubleChunk.copyToTypedArray(srcPos.getAndAdd(rangeLength), data, (int)start, rangeLength); + final int length = (int)(end - start + 1); + // region copyToTypedArrayImmutable + chunk.copyToTypedArray(srcPos.getAndAdd(length), data, (int)start, length); + // endregion copyToTypedArrayImmutable }); } + // endregion fillFromChunkByRanges + // region fillFromChunkUnordered @Override - public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, @NotNull LongChunk keys) { - final DoubleChunk asDoubleChunk = src.asDoubleChunk(); + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final DoubleChunk chunk = src.asDoubleChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { - set(keys.get(ii), asDoubleChunk.get(ii)); + // region conversion + set(keys.get(ii), chunk.get(ii)); + // endregion conversion } } + // endregion fillFromChunkUnordered + // region fillChunkUnordered @Override - public void fillChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableDoubleChunk doubleDest = dest.asWritableDoubleChunk(); + public /* TYPE_MIXIN */ void fillChunkUnordered( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableDoubleChunk chunk = dest.asWritableDoubleChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { final long longKey = keys.get(ii); if (longKey == RowSet.NULL_ROW_KEY) { - doubleDest.set(ii, NULL_DOUBLE); + chunk.set(ii, NULL_DOUBLE); } else { final int key = (int)longKey; - doubleDest.set(ii, getUnsafe(key)); + // region conversion + chunk.set(ii, 
getUnsafe(key)); + // endregion conversion } } } + // endregion fillChunkUnordered @Override public void fillPrevChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { @@ -249,6 +312,6 @@ public void setArray(double [] array) { } // endregion setArray - // region reinterpret - // endregion reinterpret + // region reinterpretation + // endregion reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableFloatArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableFloatArraySource.java index bc64fec1e53..3a636512c92 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableFloatArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableFloatArraySource.java @@ -37,7 +37,10 @@ * * If your size is greater than the maximum capacity of an array, prefer {@link Immutable2DFloatArraySource}. 
*/ -public class ImmutableFloatArraySource extends AbstractDeferredGroupingColumnSource implements ImmutableColumnSourceGetDefaults.ForFloat, WritableColumnSource, FillUnordered, InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation { +public class ImmutableFloatArraySource extends AbstractDeferredGroupingColumnSource + implements ImmutableColumnSourceGetDefaults.ForFloat, WritableColumnSource, FillUnordered, + InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation + /* MIXIN_IMPLS */ { private float[] data; // region constructor @@ -111,22 +114,42 @@ public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk destination, RowSequence rowSequence) { - final WritableFloatChunk asFloatChunk = destination.asWritableFloatChunk(); + // region fillChunkByRanges + /* TYPE_MIXIN */ void fillChunkByRanges( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableFloatChunk chunk = destination.asWritableFloatChunk(); + // endregion chunkDecl final MutableInt destPosition = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { - final int rangeLength = (int)(end - start + 1); - asFloatChunk.copyFromTypedArray(data, (int)start, destPosition.getAndAdd(rangeLength), rangeLength); + final int length = (int)(end - start + 1); + // region copyFromTypedArrayImmutable + chunk.copyFromTypedArray(data, (int)start, destPosition.getAndAdd(length), length); + // endregion copyFromTypedArrayImmutable }); - asFloatChunk.setSize(destPosition.intValue()); + chunk.setSize(destPosition.intValue()); } - - private void fillChunkByKeys(WritableChunk destination, RowSequence rowSequence) { - final WritableFloatChunk asFloatChunk = destination.asWritableFloatChunk(); + // endregion fillChunkByRanges + + // region fillChunkByKeys + /* TYPE_MIXIN */ void 
fillChunkByKeys( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableFloatChunk chunk = destination.asWritableFloatChunk(); + // endregion chunkDecl final MutableInt destPosition = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> asFloatChunk.set(destPosition.getAndIncrement(), getUnsafe(key))); - asFloatChunk.setSize(destPosition.intValue()); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + chunk.set(destPosition.getAndIncrement(), getUnsafe(key)); + // endregion conversion + }); + chunk.setSize(destPosition.intValue()); } + // endregion fillChunkByKeys @Override public Chunk getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { @@ -170,42 +193,82 @@ public void fillFromChunk(@NotNull FillFromContext context, @NotNull Chunk src, RowSequence rowSequence) { - final FloatChunk asFloatChunk = src.asFloatChunk(); + // region fillFromChunkByKeys + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final FloatChunk chunk = src.asFloatChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> set(key, asFloatChunk.get(srcPos.getAndIncrement()))); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + set(key, chunk.get(srcPos.getAndIncrement())); + // endregion conversion + }); } - - private void fillFromChunkByRanges(Chunk src, RowSequence rowSequence) { - final FloatChunk asFloatChunk = src.asFloatChunk(); + // endregion fillFromChunkByKeys + + // region fillFromChunkByRanges + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final FloatChunk chunk = src.asFloatChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); 
rowSequence.forAllRowKeyRanges((long start, long end) -> { - final int rangeLength = (int)(end - start + 1); - asFloatChunk.copyToTypedArray(srcPos.getAndAdd(rangeLength), data, (int)start, rangeLength); + final int length = (int)(end - start + 1); + // region copyToTypedArrayImmutable + chunk.copyToTypedArray(srcPos.getAndAdd(length), data, (int)start, length); + // endregion copyToTypedArrayImmutable }); } + // endregion fillFromChunkByRanges + // region fillFromChunkUnordered @Override - public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, @NotNull LongChunk keys) { - final FloatChunk asFloatChunk = src.asFloatChunk(); + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final FloatChunk chunk = src.asFloatChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { - set(keys.get(ii), asFloatChunk.get(ii)); + // region conversion + set(keys.get(ii), chunk.get(ii)); + // endregion conversion } } + // endregion fillFromChunkUnordered + // region fillChunkUnordered @Override - public void fillChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableFloatChunk floatDest = dest.asWritableFloatChunk(); + public /* TYPE_MIXIN */ void fillChunkUnordered( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableFloatChunk chunk = dest.asWritableFloatChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { final long longKey = keys.get(ii); if (longKey == RowSet.NULL_ROW_KEY) { - floatDest.set(ii, NULL_FLOAT); + chunk.set(ii, NULL_FLOAT); } else { final int key = (int)longKey; - floatDest.set(ii, getUnsafe(key)); + // region conversion + chunk.set(ii, getUnsafe(key)); + // endregion 
conversion } } } + // endregion fillChunkUnordered @Override public void fillPrevChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { @@ -249,6 +312,6 @@ public void setArray(float [] array) { } // endregion setArray - // region reinterpret - // endregion reinterpret + // region reinterpretation + // endregion reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableInstantArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableInstantArraySource.java new file mode 100644 index 00000000000..64113d4feeb --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableInstantArraySource.java @@ -0,0 +1,45 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.sources.immutable; + +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.impl.ImmutableColumnSourceGetDefaults; +import io.deephaven.time.DateTimeUtils; +import org.jetbrains.annotations.NotNull; + +import java.time.Instant; + +/** + * ImmutableArraySource for {@link Instant}s. Allows reinterpretation as long. 
+ */ +public class ImmutableInstantArraySource extends ImmutableNanosBasedTimeArraySource + implements ImmutableColumnSourceGetDefaults.ForLongAsInstant { + + public ImmutableInstantArraySource() { + super(Instant.class); + } + + public ImmutableInstantArraySource(final @NotNull long[] nanos) { + super(Instant.class, new ImmutableLongArraySource(nanos)); + } + + public ImmutableInstantArraySource(final @NotNull ImmutableLongArraySource nanoSource) { + super(Instant.class, nanoSource); + } + + @Override + protected Instant makeValue(long nanos) { + return DateTimeUtils.makeInstant(nanos); + } + + @Override + protected long toNanos(Instant value) { + return DateTimeUtils.toEpochNano(value); + } + + @Override + public ColumnSource toInstant() { + return this; + } +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableIntArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableIntArraySource.java index dae4be196e1..8b06470654e 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableIntArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableIntArraySource.java @@ -37,7 +37,10 @@ * * If your size is greater than the maximum capacity of an array, prefer {@link Immutable2DIntArraySource}. 
*/ -public class ImmutableIntArraySource extends AbstractDeferredGroupingColumnSource implements ImmutableColumnSourceGetDefaults.ForInt, WritableColumnSource, FillUnordered, InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation { +public class ImmutableIntArraySource extends AbstractDeferredGroupingColumnSource + implements ImmutableColumnSourceGetDefaults.ForInt, WritableColumnSource, FillUnordered, + InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation + /* MIXIN_IMPLS */ { private int[] data; // region constructor @@ -111,22 +114,42 @@ public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk destination, RowSequence rowSequence) { - final WritableIntChunk asIntChunk = destination.asWritableIntChunk(); + // region fillChunkByRanges + /* TYPE_MIXIN */ void fillChunkByRanges( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableIntChunk chunk = destination.asWritableIntChunk(); + // endregion chunkDecl final MutableInt destPosition = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { - final int rangeLength = (int)(end - start + 1); - asIntChunk.copyFromTypedArray(data, (int)start, destPosition.getAndAdd(rangeLength), rangeLength); + final int length = (int)(end - start + 1); + // region copyFromTypedArrayImmutable + chunk.copyFromTypedArray(data, (int)start, destPosition.getAndAdd(length), length); + // endregion copyFromTypedArrayImmutable }); - asIntChunk.setSize(destPosition.intValue()); + chunk.setSize(destPosition.intValue()); } - - private void fillChunkByKeys(WritableChunk destination, RowSequence rowSequence) { - final WritableIntChunk asIntChunk = destination.asWritableIntChunk(); + // endregion fillChunkByRanges + + // region fillChunkByKeys + /* TYPE_MIXIN */ void fillChunkByKeys( + @NotNull final 
WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableIntChunk chunk = destination.asWritableIntChunk(); + // endregion chunkDecl final MutableInt destPosition = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> asIntChunk.set(destPosition.getAndIncrement(), getUnsafe(key))); - asIntChunk.setSize(destPosition.intValue()); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + chunk.set(destPosition.getAndIncrement(), getUnsafe(key)); + // endregion conversion + }); + chunk.setSize(destPosition.intValue()); } + // endregion fillChunkByKeys @Override public Chunk getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { @@ -170,42 +193,82 @@ public void fillFromChunk(@NotNull FillFromContext context, @NotNull Chunk src, RowSequence rowSequence) { - final IntChunk asIntChunk = src.asIntChunk(); + // region fillFromChunkByKeys + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final IntChunk chunk = src.asIntChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> set(key, asIntChunk.get(srcPos.getAndIncrement()))); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + set(key, chunk.get(srcPos.getAndIncrement())); + // endregion conversion + }); } - - private void fillFromChunkByRanges(Chunk src, RowSequence rowSequence) { - final IntChunk asIntChunk = src.asIntChunk(); + // endregion fillFromChunkByKeys + + // region fillFromChunkByRanges + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final IntChunk chunk = src.asIntChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { - final 
int rangeLength = (int)(end - start + 1); - asIntChunk.copyToTypedArray(srcPos.getAndAdd(rangeLength), data, (int)start, rangeLength); + final int length = (int)(end - start + 1); + // region copyToTypedArrayImmutable + chunk.copyToTypedArray(srcPos.getAndAdd(length), data, (int)start, length); + // endregion copyToTypedArrayImmutable }); } + // endregion fillFromChunkByRanges + // region fillFromChunkUnordered @Override - public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, @NotNull LongChunk keys) { - final IntChunk asIntChunk = src.asIntChunk(); + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final IntChunk chunk = src.asIntChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { - set(keys.get(ii), asIntChunk.get(ii)); + // region conversion + set(keys.get(ii), chunk.get(ii)); + // endregion conversion } } + // endregion fillFromChunkUnordered + // region fillChunkUnordered @Override - public void fillChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableIntChunk intDest = dest.asWritableIntChunk(); + public /* TYPE_MIXIN */ void fillChunkUnordered( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableIntChunk chunk = dest.asWritableIntChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { final long longKey = keys.get(ii); if (longKey == RowSet.NULL_ROW_KEY) { - intDest.set(ii, NULL_INT); + chunk.set(ii, NULL_INT); } else { final int key = (int)longKey; - intDest.set(ii, getUnsafe(key)); + // region conversion + chunk.set(ii, getUnsafe(key)); + // endregion conversion } } } + // endregion fillChunkUnordered @Override public void fillPrevChunkUnordered(@NotNull 
FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { @@ -249,6 +312,6 @@ public void setArray(int [] array) { } // endregion setArray - // region reinterpret - // endregion reinterpret + // region reinterpretation + // endregion reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableLongArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableLongArraySource.java index 97cce9caf9f..669891e854a 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableLongArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableLongArraySource.java @@ -8,6 +8,15 @@ */ package io.deephaven.engine.table.impl.sources.immutable; +import java.util.function.LongFunction; +import java.util.function.ToLongFunction; +import java.time.Instant; +import java.time.ZonedDateTime; +import java.time.LocalDate; +import java.time.LocalTime; +import io.deephaven.base.verify.Require; +import java.time.ZoneId; + import io.deephaven.engine.table.ColumnSource; import io.deephaven.time.DateTime; @@ -41,7 +50,10 @@ * * If your size is greater than the maximum capacity of an array, prefer {@link Immutable2DLongArraySource}. 
*/ -public class ImmutableLongArraySource extends AbstractDeferredGroupingColumnSource implements ImmutableColumnSourceGetDefaults.ForLong, WritableColumnSource, FillUnordered, InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation { +public class ImmutableLongArraySource extends AbstractDeferredGroupingColumnSource + implements ImmutableColumnSourceGetDefaults.ForLong, WritableColumnSource, FillUnordered, + InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation + , ConvertableTimeSource { private long[] data; // region constructor @@ -115,22 +127,76 @@ public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk destination, RowSequence rowSequence) { - final WritableLongChunk asLongChunk = destination.asWritableLongChunk(); + // region fillChunkByRanges + /* TYPE_MIXIN */ void fillChunkByRanges( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableLongChunk chunk = destination.asWritableLongChunk(); + // endregion chunkDecl final MutableInt destPosition = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { - final int rangeLength = (int)(end - start + 1); - asLongChunk.copyFromTypedArray(data, (int)start, destPosition.getAndAdd(rangeLength), rangeLength); + final int length = (int)(end - start + 1); + // region copyFromTypedArrayImmutable + chunk.copyFromTypedArray(data, (int)start, destPosition.getAndAdd(length), length); + // endregion copyFromTypedArrayImmutable }); - asLongChunk.setSize(destPosition.intValue()); + chunk.setSize(destPosition.intValue()); } - - private void fillChunkByKeys(WritableChunk destination, RowSequence rowSequence) { - final WritableLongChunk asLongChunk = destination.asWritableLongChunk(); + void fillChunkByRanges( + @NotNull final WritableChunk destination, + @NotNull final RowSequence 
rowSequence + , LongFunction converter) { + // region chunkDecl + final WritableObjectChunk chunk = destination.asWritableObjectChunk(); + // endregion chunkDecl + final MutableInt destPosition = new MutableInt(0); + rowSequence.forAllRowKeyRanges((long start, long end) -> { + final int length = (int)(end - start + 1); + // region copyFromTypedArrayImmutable + final int offset = destPosition.getAndAdd(length); + for (int ii = 0; ii < length; ii++) { + chunk.set(offset + ii, converter.apply(data[(int)start + ii])); + } + // endregion copyFromTypedArrayImmutable + }); + chunk.setSize(destPosition.intValue()); + } + // endregion fillChunkByRanges + + // region fillChunkByKeys + /* TYPE_MIXIN */ void fillChunkByKeys( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableLongChunk chunk = destination.asWritableLongChunk(); + // endregion chunkDecl + final MutableInt destPosition = new MutableInt(0); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + chunk.set(destPosition.getAndIncrement(), getUnsafe(key)); + // endregion conversion + }); + chunk.setSize(destPosition.intValue()); + } + void fillChunkByKeys( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + , LongFunction converter) { + // region chunkDecl + final WritableObjectChunk chunk = destination.asWritableObjectChunk(); + // endregion chunkDecl final MutableInt destPosition = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> asLongChunk.set(destPosition.getAndIncrement(), getUnsafe(key))); - asLongChunk.setSize(destPosition.intValue()); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + chunk.set(destPosition.getAndIncrement(),converter.apply( getUnsafe(key))); + // endregion conversion + }); + chunk.setSize(destPosition.intValue()); } + // endregion fillChunkByKeys @Override public Chunk getChunk(@NotNull GetContext context, @NotNull 
RowSequence rowSequence) { @@ -174,42 +240,150 @@ public void fillFromChunk(@NotNull FillFromContext context, @NotNull Chunk src, RowSequence rowSequence) { - final LongChunk asLongChunk = src.asLongChunk(); + // region fillFromChunkByKeys + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final LongChunk chunk = src.asLongChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> set(key, asLongChunk.get(srcPos.getAndIncrement()))); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + set(key, chunk.get(srcPos.getAndIncrement())); + // endregion conversion + }); } - - private void fillFromChunkByRanges(Chunk src, RowSequence rowSequence) { - final LongChunk asLongChunk = src.asLongChunk(); + void fillFromChunkByKeys( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + , ToLongFunction converter) { + // region chunkDecl + final ObjectChunk chunk = src.asObjectChunk(); + // endregion chunkDecl + final MutableInt srcPos = new MutableInt(0); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + set(key,converter.applyAsLong( chunk.get(srcPos.getAndIncrement()))); + // endregion conversion + }); + } + // endregion fillFromChunkByKeys + + // region fillFromChunkByRanges + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final LongChunk chunk = src.asLongChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { - final int rangeLength = (int)(end - start + 1); - asLongChunk.copyToTypedArray(srcPos.getAndAdd(rangeLength), data, (int)start, rangeLength); + final int length = (int)(end - start + 1); + // region copyToTypedArrayImmutable + 
chunk.copyToTypedArray(srcPos.getAndAdd(length), data, (int)start, length); + // endregion copyToTypedArrayImmutable }); } + void fillFromChunkByRanges( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + , ToLongFunction converter) { + // region chunkDecl + final ObjectChunk chunk = src.asObjectChunk(); + // endregion chunkDecl + final MutableInt srcPos = new MutableInt(0); + rowSequence.forAllRowKeyRanges((long start, long end) -> { + final int length = (int)(end - start + 1); + // region copyToTypedArrayImmutable + final int offset = srcPos.getAndAdd(length); + for (int jj = 0; jj < length; jj++) { + data[(int)start + jj] = converter.applyAsLong(chunk.get(offset + jj)); + } + // endregion copyToTypedArrayImmutable + }); + } + // endregion fillFromChunkByRanges + // region fillFromChunkUnordered @Override - public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, @NotNull LongChunk keys) { - final LongChunk asLongChunk = src.asLongChunk(); + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final LongChunk chunk = src.asLongChunk(); + // endregion chunkDecl + for (int ii = 0; ii < keys.size(); ++ii) { + // region conversion + set(keys.get(ii), chunk.get(ii)); + // endregion conversion + } + } + + public void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + , ToLongFunction converter) { + // region chunkDecl + final ObjectChunk chunk = src.asObjectChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { - set(keys.get(ii), asLongChunk.get(ii)); + // region conversion + set(keys.get(ii),converter.applyAsLong( chunk.get(ii))); + // endregion conversion } } + // endregion fillFromChunkUnordered + // region fillChunkUnordered @Override - public void 
fillChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableLongChunk longDest = dest.asWritableLongChunk(); + public /* TYPE_MIXIN */ void fillChunkUnordered( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableLongChunk chunk = dest.asWritableLongChunk(); + // endregion chunkDecl + for (int ii = 0; ii < keys.size(); ++ii) { + final long longKey = keys.get(ii); + if (longKey == RowSet.NULL_ROW_KEY) { + chunk.set(ii, NULL_LONG); + } else { + final int key = (int)longKey; + // region conversion + chunk.set(ii, getUnsafe(key)); + // endregion conversion + } + } + } + + public void fillChunkUnordered( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + , LongFunction converter) { + // region chunkDecl + final WritableObjectChunk chunk = dest.asWritableObjectChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { final long longKey = keys.get(ii); if (longKey == RowSet.NULL_ROW_KEY) { - longDest.set(ii, NULL_LONG); + chunk.set(ii, null); } else { final int key = (int)longKey; - longDest.set(ii, getUnsafe(key)); + // region conversion + chunk.set(ii,converter.apply( getUnsafe(key))); + // endregion conversion } } } + // endregion fillChunkUnordered @Override public void fillPrevChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { @@ -253,17 +427,59 @@ public void setArray(long [] array) { } // endregion setArray - // region reinterpret + // region reinterpretation + @Override + public boolean allowsReinterpret(@NotNull final Class alternateDataType) { + return alternateDataType == long.class || alternateDataType == Instant.class || alternateDataType == DateTime.class; + } + + @SuppressWarnings("unchecked") + @Override + protected ColumnSource doReinterpret(@NotNull Class 
alternateDataType) { + if (alternateDataType == this.getType()) { + return (ColumnSource) this; + } else if(alternateDataType == DateTime.class) { + return (ColumnSource) toDateTime(); + } else if (alternateDataType == Instant.class) { + return (ColumnSource) toInstant(); + } + + throw new IllegalArgumentException("Cannot reinterpret `" + getType().getName() + "` to `" + alternateDataType.getName() + "`"); + } + + @Override + public boolean supportsTimeConversion() { + return true; + } + + @Override + public ColumnSource toZonedDateTime(final @NotNull ZoneId zone) { + return new ImmutableZonedDateTimeArraySource(Require.neqNull(zone, "zone"), this); + } + + @Override + public ColumnSource toLocalDate(final @NotNull ZoneId zone) { + return new LocalDateWrapperSource(toZonedDateTime(zone), zone); + } + @Override - public boolean allowsReinterpret( - @NotNull final Class alternateDataType) { - return alternateDataType == DateTime.class; + public ColumnSource toLocalTime(final @NotNull ZoneId zone) { + return new LocalTimeWrapperSource(toZonedDateTime(zone), zone); } - protected ColumnSource doReinterpret( - @NotNull Class alternateDataType) { - //noinspection unchecked - return (ColumnSource) new LongAsDateTimeColumnSource(this); + @Override + public ColumnSource toDateTime() { + return new ImmutableDateTimeArraySource(this); + } + + @Override + public ColumnSource toInstant() { + return new ImmutableInstantArraySource(this); + } + + @Override + public ColumnSource toEpochNano() { + return this; } - // endregion reinterpret + // endregion reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableNanosBasedTimeArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableNanosBasedTimeArraySource.java new file mode 100644 index 00000000000..4124b2a892b --- /dev/null +++ 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableNanosBasedTimeArraySource.java @@ -0,0 +1,238 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.sources.immutable; + +import io.deephaven.base.verify.Require; +import io.deephaven.engine.table.ColumnSource; + +import io.deephaven.engine.table.SharedContext; +import io.deephaven.engine.table.WritableSourceWithPrepareForParallelPopulation; +import io.deephaven.time.DateTime; + +import io.deephaven.chunk.*; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.rowset.RowSequence; +import io.deephaven.engine.rowset.RowSet; +import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.engine.table.WritableColumnSource; +import io.deephaven.engine.table.impl.sources.*; +import org.jetbrains.annotations.NotNull; + +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.ZonedDateTime; + +public abstract class ImmutableNanosBasedTimeArraySource + extends AbstractDeferredGroupingColumnSource + implements WritableColumnSource, FillUnordered, InMemoryColumnSource, ConvertableTimeSource, + WritableSourceWithPrepareForParallelPopulation { + + protected final ImmutableLongArraySource nanoSource; + + // region constructor + public ImmutableNanosBasedTimeArraySource( + final @NotNull Class type) { + super(type); + this.nanoSource = new ImmutableLongArraySource(); + } + + public ImmutableNanosBasedTimeArraySource( + final @NotNull Class type, + final ImmutableLongArraySource nanoSource) { + super(type); + this.nanoSource = nanoSource; + } + // endregion constructor + + // region Getters & Setters + protected abstract TIME_TYPE makeValue(final long nanos); + + protected abstract long toNanos(final TIME_TYPE value); + + @Override + public TIME_TYPE get(long rowKey) { + return makeValue(getLong(rowKey)); + } + + @Override 
+ public TIME_TYPE getPrev(long rowKey) { + return makeValue(getPrevLong(rowKey)); + } + + @Override + public final long getLong(long rowKey) { + return nanoSource.getLong(rowKey); + } + + @Override + public final void setNull(long key) { + nanoSource.setNull(key); + } + + @Override + public final void set(long key, long value) { + nanoSource.set(key, value); + } + // endregion Getters & Setters + + @Override + public void ensureCapacity(long capacity, boolean nullFilled) { + nanoSource.ensureCapacity(capacity, nullFilled); + } + + @Override + public FillFromContext makeFillFromContext(int chunkCapacity) { + return nanoSource.makeFillFromContext(chunkCapacity); + } + + @Override + public FillContext makeFillContext(int chunkCapacity, SharedContext sharedContext) { + return nanoSource.makeFillContext(chunkCapacity, sharedContext); + } + + @Override + public void fillChunk(@NotNull FillContext context, + @NotNull WritableChunk destination, + @NotNull RowSequence rowSequence) { + if (rowSequence.getAverageRunLengthEstimate() >= ArrayBackedColumnSource.USE_RANGES_AVERAGE_RUN_LENGTH) { + fillChunkByRanges(destination, rowSequence); + } else { + fillChunkByKeys(destination, rowSequence); + } + } + + private void fillChunkByRanges(WritableChunk destination, RowSequence rowSequence) { + nanoSource.fillChunkByRanges(destination, rowSequence, this::makeValue); + } + + private void fillChunkByKeys(WritableChunk destination, RowSequence rowSequence) { + nanoSource.fillChunkByKeys(destination, rowSequence, this::makeValue); + } + + @Override + public void fillFromChunk(@NotNull FillFromContext context, @NotNull Chunk src, + @NotNull RowSequence rowSequence) { + if (rowSequence.getAverageRunLengthEstimate() >= ArrayBackedColumnSource.USE_RANGES_AVERAGE_RUN_LENGTH) { + fillFromChunkByRanges(src, rowSequence); + } else { + fillFromChunkByKeys(src, rowSequence); + } + } + + private void fillFromChunkByKeys(Chunk src, RowSequence rowSequence) { + nanoSource.fillFromChunkByKeys(src, 
rowSequence, this::toNanos); + } + + private void fillFromChunkByRanges(Chunk src, RowSequence rowSequence) { + nanoSource.fillFromChunkByRanges(src, rowSequence, this::toNanos); + } + + @Override + public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, + @NotNull LongChunk keys) { + nanoSource.fillFromChunkUnordered(context, src, keys, this::toNanos); + } + + @Override + public void fillChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, + @NotNull LongChunk keys) { + nanoSource.fillChunkUnordered(context, dest, keys, this::makeValue); + } + + @Override + public void fillPrevChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, + @NotNull LongChunk keys) { + fillChunkUnordered(context, dest, keys); + } + + @Override + public void fillPrevChunk(@NotNull FillContext context, @NotNull WritableChunk destination, + @NotNull RowSequence rowSequence) { + fillChunk(context, destination, rowSequence); + } + + @Override + public Chunk getPrevChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { + return getChunk(context, rowSequence); + } + + @Override + public Chunk getPrevChunk(@NotNull GetContext context, long firstKey, long lastKey) { + return getChunk(context, firstKey, lastKey); + } + + @Override + public boolean providesFillUnordered() { + return true; + } + + @Override + public void prepareForParallelPopulation(RowSet rowSet) { + nanoSource.prepareForParallelPopulation(rowSet); + } + + // region reinterpretation + @Override + public boolean allowsReinterpret( + @NotNull final Class alternateDataType) { + return alternateDataType == long.class || alternateDataType == Instant.class + || alternateDataType == DateTime.class; + } + + @SuppressWarnings("unchecked") + @Override + protected ColumnSource doReinterpret( + @NotNull Class alternateDataType) { + if (alternateDataType == this.getType()) { + return (ColumnSource) this; + } else if (alternateDataType == DateTime.class) { + 
return (ColumnSource) toDateTime(); + } else if (alternateDataType == long.class || alternateDataType == Long.class) { + return (ColumnSource) toEpochNano(); + } else if (alternateDataType == Instant.class) { + return (ColumnSource) toInstant(); + } + + throw new IllegalArgumentException( + "Cannot reinterpret `" + getType().getName() + "` to `" + alternateDataType.getName() + "`"); + } + + @Override + public boolean supportsTimeConversion() { + return true; + } + + @Override + public ColumnSource toZonedDateTime(final @NotNull ZoneId zone) { + return new ImmutableZonedDateTimeArraySource(Require.neqNull(zone, "zone"), nanoSource); + } + + @Override + public ColumnSource toLocalDate(final @NotNull ZoneId zone) { + return new LocalDateWrapperSource(toZonedDateTime(zone), zone); + } + + @Override + public ColumnSource toLocalTime(final @NotNull ZoneId zone) { + return new LocalTimeWrapperSource(toZonedDateTime(zone), zone); + } + + @Override + public ColumnSource toDateTime() { + return new ImmutableDateTimeArraySource(nanoSource); + } + + @Override + public ColumnSource toInstant() { + return new ImmutableInstantArraySource(nanoSource); + } + + @Override + public ColumnSource toEpochNano() { + return nanoSource; + } + // endregion reinterpretation +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableObjectArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableObjectArraySource.java index f2faf04f18a..9c543764559 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableObjectArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableObjectArraySource.java @@ -36,7 +36,10 @@ * * If your size is greater than the maximum capacity of an array, prefer {@link Immutable2DObjectArraySource}. 
*/ -public class ImmutableObjectArraySource extends AbstractDeferredGroupingColumnSource implements ImmutableColumnSourceGetDefaults.ForObject, WritableColumnSource, FillUnordered, InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation { +public class ImmutableObjectArraySource extends AbstractDeferredGroupingColumnSource + implements ImmutableColumnSourceGetDefaults.ForObject, WritableColumnSource, FillUnordered, + InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation + /* MIXIN_IMPLS */ { private Object[] data; // region constructor @@ -110,22 +113,42 @@ public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk destination, RowSequence rowSequence) { - final WritableObjectChunk asObjectChunk = destination.asWritableObjectChunk(); + // region fillChunkByRanges + /* TYPE_MIXIN */ void fillChunkByRanges( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableObjectChunk chunk = destination.asWritableObjectChunk(); + // endregion chunkDecl final MutableInt destPosition = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { - final int rangeLength = (int)(end - start + 1); - asObjectChunk.copyFromTypedArray((T[])data, (int)start, destPosition.getAndAdd(rangeLength), rangeLength); + final int length = (int)(end - start + 1); + // region copyFromTypedArrayImmutable + chunk.copyFromTypedArray((T[])data, (int)start, destPosition.getAndAdd(length), length); + // endregion copyFromTypedArrayImmutable }); - asObjectChunk.setSize(destPosition.intValue()); + chunk.setSize(destPosition.intValue()); } - - private void fillChunkByKeys(WritableChunk destination, RowSequence rowSequence) { - final WritableObjectChunk asObjectChunk = destination.asWritableObjectChunk(); + // endregion fillChunkByRanges + + // region fillChunkByKeys + /* 
TYPE_MIXIN */ void fillChunkByKeys( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableObjectChunk chunk = destination.asWritableObjectChunk(); + // endregion chunkDecl final MutableInt destPosition = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> asObjectChunk.set(destPosition.getAndIncrement(), getUnsafe(key))); - asObjectChunk.setSize(destPosition.intValue()); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + chunk.set(destPosition.getAndIncrement(), getUnsafe(key)); + // endregion conversion + }); + chunk.setSize(destPosition.intValue()); } + // endregion fillChunkByKeys @Override public Chunk getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { @@ -169,42 +192,82 @@ public void fillFromChunk(@NotNull FillFromContext context, @NotNull Chunk src, RowSequence rowSequence) { - final ObjectChunk asObjectChunk = src.asObjectChunk(); + // region fillFromChunkByKeys + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final ObjectChunk chunk = src.asObjectChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> set(key, asObjectChunk.get(srcPos.getAndIncrement()))); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + set(key, chunk.get(srcPos.getAndIncrement())); + // endregion conversion + }); } - - private void fillFromChunkByRanges(Chunk src, RowSequence rowSequence) { - final ObjectChunk asObjectChunk = src.asObjectChunk(); + // endregion fillFromChunkByKeys + + // region fillFromChunkByRanges + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final ObjectChunk chunk = src.asObjectChunk(); + // endregion chunkDecl final 
MutableInt srcPos = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { - final int rangeLength = (int)(end - start + 1); - asObjectChunk.copyToTypedArray(srcPos.getAndAdd(rangeLength), (T[])data, (int)start, rangeLength); + final int length = (int)(end - start + 1); + // region copyToTypedArrayImmutable + chunk.copyToTypedArray(srcPos.getAndAdd(length), (T[])data, (int)start, length); + // endregion copyToTypedArrayImmutable }); } + // endregion fillFromChunkByRanges + // region fillFromChunkUnordered @Override - public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, @NotNull LongChunk keys) { - final ObjectChunk asObjectChunk = src.asObjectChunk(); + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final ObjectChunk chunk = src.asObjectChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { - set(keys.get(ii), asObjectChunk.get(ii)); + // region conversion + set(keys.get(ii), chunk.get(ii)); + // endregion conversion } } + // endregion fillFromChunkUnordered + // region fillChunkUnordered @Override - public void fillChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableObjectChunk ObjectDest = dest.asWritableObjectChunk(); + public /* TYPE_MIXIN */ void fillChunkUnordered( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableObjectChunk chunk = dest.asWritableObjectChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { final long longKey = keys.get(ii); if (longKey == RowSet.NULL_ROW_KEY) { - ObjectDest.set(ii, null); + chunk.set(ii, null); } else { final int key = (int)longKey; - ObjectDest.set(ii, getUnsafe(key)); + // region conversion + 
chunk.set(ii, getUnsafe(key)); + // endregion conversion } } } + // endregion fillChunkUnordered @Override public void fillPrevChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { @@ -248,6 +311,6 @@ public void setArray(Object [] array) { } // endregion setArray - // region reinterpret - // endregion reinterpret + // region reinterpretation + // endregion reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableShortArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableShortArraySource.java index 94ac657a09e..7e2be9c38c8 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableShortArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableShortArraySource.java @@ -37,7 +37,10 @@ * * If your size is greater than the maximum capacity of an array, prefer {@link Immutable2DShortArraySource}. 
*/ -public class ImmutableShortArraySource extends AbstractDeferredGroupingColumnSource implements ImmutableColumnSourceGetDefaults.ForShort, WritableColumnSource, FillUnordered, InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation { +public class ImmutableShortArraySource extends AbstractDeferredGroupingColumnSource + implements ImmutableColumnSourceGetDefaults.ForShort, WritableColumnSource, FillUnordered, + InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource, WritableSourceWithPrepareForParallelPopulation + /* MIXIN_IMPLS */ { private short[] data; // region constructor @@ -111,22 +114,42 @@ public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk destination, RowSequence rowSequence) { - final WritableShortChunk asShortChunk = destination.asWritableShortChunk(); + // region fillChunkByRanges + /* TYPE_MIXIN */ void fillChunkByRanges( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableShortChunk chunk = destination.asWritableShortChunk(); + // endregion chunkDecl final MutableInt destPosition = new MutableInt(0); rowSequence.forAllRowKeyRanges((long start, long end) -> { - final int rangeLength = (int)(end - start + 1); - asShortChunk.copyFromTypedArray(data, (int)start, destPosition.getAndAdd(rangeLength), rangeLength); + final int length = (int)(end - start + 1); + // region copyFromTypedArrayImmutable + chunk.copyFromTypedArray(data, (int)start, destPosition.getAndAdd(length), length); + // endregion copyFromTypedArrayImmutable }); - asShortChunk.setSize(destPosition.intValue()); + chunk.setSize(destPosition.intValue()); } - - private void fillChunkByKeys(WritableChunk destination, RowSequence rowSequence) { - final WritableShortChunk asShortChunk = destination.asWritableShortChunk(); + // endregion fillChunkByRanges + + // region fillChunkByKeys + /* TYPE_MIXIN */ void 
fillChunkByKeys( + @NotNull final WritableChunk destination, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final WritableShortChunk chunk = destination.asWritableShortChunk(); + // endregion chunkDecl final MutableInt destPosition = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> asShortChunk.set(destPosition.getAndIncrement(), getUnsafe(key))); - asShortChunk.setSize(destPosition.intValue()); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + chunk.set(destPosition.getAndIncrement(), getUnsafe(key)); + // endregion conversion + }); + chunk.setSize(destPosition.intValue()); } + // endregion fillChunkByKeys @Override public Chunk getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) { @@ -170,42 +193,82 @@ public void fillFromChunk(@NotNull FillFromContext context, @NotNull Chunk src, RowSequence rowSequence) { - final ShortChunk asShortChunk = src.asShortChunk(); + // region fillFromChunkByKeys + /* TYPE_MIXIN */ void fillFromChunkByKeys( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final ShortChunk chunk = src.asShortChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); - rowSequence.forAllRowKeys((long key) -> set(key, asShortChunk.get(srcPos.getAndIncrement()))); + rowSequence.forAllRowKeys((long key) -> { + // region conversion + set(key, chunk.get(srcPos.getAndIncrement())); + // endregion conversion + }); } - - private void fillFromChunkByRanges(Chunk src, RowSequence rowSequence) { - final ShortChunk asShortChunk = src.asShortChunk(); + // endregion fillFromChunkByKeys + + // region fillFromChunkByRanges + /* TYPE_MIXIN */ void fillFromChunkByRanges( + @NotNull final Chunk src, + @NotNull final RowSequence rowSequence + /* CONVERTER */) { + // region chunkDecl + final ShortChunk chunk = src.asShortChunk(); + // endregion chunkDecl final MutableInt srcPos = new MutableInt(0); 
rowSequence.forAllRowKeyRanges((long start, long end) -> { - final int rangeLength = (int)(end - start + 1); - asShortChunk.copyToTypedArray(srcPos.getAndAdd(rangeLength), data, (int)start, rangeLength); + final int length = (int)(end - start + 1); + // region copyToTypedArrayImmutable + chunk.copyToTypedArray(srcPos.getAndAdd(length), data, (int)start, length); + // endregion copyToTypedArrayImmutable }); } + // endregion fillFromChunkByRanges + // region fillFromChunkUnordered @Override - public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, @NotNull LongChunk keys) { - final ShortChunk asShortChunk = src.asShortChunk(); + public /* TYPE_MIXIN */ void fillFromChunkUnordered( + @NotNull final FillFromContext context, + @NotNull final Chunk src, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final ShortChunk chunk = src.asShortChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { - set(keys.get(ii), asShortChunk.get(ii)); + // region conversion + set(keys.get(ii), chunk.get(ii)); + // endregion conversion } } + // endregion fillFromChunkUnordered + // region fillChunkUnordered @Override - public void fillChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { - final WritableShortChunk shortDest = dest.asWritableShortChunk(); + public /* TYPE_MIXIN */ void fillChunkUnordered( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys + /* CONVERTER */) { + // region chunkDecl + final WritableShortChunk chunk = dest.asWritableShortChunk(); + // endregion chunkDecl for (int ii = 0; ii < keys.size(); ++ii) { final long longKey = keys.get(ii); if (longKey == RowSet.NULL_ROW_KEY) { - shortDest.set(ii, NULL_SHORT); + chunk.set(ii, NULL_SHORT); } else { final int key = (int)longKey; - shortDest.set(ii, getUnsafe(key)); + // region conversion + chunk.set(ii, getUnsafe(key)); + // endregion 
conversion } } } + // endregion fillChunkUnordered @Override public void fillPrevChunkUnordered(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull LongChunk keys) { @@ -249,6 +312,6 @@ public void setArray(short [] array) { } // endregion setArray - // region reinterpret - // endregion reinterpret + // region reinterpretation + // endregion reinterpretation } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableZonedDateTimeArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableZonedDateTimeArraySource.java new file mode 100644 index 00000000000..ccd12f6da66 --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableZonedDateTimeArraySource.java @@ -0,0 +1,58 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.sources.immutable; + +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.impl.ImmutableColumnSourceGetDefaults; +import io.deephaven.engine.table.impl.sources.ConvertableTimeSource; +import io.deephaven.time.DateTimeUtils; +import org.jetbrains.annotations.NotNull; + +import java.time.ZoneId; +import java.time.ZonedDateTime; + +/** + * ImmutableArraySource for {@link ZonedDateTime}s. Allows reinterpretation as long. 
+ */ +public class ImmutableZonedDateTimeArraySource extends ImmutableNanosBasedTimeArraySource + implements ImmutableColumnSourceGetDefaults.ForObject, ConvertableTimeSource.Zoned { + private final ZoneId zone; + + public ImmutableZonedDateTimeArraySource( + final @NotNull ZoneId zone) { + super(ZonedDateTime.class); + this.zone = zone; + } + + public ImmutableZonedDateTimeArraySource( + final @NotNull ZoneId zone, + final @NotNull ImmutableLongArraySource nanoSource) { + super(ZonedDateTime.class, nanoSource); + this.zone = zone; + } + + @Override + protected ZonedDateTime makeValue(long nanos) { + return DateTimeUtils.makeZonedDateTime(nanos, zone); + } + + @Override + protected long toNanos(ZonedDateTime value) { + return DateTimeUtils.toEpochNano(value); + } + + @Override + public ColumnSource toZonedDateTime(final @NotNull ZoneId zone) { + if (this.zone.equals(zone)) { + return this; + } + + return new ImmutableZonedDateTimeArraySource(zone, this.nanoSource); + } + + @Override + public ZoneId getZone() { + return zone; + } +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ring/RingColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ring/RingColumnSource.java index 76bd5752042..0cb85242242 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ring/RingColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ring/RingColumnSource.java @@ -20,6 +20,7 @@ import io.deephaven.time.DateTime; import org.jetbrains.annotations.NotNull; +import java.time.Instant; import java.util.Objects; /** @@ -89,6 +90,9 @@ public static RingColumnSource of(int capacity, Class dataType, Class< } else if (dataType == DateTime.class) { throw new UnsupportedOperationException( "No DateTime chunk source for RingColumnSource - use long and reinterpret"); + } else if (dataType == Instant.class) { + throw new UnsupportedOperationException( + "No Instant chunk source 
for RingColumnSource - use long and reinterpret"); } else { if (componentType != null) { return ofObject(dataType, componentType, capacity); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ColumnHolder.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ColumnHolder.java index 5c70751a136..5d77483a600 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ColumnHolder.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ColumnHolder.java @@ -24,6 +24,7 @@ import io.deephaven.engine.table.ColumnSource; import java.lang.reflect.Array; +import java.time.Instant; import java.util.Optional; /** @@ -150,6 +151,7 @@ private ColumnHolder(String name, boolean grouped, Class dataType, Class c } if (!arrayData.getClass().getComponentType().isAssignableFrom(dataType) && !(dataType == DateTime.class && arrayData.getClass().getComponentType() == long.class) + && !(dataType == Instant.class && arrayData.getClass().getComponentType() == long.class) && !(dataType == Boolean.class && arrayData.getClass().getComponentType() == byte.class)) { throw new IllegalArgumentException( "Incompatible data type: " + dataType + " can not be stored in array of type " diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/TableTimeConversions.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/TableTimeConversions.java new file mode 100644 index 00000000000..98b7f233a14 --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/TableTimeConversions.java @@ -0,0 +1,341 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.util; + +import io.deephaven.base.verify.Require; +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.MatchPair; +import io.deephaven.engine.table.Table; +import io.deephaven.engine.table.impl.select.MatchPairFactory; +import 
io.deephaven.engine.table.impl.select.ReinterpretedColumn; +import io.deephaven.time.DateTime; +import io.deephaven.util.annotations.ScriptApi; +import org.jetbrains.annotations.NotNull; + +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.ZonedDateTime; + +/** + * This class contains static methods to support conversions between various time types such as {@link DateTime}, + * {@link Instant}, {@link ZonedDateTime}, {@link LocalDate}, {@link LocalTime}, and {@code long}. + * + *

+ * For example, let's say that you wanted to select multiple days from a table, but filter them to specific times of + * the day. + *

+ * + *
+ * baseTable = db.i("Market", "Trades")
+ *               .where("Date > 2021-10-01")
+ *
+ * startTime = LocalTime.of(10,30,00)
+ * endTime = LocalTime.of(16,30,00)
+ * augmented = TableTimeConversions.asLocalTime(baseTable, "LocalTime = Timestamp", "America/New_York")
+ *                                 .where("LocalTime.isAfter(startTime)", "LocalTime.isBefore(endTime)")
+ * 
+ */ +public class TableTimeConversions { + @NotNull + private static Table convertTimeColumn(@NotNull final Table source, @NotNull final MatchPair mp, + @NotNull final Class resultType, Object... params) { + final ColumnSource cd = Require.neqNull(source, "source").getColumnSource(mp.rightColumn); + final Class colType = cd.getType(); + + // We cannot simply return the source if we are converting between types that require a zone because we don't + // know + // what the time zones are compared to the current one. so we have to reinterpret and potentially switch zones. + if (colType == resultType && mp.leftColumn.equals(mp.rightColumn) + && (!requiresZone(resultType) || !requiresZone(colType))) { + return source; + } + + return source.updateView(new ReinterpretedColumn<>(mp.rightColumn, colType, mp.leftColumn, resultType, params)); + } + + // region To ZonedDateTime + /** + * Convert the specified column in the table to a {@link ZonedDateTime} column at the specified time zone. The + * column may be specified as a single value "Column" or a pair "NewColumn = OriginalColumn" + * + * @param source The source table + * @param column The column to convert, in {@link MatchPair} format + * @param zone The time zone to use. + * @return the {@link Table} with the specified column converted to {@link ZonedDateTime}. + */ + @ScriptApi + public static Table asZonedDateTime(final @NotNull Table source, @NotNull final String column, + @NotNull final String zone) { + return asZonedDateTime(source, + MatchPairFactory.getExpression(Require.neqNull(column, "column")), + ZoneId.of(Require.neqNull(zone, "zone"))); + } + + /** + * Convert the specified column in the table to a {@link ZonedDateTime} column at the specified time zone. The + * column may be specified as a single value "Column" or a pair "NewColumn = OriginalColumn" + * + * @param source The source table + * @param matchPair The {@link MatchPair} of columns + * @param zone The time zone to use. 
+ * @return the {@link Table} with the specified column converted to {@link ZonedDateTime}. + */ + @ScriptApi + public static Table asZonedDateTime(final @NotNull Table source, @NotNull final MatchPair matchPair, + @NotNull final String zone) { + return asZonedDateTime(source, matchPair, ZoneId.of(Require.neqNull(zone, "zone"))); + } + + /** + * Convert the specified column in the table to a {@link ZonedDateTime} column at the specified time zone. + * + * @param source The source table + * @param column The column to convert, in {@link MatchPair} format + * @param zone The time zone to use. + * @return the {@link Table} with the specified column converted to {@link ZonedDateTime}. + */ + @ScriptApi + public static Table asZonedDateTime(final @NotNull Table source, @NotNull final String column, + @NotNull final ZoneId zone) { + return asZonedDateTime(source, MatchPairFactory.getExpression(Require.neqNull(column, "column")), zone); + } + + /** + * Convert the specified column in the table to a {@link ZonedDateTime} column at the specified time zone. + * + * @param source The source table + * @param matchPair The {@link MatchPair} of columns + * @param zone The time zone to use. + * @return the {@link Table} with the specified column converted to {@link ZonedDateTime}. + */ + @ScriptApi + public static Table asZonedDateTime(final @NotNull Table source, @NotNull final MatchPair matchPair, + @NotNull final ZoneId zone) { + return convertTimeColumn(source, matchPair, ZonedDateTime.class, zone); + } + + // endregion + + // region To LocalTime + /** + * Convert the specified column in the table to a {@link LocalTime} column at the specified time zone. The column + * may be specified as a single value "Column" or a pair "NewColumn = OriginalColumn" + * + * @param source The source table + * @param column The column to convert, in {@link MatchPair} format + * @param zone The time zone to use. 
+ * @return the {@link Table} with the specified column converted to {@link LocalTime}. + */ + @ScriptApi + public static Table asLocalTime(final @NotNull Table source, @NotNull final String column, + @NotNull final String zone) { + return asLocalTime(source, + MatchPairFactory.getExpression(Require.neqNull(column, "column")), + ZoneId.of(Require.neqNull(zone, "zone"))); + } + + /** + * Convert the specified column in the table to a {@link LocalTime} column at the specified time zone. The column + * may be specified as a single value "Column" or a pair "NewColumn = OriginalColumn" + * + * @param source The source table + * @param column The column to convert, in {@link MatchPair} format + * @param zone The time zone to use. + * @return the {@link Table} with the specified column converted to {@link LocalTime}. + */ + @ScriptApi + public static Table asLocalTime(final @NotNull Table source, @NotNull final String column, + @NotNull final ZoneId zone) { + return asLocalTime(source, MatchPairFactory.getExpression(Require.neqNull(column, "column")), zone); + } + + /** + * Convert the specified column in the table to a {@link LocalTime} column at the specified time zone. + * + * @param source The source table + * @param matchPair The {@link MatchPair} of columns + * @param zone The time zone to use. + * @return the {@link Table} with the specified column converted to {@link LocalTime}. + */ + @ScriptApi + public static Table asLocalTime(final @NotNull Table source, @NotNull final MatchPair matchPair, + @NotNull final String zone) { + return asLocalTime(source, matchPair, ZoneId.of(Require.neqNull(zone, "zone"))); + } + + /** + * Convert the specified column in the table to a {@link LocalTime} column at the specified time zone. + * + * @param source The source table + * @param matchPair The {@link MatchPair} of columns + * @param zone The time zone to use. + * @return the {@link Table} with the specified column converted to {@link LocalTime}. 
+ */ + @ScriptApi + public static Table asLocalTime(final @NotNull Table source, @NotNull final MatchPair matchPair, + @NotNull final ZoneId zone) { + return convertTimeColumn(source, matchPair, LocalTime.class, zone); + } + // endregion + + // region To LocalDate + /** + * Convert the specified column in the table to a {@link LocalDate} column at the specified time zone. The column + * may be specified as a single value "Column" or a pair "NewColumn = OriginalColumn" + * + * @param source The source table + * @param column The column to convert, in {@link MatchPair} format + * @param zone The time zone to use. + * @return the {@link Table} with the specified column converted to {@link LocalDate}. + */ + @ScriptApi + public static Table asLocalDate(final @NotNull Table source, @NotNull final String column, + @NotNull final String zone) { + return asLocalDate(source, + MatchPairFactory.getExpression(Require.neqNull(column, "column")), + ZoneId.of(Require.neqNull(zone, "zone"))); + } + + /** + * Convert the specified column in the table to a {@link LocalDate} column at the specified time zone. The column + * may be specified as a single value "Column" or a pair "NewColumn = OriginalColumn" + * + * @param source The source table + * @param column The column to convert, in {@link MatchPair} format + * @param zone The time zone to use. + * @return the {@link Table} with the specified column converted to {@link LocalDate}. + */ + @ScriptApi + public static Table asLocalDate(final @NotNull Table source, @NotNull final String column, + @NotNull final ZoneId zone) { + return asLocalDate(source, MatchPairFactory.getExpression(Require.neqNull(column, "column")), zone); + } + + /** + * Convert the specified column in the table to a {@link LocalDate} column at the specified time zone. + * + * @param source The source table + * @param matchPair The {@link MatchPair} of columns + * @param zone The time zone to use. 
+ * @return the {@link Table} with the specified column converted to {@link LocalDate}. + */ + @ScriptApi + public static Table asLocalDate(final @NotNull Table source, @NotNull final MatchPair matchPair, + @NotNull final String zone) { + return asLocalDate(source, matchPair, ZoneId.of(Require.neqNull(zone, "zone"))); + } + + /** + * Convert the specified column in the table to a {@link LocalDate} column at the specified time zone. + * + * @param source The source table + * @param matchPair The {@link MatchPair} of columns + * @param zone The time zone to use. + * @return the {@link Table} with the specified column converted to {@link LocalDate}. + */ + @ScriptApi + public static Table asLocalDate(final @NotNull Table source, @NotNull final MatchPair matchPair, + @NotNull final ZoneId zone) { + return convertTimeColumn(source, matchPair, LocalDate.class, zone); + } + // endregion + + // region to Instant + /** + * Convert the specified column in the table to an {@link Instant} column. The column may be specified as a single + * value "Column" or a pair "NewColumn = OriginalColumn" + * + * @param source The source table + * @param column The column to convert, in {@link MatchPair} format + * @return the {@link Table} with the specified column converted to {@link Instant}. + */ + public static Table asInstant(final @NotNull Table source, @NotNull final String column) { + return asInstant(source, MatchPairFactory.getExpression(Require.neqNull(column, "column"))); + } + + /** + * Convert the specified column in the table to an {@link Instant} column. + * + * @param source The source table + * @param matchPair The {@link MatchPair} of columns + * @return the {@link Table} with the specified column converted to {@link Instant}. 
+ */ + public static Table asInstant(final @NotNull Table source, @NotNull final MatchPair matchPair) { + return convertTimeColumn(source, matchPair, Instant.class); + } + // endregion + + // region to DateTime + /** + * Convert the specified column in the table to a {@link DateTime} column. The column may be specified as a single + * value "Column" or a pair "NewColumn = OriginalColumn" + * + * @param source The source table + * @param column The column to convert, in {@link MatchPair} format + * @return the {@link Table} with the specified column converted to {@link DateTime}. + */ + public static Table asDateTime(final @NotNull Table source, @NotNull final String column) { + return asDateTime(source, MatchPairFactory.getExpression(Require.neqNull(column, "column"))); + } + + /** + * Convert the specified column in the table to a {@link DateTime} column. + * + * @param source The source table + * @param matchPair The {@link MatchPair} of columns + * @return the {@link Table} with the specified column converted to {@link DateTime}. + */ + public static Table asDateTime(final @NotNull Table source, @NotNull final MatchPair matchPair) { + return convertTimeColumn(source, matchPair, DateTime.class); + } + // endregion + + // region to EpochNanos + /** + * Convert the specified column in the table to a {@code long} column of nanos since epoch. The column may be + * specified as a single value "Column" or a pair "NewColumn = OriginalColumn" + * + * @param source The source table + * @param column The column to convert, in {@link MatchPair} format + * @return the {@link Table} with the specified column converted to {@code long}. 
+ */ + public static Table asEpochNanos(final @NotNull Table source, @NotNull final String column) { + return asEpochNanos(source, MatchPairFactory.getExpression(Require.neqNull(column, "column"))); + } + + /** + * Convert the specified column in the table to a {@code long} column of nanos since epoch. + * + * @param source The source table + * @param matchPair The {@link MatchPair} of columns + * @return the {@link Table} with the specified column converted to {@code long}. + */ + public static Table asEpochNanos(final @NotNull Table source, @NotNull final MatchPair matchPair) { + return convertTimeColumn(source, matchPair, long.class); + } + // endregion + + /** + * Check if the supplied type is one of the supported time types. + * + * @param type the type + * @return true if the type is one of the usable time types + */ + public static boolean isTimeType(final @NotNull Class type) { + return type == DateTime.class || type == Instant.class || type == ZonedDateTime.class || + type == LocalDate.class || type == LocalTime.class; + } + + /** + * Check if the supplied time type requires a time zone for construction. 
+ * + * @param type the type + * @return true if the type requires a timezone + */ + public static boolean requiresZone(final @NotNull Class type) { + return type == ZonedDateTime.class || type == LocalDate.class || type == LocalTime.class; + } +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/copy/CopyKernel.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/copy/CopyKernel.java index e65cba7ae8a..f60bb835c52 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/copy/CopyKernel.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/copy/CopyKernel.java @@ -63,7 +63,7 @@ class Utils { * @param flipWords if true return first false bit set instead of the first true bit set * @return the index of the next set bit, any value {@code >= endIndex} is returned if no such bit exists */ - static int nextSetBit(long[] words, int fromIndex, int endIndex, boolean flipWords) { + public static int nextSetBit(long[] words, int fromIndex, int endIndex, boolean flipWords) { if (fromIndex >= endIndex) { return endIndex; } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/freezeby/FreezeByOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/freezeby/FreezeByOperator.java index cd16863f864..dc72b8b0c97 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/freezeby/FreezeByOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/freezeby/FreezeByOperator.java @@ -18,6 +18,7 @@ import io.deephaven.engine.rowset.RowSet; import org.jetbrains.annotations.NotNull; +import java.time.Instant; import java.util.Collections; import java.util.Map; @@ -116,8 +117,8 @@ private static FreezeByHelper makeHelper(WritableColumnSource source, FreezeByCo case Double: return new DoubleFreezeByHelper(source, rowCount); case Object: - if (source.getType() == DateTime.class) { - return new LongFreezeByHelper(source, 
rowCount); + if (source.getType() == DateTime.class || source.getType() == Instant.class) { + return new LongFreezeByHelper(((NanosBasedTimeArraySource) source).toEpochNano(), rowCount); } else if (source.getType() == Boolean.class) { return new BooleanFreezeByHelper(source, rowCount); } else { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/freezeby/LongFreezeByHelper.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/freezeby/LongFreezeByHelper.java index a4bdbd7fba2..633d7c9c5c7 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/freezeby/LongFreezeByHelper.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/freezeby/LongFreezeByHelper.java @@ -12,17 +12,17 @@ import io.deephaven.chunk.attributes.ChunkPositions; import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.table.WritableColumnSource; -import io.deephaven.engine.table.impl.sources.AbstractLongArraySource; +import io.deephaven.engine.table.impl.sources.LongArraySource; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.chunkattributes.RowKeys; class LongFreezeByHelper implements FreezeByOperator.FreezeByHelper { - private final AbstractLongArraySource resultSource; + private final LongArraySource resultSource; private final FreezeByCountOperator rowCount; LongFreezeByHelper(WritableColumnSource resultSource, FreezeByCountOperator rowCount) { - this.resultSource = (AbstractLongArraySource)resultSource; + this.resultSource = (LongArraySource)resultSource; this.rowCount = rowCount; } diff --git a/engine/table/src/main/java/io/deephaven/stream/StreamToTableAdapter.java b/engine/table/src/main/java/io/deephaven/stream/StreamToTableAdapter.java index e580ac75182..9cc8ebe9388 100644 --- a/engine/table/src/main/java/io/deephaven/stream/StreamToTableAdapter.java +++ b/engine/table/src/main/java/io/deephaven/stream/StreamToTableAdapter.java @@ 
-32,6 +32,7 @@ import org.jetbrains.annotations.NotNull; import java.lang.ref.WeakReference; +import java.time.Instant; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; @@ -206,6 +207,9 @@ private static SwitchColumnSource[] makeSwitchSources(TableDefinition definit if (columnDefinition.getDataType() == DateTime.class) { // noinspection unchecked visibleSource = new LongAsDateTimeColumnSource((ColumnSource) switchSource); + } else if (columnDefinition.getDataType() == Instant.class) { + // noinspection unchecked + visibleSource = new LongAsInstantColumnSource((ColumnSource) switchSource); } else if (columnDefinition.getDataType() == Boolean.class) { // noinspection unchecked visibleSource = new ByteAsBooleanColumnSource((ColumnSource) switchSource); @@ -233,7 +237,7 @@ private static void maybeClearChunkColumnSource(ColumnSource cs) { * @return the type of the inner column */ private static Class replacementType(Class columnType) { - if (columnType == DateTime.class) { + if (columnType == DateTime.class || columnType == Instant.class) { return long.class; } else if (columnType == Boolean.class) { return byte.class; diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/locations/impl/TestFunctionConsistencyMonitor.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/locations/impl/TestFunctionConsistencyMonitor.java deleted file mode 100644 index f8c6558aeda..00000000000 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/locations/impl/TestFunctionConsistencyMonitor.java +++ /dev/null @@ -1,157 +0,0 @@ -/** - * Copyright (c) 2016-2022 Deephaven Data Labs and Patent Pending - */ -package io.deephaven.engine.table.impl.locations.impl; - -import io.deephaven.time.DateTimeUtils; -import io.deephaven.util.SafeCloseable; -import junit.framework.TestCase; -import org.apache.commons.lang3.mutable.MutableBoolean; -import org.apache.commons.lang3.mutable.MutableObject; -import org.junit.Test; - 
-import java.util.concurrent.atomic.AtomicInteger; - -public class TestFunctionConsistencyMonitor { - @Test - public void testCurrentDateNy() { - DateTimeUtils.currentDateNyOverride = "Aardvark"; - TestCase.assertEquals("Aardvark", CompositeTableDataServiceConsistencyMonitor.currentDateNy()); - - try (final SafeCloseable ignored = CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { - TestCase.assertEquals("Aardvark", CompositeTableDataServiceConsistencyMonitor.currentDateNy()); - DateTimeUtils.currentDateNyOverride = "Armadillo"; - TestCase.assertEquals("Aardvark", CompositeTableDataServiceConsistencyMonitor.currentDateNy()); - } - - TestCase.assertEquals("Armadillo", CompositeTableDataServiceConsistencyMonitor.currentDateNy()); - - DateTimeUtils.currentDateNyOverride = null; - } - - - @Test - public void testMidStreamRegistration() { - DateTimeUtils.currentDateNyOverride = "Aardvark"; - TestCase.assertEquals("Aardvark", CompositeTableDataServiceConsistencyMonitor.currentDateNy()); - - final AtomicInteger atomicInteger = new AtomicInteger(7); - final FunctionConsistencyMonitor.ConsistentSupplier consistentInteger; - - try (final SafeCloseable ignored = CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { - TestCase.assertEquals("Aardvark", CompositeTableDataServiceConsistencyMonitor.currentDateNy()); - DateTimeUtils.currentDateNyOverride = "Armadillo"; - TestCase.assertEquals("Aardvark", CompositeTableDataServiceConsistencyMonitor.currentDateNy()); - - consistentInteger = new CompositeTableDataServiceConsistencyMonitor.ConsistentSupplier<>( - atomicInteger::getAndIncrement); - TestCase.assertEquals((Integer) 7, consistentInteger.get()); - TestCase.assertEquals((Integer) 7, consistentInteger.get()); - TestCase.assertEquals((Integer) 7, consistentInteger.get()); - } - - TestCase.assertEquals((Integer) 8, consistentInteger.get()); - - TestCase.assertEquals("Armadillo", CompositeTableDataServiceConsistencyMonitor.currentDateNy()); - - 
DateTimeUtils.currentDateNyOverride = null; - } - - @Test - public void testCurrentDateNyWithThreads() throws InterruptedException { - DateTimeUtils.currentDateNyOverride = "Bobcat"; - TestCase.assertEquals("Bobcat", CompositeTableDataServiceConsistencyMonitor.currentDateNy()); - - - final MutableObject mutableString = new MutableObject<>(); - - Thread t; - try (final SafeCloseable ignored = CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { - TestCase.assertEquals("Bobcat", CompositeTableDataServiceConsistencyMonitor.currentDateNy()); - - t = new Thread(() -> { - synchronized (mutableString) { - // do nothing - } - try (final SafeCloseable ignored2 = CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { - mutableString.setValue(CompositeTableDataServiceConsistencyMonitor.currentDateNy()); - } - }); - synchronized (mutableString) { - t.start(); - DateTimeUtils.currentDateNyOverride = "Bear"; - } - - TestCase.assertEquals("Bobcat", CompositeTableDataServiceConsistencyMonitor.currentDateNy()); - } - - t.join(1000); - TestCase.assertEquals("Bear", mutableString.getValue()); - - mutableString.setValue(null); - - final MutableObject mutableString2 = new MutableObject<>(); - final MutableObject mutableString3 = new MutableObject<>(); - final MutableBoolean mutableBoolean = new MutableBoolean(false); - final MutableBoolean gotValueOnce = new MutableBoolean(false); - - try (final SafeCloseable ignored = CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { - TestCase.assertEquals("Bear", CompositeTableDataServiceConsistencyMonitor.currentDateNy()); - - t = new Thread(() -> { - try (final SafeCloseable ignored2 = CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { - synchronized (mutableString) { - // do nothing - } - mutableString.setValue(CompositeTableDataServiceConsistencyMonitor.currentDateNy()); - synchronized (gotValueOnce) { - gotValueOnce.setTrue(); - gotValueOnce.notifyAll(); - } - while (true) { - synchronized 
(mutableBoolean) { - if (mutableBoolean.booleanValue()) - break; - try { - mutableBoolean.wait(); - } catch (InterruptedException ignored3) { - } - } - } - mutableString3.setValue(DateTimeUtils.currentDateNy()); - mutableString2.setValue(CompositeTableDataServiceConsistencyMonitor.currentDateNy()); - } - }); - synchronized (mutableString) { - t.start(); - DateTimeUtils.currentDateNyOverride = "Butterfly"; - } - - TestCase.assertEquals("Bear", CompositeTableDataServiceConsistencyMonitor.currentDateNy()); - } - - try (final SafeCloseable ignored = CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { - while (true) { - synchronized (gotValueOnce) { - if (gotValueOnce.booleanValue()) { - break; - } - gotValueOnce.wait(); - } - } - synchronized (mutableBoolean) { - DateTimeUtils.currentDateNyOverride = "Buffalo"; - mutableBoolean.setTrue(); - mutableBoolean.notifyAll(); - } - TestCase.assertEquals("Buffalo", CompositeTableDataServiceConsistencyMonitor.consistentDateNy()); - } - - t.join(1000); - TestCase.assertEquals("Butterfly", mutableString.getValue()); - TestCase.assertEquals("Butterfly", mutableString2.getValue()); - TestCase.assertEquals("Buffalo", mutableString3.getValue()); - - DateTimeUtils.currentDateNyOverride = null; - } -} diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestReinterpretedColumn.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestReinterpretedColumn.java new file mode 100644 index 00000000000..4413b58421e --- /dev/null +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestReinterpretedColumn.java @@ -0,0 +1,517 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.table.impl.select; + +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.rowset.RowSet; +import 
io.deephaven.engine.rowset.RowSetFactory; +import io.deephaven.engine.table.ChunkSource; +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.Table; +import io.deephaven.engine.table.TableDefinition; +import io.deephaven.engine.table.WritableColumnSource; +import io.deephaven.engine.table.impl.QueryTable; +import io.deephaven.engine.table.impl.sources.DateTimeArraySource; +import io.deephaven.engine.table.impl.sources.DateTimeSparseArraySource; +import io.deephaven.engine.table.impl.sources.InstantArraySource; +import io.deephaven.engine.table.impl.sources.InstantSparseArraySource; +import io.deephaven.engine.table.impl.sources.LongArraySource; +import io.deephaven.engine.table.impl.sources.LongSparseArraySource; +import io.deephaven.engine.table.impl.sources.ObjectArraySource; +import io.deephaven.engine.table.impl.sources.ObjectSparseArraySource; +import io.deephaven.engine.table.impl.sources.ZonedDateTimeArraySource; +import io.deephaven.engine.table.impl.sources.ZonedDateTimeSparseArraySource; +import io.deephaven.engine.table.impl.util.TableTimeConversions; +import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; +import io.deephaven.time.DateTime; +import io.deephaven.time.DateTimeUtils; +import org.apache.commons.lang3.mutable.MutableInt; +import org.junit.Test; + +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.function.BiFunction; +import java.util.function.Consumer; +import java.util.function.Function; + +public class TestReinterpretedColumn extends RefreshingTableTestCase { + final int ROW_COUNT = 60; + private final long baseLongTime = DateTimeUtils.convertDateTime("2021-10-20T09:30:00.000 NY").getNanos(); + private final DateTime baseDBDateTime = DateTimeUtils.convertDateTime("2021-10-19T10:30:00.000 NY"); + private final ZonedDateTime baseZDT 
= ZonedDateTime.of(2021, 10, 18, 11, 30, 0, 0, ZoneId.of("America/New_York")); + private final Instant baseInstant = DateTimeUtils.convertDateTime("2021-10-17T12:30:00.000 NY").getInstant(); + + private QueryTable baseTable; + private QueryTable sparseBaseTable; + private QueryTable objectTable; + private QueryTable sparseObjectTable; + // private QueryTable regionedTable; + + @Override + public void setUp() throws Exception { + super.setUp(); + + baseTable = makeTable(new LongArraySource(), + new DateTimeArraySource(), + new InstantArraySource(), + new ZonedDateTimeArraySource(ZoneId.of("America/New_York"))); + + sparseBaseTable = makeTable(new LongSparseArraySource(), + new DateTimeSparseArraySource(), + new InstantSparseArraySource(), + new ZonedDateTimeSparseArraySource(ZoneId.of("America/New_York"))); + + objectTable = makeObjectTable(new LongArraySource(), + new ObjectArraySource<>(DateTime.class), + new ObjectArraySource<>(Instant.class), + new ObjectArraySource<>(ZonedDateTime.class)); + + sparseObjectTable = makeObjectTable(new LongSparseArraySource(), + new ObjectSparseArraySource<>(DateTime.class), + new ObjectSparseArraySource<>(Instant.class), + new ObjectSparseArraySource<>(ZonedDateTime.class)); + + // regionedTable = makeRegioned(); + } + + private QueryTable makeObjectTable(WritableColumnSource longSource, WritableColumnSource dtSource, + WritableColumnSource iSource, WritableColumnSource zdtSource) { + longSource.ensureCapacity(ROW_COUNT); + dtSource.ensureCapacity(ROW_COUNT); + iSource.ensureCapacity(ROW_COUNT); + zdtSource.ensureCapacity(ROW_COUNT); + + for (int ii = 0; ii < ROW_COUNT; ii++) { + final long tOff = ii * 60 * 1_000_000_000L; + longSource.set(ii, Long.valueOf(baseLongTime + tOff)); + dtSource.set(ii, DateTimeUtils.nanosToTime(baseDBDateTime.getNanos() + tOff)); + iSource.set(ii, DateTimeUtils.makeInstant(DateTimeUtils.toEpochNano(baseInstant) + tOff)); + zdtSource.set(ii, 
DateTimeUtils.makeZonedDateTime(DateTimeUtils.toEpochNano(baseZDT) + tOff, + ZoneId.of("America/New_York"))); + } + + final Map> cols = new LinkedHashMap<>(); + cols.put("L", longSource); + cols.put("DT", dtSource); + cols.put("I", iSource); + cols.put("ZDT", zdtSource); + + return new QueryTable(RowSetFactory.flat(ROW_COUNT).toTracking(), cols); + } + + private QueryTable makeTable(WritableColumnSource longSource, WritableColumnSource dtSource, + WritableColumnSource iSource, WritableColumnSource zdtSource) { + longSource.ensureCapacity(ROW_COUNT); + dtSource.ensureCapacity(ROW_COUNT); + iSource.ensureCapacity(ROW_COUNT); + zdtSource.ensureCapacity(ROW_COUNT); + + for (int ii = 0; ii < ROW_COUNT; ii++) { + final long tOff = ii * 60 * 1_000_000_000L; + longSource.set(ii, baseLongTime + tOff); + dtSource.set(ii, baseDBDateTime.getNanos() + tOff); + iSource.set(ii, DateTimeUtils.toEpochNano(baseInstant) + tOff); + zdtSource.set(ii, DateTimeUtils.toEpochNano(baseZDT) + tOff); + } + + final Map> cols = new LinkedHashMap<>(); + cols.put("L", longSource); + cols.put("DT", dtSource); + cols.put("I", iSource); + cols.put("ZDT", zdtSource); + + return new QueryTable(RowSetFactory.flat(ROW_COUNT).toTracking(), cols); + } + + // private QueryTable makeRegioned() throws IOException { + // final Path rootPath = Files.createTempDirectory(Paths.get(Configuration.getInstance().getWorkspacePath()), + // "TestReinterpret"); + // final File rootFile = rootPath.toFile(); + // final SchemaService schemaService = + // SchemaServiceFactoryForTest.getTransientInstance(Configuration.getInstance()); + // + // final Path namespacePath = rootPath.resolve(Paths.get("Intraday", "TR", "TR")); + // namespacePath.toFile().mkdirs(); + // + // Configuration.getInstance().setProperty("IrisDB.permissionFilterProvider", "null"); + // PermissionFilterProvider.FACTORY.reload(); + // + // final LocalTablePathManager pathManager = new LocalTablePathManager(rootFile); + // final OnDiskQueryDatabase db = new 
OnDiskQueryDatabase(Logger.NULL, rootFile, new + // LocalTableDataService(pathManager), schemaService); + // db.setUserContext(null, new SimpleUserContext("nobody", "nobody")); + // + // final TableDefinition forNPT = baseTable.updateView("PC=`1`").getDefinition(); + // forNPT.setStorageType(DefaultTableDefinition.STORAGETYPE_NESTEDPARTITIONEDONDISK); + // forNPT.setNamespace("TR"); + // forNPT.setName("TR"); + // forNPT.getColumn("PC").setColumnType(DefaultColumnDefinition.COLUMNTYPE_PARTITIONING); + // forNPT.getColumn("ZDT").setObjectCodecClass(ZonedDateTimeCodec.class.getName()); + // + // schemaService.createNamespace(NamespaceSet.SYSTEM, "TR"); + // schemaService.addSchema(schemaService.fromDefinition(forNPT, "TR", "TR", + // TableDefinition.STORAGETYPE_NESTEDPARTITIONEDONDISK, NamespaceSet.SYSTEM)); + // + // final List slices = new ArrayList<>(); + // for(int ii = 0; ii< ROW_COUNT; ii += 10) { + // slices.add(baseTable.slice(ii, Math.min(baseTable.size(), ii + 10))); + // } + // + // final File[] dests = new File[slices.size()]; + // for(int ii = 0; ii < slices.size(); ii++) { + // dests[ii] = pathManager.getLocation(new FullTableLocationKey("TR", "TR", TableType.SYSTEM_INTRADAY, + // Integer.toString(ii), "1")); + // } + // + // TableManagementTools.writeTables(slices.toArray(Table.ZERO_LENGTH_TABLE_ARRAY), forNPT.getWritable(), dests); + // + // return (QueryTable) db.i("TR", "TR").where(); + // } + + private long computeTimeDiff(final int iteration, boolean invert) { + return (invert ? 
ROW_COUNT - iteration - 1 : iteration) * 60 * 1_000_000_000L; + } + + @Test + public void testReinterpretLong() { + testReinterpretLong(baseTable, false, false); + testReinterpretLong(baseTable, false, true); + testReinterpretLong(sparseBaseTable, false, false); + testReinterpretLong(sparseBaseTable, false, true); + testReinterpretLong(objectTable, false, false); + testReinterpretLong(objectTable, false, true); + testReinterpretLong(sparseObjectTable, false, false); + testReinterpretLong(sparseObjectTable, false, true); + // testReinterpretLong(regionedTable, false, false); + // testReinterpretLong(regionedTable, false, true); + } + + private void testReinterpretLong(final Table initial, boolean isSorted, boolean withRename) { + final String lColName = withRename ? "R_L" : "L"; + final String dtColName = withRename ? "R_DT" : "DT"; + final String iColName = withRename ? "R_I" : "I"; + final String zdtColName = withRename ? "R_ZDT" : "ZDT"; + + // Make everything a long + Table table = TableTimeConversions.asEpochNanos(initial, lColName + "=L"); + table = TableTimeConversions.asEpochNanos(table, dtColName + "=DT"); + table = TableTimeConversions.asEpochNanos(table, iColName + "=I"); + table = TableTimeConversions.asEpochNanos(table, zdtColName + "=ZDT"); + + TableDefinition td = table.getDefinition(); + assertEquals(long.class, td.getColumn(lColName).getDataType()); + if (!withRename) { + assertEquals(initial.getColumnSource("L"), table.getColumnSource(lColName)); + } + assertEquals(long.class, td.getColumn(dtColName).getDataType()); + assertEquals(long.class, td.getColumn(iColName).getDataType()); + assertEquals(long.class, td.getColumn(zdtColName).getDataType()); + + final MutableInt ii = new MutableInt(0); + for (final RowSet.Iterator it = table.getRowSet().iterator(); it.hasNext();) { + final long key = it.nextLong(); + final long tOff = computeTimeDiff(ii.getAndIncrement(), isSorted); + if (table.getColumnSource(lColName) instanceof ObjectArraySource + || 
table.getColumnSource(lColName) instanceof ObjectSparseArraySource) { + assertEquals(baseLongTime + tOff, table.getColumnSource(lColName).get(key)); + } else { + assertEquals(baseLongTime + tOff, table.getColumnSource(lColName).getLong(key)); + } + assertEquals(baseDBDateTime.getNanos() + tOff, table.getColumnSource(dtColName).getLong(key)); + assertEquals(DateTimeUtils.toEpochNano(baseInstant) + tOff, table.getColumnSource(iColName).getLong(key)); + assertEquals(DateTimeUtils.toEpochNano(baseZDT) + tOff, table.getColumnSource(zdtColName).getLong(key)); + } + + // Repeat the same comparisons, but actuate fillChunk instead + reinterpLongChunkCheck(table.getColumnSource(lColName), table.getRowSet(), isSorted, baseLongTime); + reinterpLongChunkCheck(table.getColumnSource(dtColName), table.getRowSet(), isSorted, + baseDBDateTime.getNanos()); + reinterpLongChunkCheck(table.getColumnSource(iColName), table.getRowSet(), isSorted, + DateTimeUtils.toEpochNano(baseInstant)); + reinterpLongChunkCheck(table.getColumnSource(zdtColName), table.getRowSet(), isSorted, + DateTimeUtils.toEpochNano(baseZDT)); + + if (!isSorted) { + testReinterpretLong(initial.sortDescending("L"), true, withRename); + } + } + + private void reinterpLongChunkCheck(final ColumnSource cs, RowSet rowSet, final boolean isSorted, + final long baseNanos) { + try (final ChunkSource.FillContext fc = cs.makeFillContext(64); + final WritableLongChunk chunk = WritableLongChunk.makeWritableChunk(64)) { + cs.fillChunk(fc, chunk, rowSet); + + for (int ii = 0; ii < chunk.size(); ii++) { + final long tOff = computeTimeDiff(ii, isSorted); + assertEquals(baseNanos + tOff, chunk.get(ii)); + } + } + } + + private void doReinterpretTestBasic(final Table initial, + final Class expectedType, + final BiFunction reinterpreter, + String equalColumn, + Function toNanoFunc) { + doReinterpretTestBasic(initial, expectedType, reinterpreter, equalColumn, toNanoFunc, false, t -> { + }, false); + } + + private void 
doReinterpretTestBasic(final Table initial, + final Class expectedType, + final BiFunction reinterpreter, + String equalColumn, + Function toNanoFunc, + Consumer extraCheck) { + doReinterpretTestBasic(initial, expectedType, reinterpreter, equalColumn, toNanoFunc, false, extraCheck, false); + } + + @SuppressWarnings("unchecked") + private void doReinterpretTestBasic(final Table initial, + final Class expectedType, + final BiFunction reinterpreter, + String equalColumn, + Function toNanoFunc, + boolean isSorted, + Consumer extraCheck, + boolean withRename) { + final String lColName = withRename ? "R_L" : "L"; + final String dtColName = withRename ? "R_DT" : "DT"; + final String iColName = withRename ? "R_I" : "I"; + final String zdtColName = withRename ? "R_ZDT" : "ZDT"; + + // Make everything a DBDateTime + Table table = reinterpreter.apply(initial, lColName + "=L"); + table = reinterpreter.apply(table, dtColName + "=DT"); + table = reinterpreter.apply(table, iColName + "=I"); + table = reinterpreter.apply(table, zdtColName + "=ZDT"); + + TableDefinition td = table.getDefinition(); + assertEquals(expectedType, td.getColumn(lColName).getDataType()); + assertEquals(expectedType, td.getColumn(dtColName).getDataType()); + assertEquals(expectedType, td.getColumn(iColName).getDataType()); + assertEquals(expectedType, td.getColumn(zdtColName).getDataType()); + + if (equalColumn != null && !withRename) { + assertEquals(initial.getColumnSource(equalColumn), table.getColumnSource(equalColumn)); + } + + final MutableInt ii = new MutableInt(0); + for (final RowSet.Iterator it = table.getRowSet().iterator(); it.hasNext();) { + final long key = it.nextLong(); + final long tOff = computeTimeDiff(ii.getAndIncrement(), isSorted); + assertEquals(baseLongTime + tOff, + (long) toNanoFunc.apply((T) table.getColumnSource(lColName).get(key))); + extraCheck.accept((T) table.getColumnSource(lColName).get(key)); + assertEquals(baseDBDateTime.getNanos() + tOff, + (long) toNanoFunc.apply((T) 
table.getColumnSource(dtColName).get(key))); + extraCheck.accept((T) table.getColumnSource(dtColName).get(key)); + assertEquals(DateTimeUtils.toEpochNano(baseInstant) + tOff, + (long) toNanoFunc.apply((T) table.getColumnSource(iColName).get(key))); + extraCheck.accept((T) table.getColumnSource(iColName).get(key)); + assertEquals(DateTimeUtils.toEpochNano(baseZDT) + tOff, + (long) toNanoFunc.apply((T) table.getColumnSource(zdtColName).get(key))); + extraCheck.accept((T) table.getColumnSource(zdtColName).get(key)); + } + + // Repeat the same comparisons, but actuate fillChunk instead + reinterpBasicChunkCheck(table.getColumnSource(lColName), table.getRowSet(), toNanoFunc, isSorted, + baseLongTime, extraCheck); + reinterpBasicChunkCheck(table.getColumnSource(dtColName), table.getRowSet(), toNanoFunc, isSorted, + baseDBDateTime.getNanos(), extraCheck); + reinterpBasicChunkCheck(table.getColumnSource(iColName), table.getRowSet(), toNanoFunc, isSorted, + DateTimeUtils.toEpochNano(baseInstant), extraCheck); + reinterpBasicChunkCheck(table.getColumnSource(zdtColName), table.getRowSet(), toNanoFunc, isSorted, + DateTimeUtils.toEpochNano(baseZDT), extraCheck); + + if (!isSorted) { + doReinterpretTestBasic(initial.sortDescending("L"), expectedType, reinterpreter, equalColumn, toNanoFunc, + true, extraCheck, withRename); + } + } + + private void reinterpBasicChunkCheck(final ColumnSource cs, final RowSet rowSet, + final Function toNanoFunc, final boolean isSorted, final long baseNanos, + final Consumer extraCheck) { + try (final ChunkSource.FillContext fc = cs.makeFillContext(64); + final WritableObjectChunk chunk = WritableObjectChunk.makeWritableChunk(64)) { + cs.fillChunk(fc, chunk, rowSet); + + for (int ii = 0; ii < chunk.size(); ii++) { + final long tOff = computeTimeDiff(ii, isSorted); + assertEquals(baseNanos + tOff, (long) toNanoFunc.apply(chunk.get(ii))); + extraCheck.accept(chunk.get(ii)); + } + } + } + + @Test + public void testReinterpretDBDT() { + 
doReinterpretTestBasic( + baseTable, DateTime.class, TableTimeConversions::asDateTime, "DT", DateTimeUtils::nanos); + doReinterpretTestBasic( + sparseBaseTable, DateTime.class, TableTimeConversions::asDateTime, "DT", DateTimeUtils::nanos); + doReinterpretTestBasic( + objectTable, DateTime.class, TableTimeConversions::asDateTime, "DT", DateTimeUtils::nanos); + doReinterpretTestBasic( + sparseObjectTable, DateTime.class, TableTimeConversions::asDateTime, "DT", DateTimeUtils::nanos); + // doReinterpretTestBasic( + // regionedTable, DateTime.class, TableTimeConversions::asDateTime, "DT", DateTimeUtils::nanos); + } + + @Test + public void testReinterpretInstant() { + doReinterpretTestBasic( + baseTable, Instant.class, TableTimeConversions::asInstant, "I", DateTimeUtils::toEpochNano); + doReinterpretTestBasic( + sparseBaseTable, Instant.class, TableTimeConversions::asInstant, "I", DateTimeUtils::toEpochNano); + doReinterpretTestBasic( + objectTable, Instant.class, TableTimeConversions::asInstant, "I", DateTimeUtils::toEpochNano); + doReinterpretTestBasic( + sparseObjectTable, Instant.class, TableTimeConversions::asInstant, "I", DateTimeUtils::toEpochNano); + // doReinterpretTestBasic( + // regionedTable, Instant.class, TableTimeConversions::asInstant, "I", DateTimeUtils::toEpochNano); + } + + @Test + public void testReinterpretZdt() { + final Consumer extraCheck = + zdt -> assertTrue(zdt == null || zdt.getZone().equals(ZoneId.of("America/Chicago"))); + + doReinterpretTestBasic(baseTable, ZonedDateTime.class, + (t, c) -> TableTimeConversions.asZonedDateTime(t, c, "America/Chicago"), + null, DateTimeUtils::toEpochNano, extraCheck); + doReinterpretTestBasic(sparseBaseTable, ZonedDateTime.class, + (t, c) -> TableTimeConversions.asZonedDateTime(t, c, "America/Chicago"), + null, DateTimeUtils::toEpochNano, extraCheck); + doReinterpretTestBasic(objectTable, ZonedDateTime.class, + (t, c) -> TableTimeConversions.asZonedDateTime(t, c, "America/Chicago"), + null, 
DateTimeUtils::toEpochNano, extraCheck); + doReinterpretTestBasic(sparseObjectTable, ZonedDateTime.class, + (t, c) -> TableTimeConversions.asZonedDateTime(t, c, "America/Chicago"), + null, DateTimeUtils::toEpochNano, extraCheck); + // doReinterpretTestBasic(regionedTable, ZonedDateTime.class, + // (t, c) -> TableTimeConversions.asZonedDateTime(t, c, "America/Chicago"), + // null, DateTimeUtils::toEpochNano, extraCheck); + } + + private void reinterpWrappedChunkCheck(final ColumnSource cs, RowSet rowSet, final boolean isSorted, + final BiFunction expectedSupplier) { + try (final ChunkSource.FillContext fc = cs.makeFillContext(64); + final WritableObjectChunk chunk = WritableObjectChunk.makeWritableChunk(64)) { + cs.fillChunk(fc, chunk, rowSet); + + for (int ii = 0; ii < chunk.size(); ii++) { + assertEquals(expectedSupplier.apply(ii, isSorted), chunk.get(ii)); + } + } + } + + @Test + public void testReinterpretLocalDate() { + doTestReinterpretLocalDate(baseTable, false); + doTestReinterpretLocalDate(sparseBaseTable, false); + doTestReinterpretLocalDate(objectTable, false); + doTestReinterpretLocalDate(sparseObjectTable, false); + // doTestReinterpretLocalDate(regionedTable, false); + } + + private void doTestReinterpretLocalDate(final Table initial, boolean sorted) { + Table table = TableTimeConversions.asLocalDate(initial, "L", "America/Chicago"); + table = TableTimeConversions.asLocalDate(table, "DT", "America/Chicago"); + table = TableTimeConversions.asLocalDate(table, "I", "America/Chicago"); + table = TableTimeConversions.asLocalDate(table, "ZDT", "America/Chicago"); + + TableDefinition td = table.getDefinition(); + assertEquals(LocalDate.class, td.getColumn("L").getDataType()); + assertEquals(LocalDate.class, td.getColumn("DT").getDataType()); + assertEquals(LocalDate.class, td.getColumn("I").getDataType()); + assertEquals(LocalDate.class, td.getColumn("ZDT").getDataType()); + + for (final RowSet.Iterator it = table.getRowSet().iterator(); it.hasNext();) { + 
final long key = it.nextLong(); + assertEquals(LocalDate.of(2021, 10, 20), table.getColumnSource("L").get(key)); + assertEquals(LocalDate.of(2021, 10, 19), table.getColumnSource("DT").get(key)); + assertEquals(LocalDate.of(2021, 10, 18), table.getColumnSource("ZDT").get(key)); + assertEquals(LocalDate.of(2021, 10, 17), table.getColumnSource("I").get(key)); + } + + reinterpWrappedChunkCheck( + table.getColumnSource("L"), table.getRowSet(), sorted, (i, s) -> LocalDate.of(2021, 10, 20)); + reinterpWrappedChunkCheck( + table.getColumnSource("DT"), table.getRowSet(), sorted, (i, s) -> LocalDate.of(2021, 10, 19)); + reinterpWrappedChunkCheck( + table.getColumnSource("ZDT"), table.getRowSet(), sorted, (i, s) -> LocalDate.of(2021, 10, 18)); + reinterpWrappedChunkCheck( + table.getColumnSource("I"), table.getRowSet(), sorted, (i, s) -> LocalDate.of(2021, 10, 17)); + + if (!sorted) { + doTestReinterpretLocalDate(initial.sortDescending("L"), true); + } + } + + @Test + public void testReinterpretLocalTime() { + doTestReinterpretLocalTime(baseTable, false); + doTestReinterpretLocalTime(sparseBaseTable, false); + doTestReinterpretLocalTime(objectTable, false); + doTestReinterpretLocalTime(sparseObjectTable, false); + // doTestReinterpretLocalTime(regionedTable, false); + } + + private void doTestReinterpretLocalTime(final Table initial, boolean sorted) { + Table table = TableTimeConversions.asLocalTime(initial, "L", "America/Chicago"); + table = TableTimeConversions.asLocalTime(table, "DT", "America/Chicago"); + table = TableTimeConversions.asLocalTime(table, "I", "America/Chicago"); + table = TableTimeConversions.asLocalTime(table, "ZDT", "America/Chicago"); + + TableDefinition td = table.getDefinition(); + assertEquals(LocalTime.class, td.getColumn("L").getDataType()); + assertEquals(LocalTime.class, td.getColumn("DT").getDataType()); + assertEquals(LocalTime.class, td.getColumn("I").getDataType()); + assertEquals(LocalTime.class, td.getColumn("ZDT").getDataType()); + + final 
MutableInt ii = new MutableInt(0); + for (final RowSet.Iterator it = table.getRowSet().iterator(); it.hasNext();) { + final long key = it.nextLong(); + final int localII = ii.getAndIncrement(); + final int startIter = sorted ? ROW_COUNT - localII - 1 : localII; + final int hourOff = startIter / 30; + final int minute = (startIter + 30) % 60; + assertEquals(LocalTime.of(8 + hourOff, minute, 0), table.getColumnSource("L").get(key)); + assertEquals(LocalTime.of(9 + hourOff, minute, 0), table.getColumnSource("DT").get(key)); + assertEquals(LocalTime.of(10 + hourOff, minute, 0), table.getColumnSource("ZDT").get(key)); + assertEquals(LocalTime.of(11 + hourOff, minute, 0), table.getColumnSource("I").get(key)); + } + + reinterpWrappedChunkCheck( + table.getColumnSource("L"), table.getRowSet(), sorted, (i, s) -> makeLocalTime(8, i, s)); + reinterpWrappedChunkCheck( + table.getColumnSource("DT"), table.getRowSet(), sorted, (i, s) -> makeLocalTime(9, i, s)); + reinterpWrappedChunkCheck( + table.getColumnSource("ZDT"), table.getRowSet(), sorted, (i, s) -> makeLocalTime(10, i, s)); + reinterpWrappedChunkCheck( + table.getColumnSource("I"), table.getRowSet(), sorted, (i, s) -> makeLocalTime(11, i, s)); + + if (!sorted) { + doTestReinterpretLocalTime(initial.sortDescending("L"), true); + } + } + + private LocalTime makeLocalTime(int hour, int ii, boolean sorted) { + final int startIter = sorted ? 
ROW_COUNT - ii - 1 : ii; + final int hourOff = startIter / 30; + final int minute = (startIter + 30) % 60; + + return LocalTime.of(hour + hourOff, minute, 0); + } +} diff --git a/engine/table/src/test/java/io/deephaven/engine/util/TestCompileSimpleFunction.java b/engine/table/src/test/java/io/deephaven/engine/util/TestCompileSimpleFunction.java index 2eb68f4aa36..8c5a5a1bace 100644 --- a/engine/table/src/test/java/io/deephaven/engine/util/TestCompileSimpleFunction.java +++ b/engine/table/src/test/java/io/deephaven/engine/util/TestCompileSimpleFunction.java @@ -4,12 +4,9 @@ package io.deephaven.engine.util; import io.deephaven.engine.context.TestExecutionContext; -import io.deephaven.time.DateTimeUtils; import io.deephaven.util.SafeCloseable; import junit.framework.TestCase; -import java.util.Collections; - public class TestCompileSimpleFunction extends TestCase { private SafeCloseable executionContext; @@ -30,12 +27,6 @@ public void testString() { TestCase.assertEquals("Hello, world", res); } - public void testImport() { - String res = DynamicCompileUtils.compileSimpleFunction(String.class, "return currentDateNy()", - Collections.emptyList(), Collections.singleton(DateTimeUtils.class)).get(); - TestCase.assertEquals(DateTimeUtils.currentDateNy(), res); - } - public void testNotString() { try { DynamicCompileUtils.compileSimpleFunction(String.class, "return 7"); diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/ColumnInfo.java b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/ColumnInfo.java index 32df59a20f7..c1bbe67458f 100644 --- a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/ColumnInfo.java +++ b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/ColumnInfo.java @@ -9,6 +9,7 @@ import io.deephaven.engine.testutil.sources.ImmutableColumnHolder; import io.deephaven.time.DateTime; +import java.time.Instant; import java.util.Arrays; import java.util.Random; @@ -40,7 +41,7 @@ public 
ColumnInfo(TestDataGenerator generator, String name, ColAttributes. public ColumnHolder generateInitialColumn(RowSet rowSet, Random random) { final Chunk initialData = generator.populateChunk(rowSet, random); - if (dataType == Long.class && type == DateTime.class) { + if (dataType == Long.class && (type == DateTime.class || type == Instant.class)) { Require.eqFalse(immutable, "immutable"); Require.eqFalse(grouped, "grouped"); return ColumnHolder.getDateTimeColumnHolder(name, false, initialData); diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/TstUtils.java b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/TstUtils.java index 4e2b9f9658a..cc8e7dbb51c 100644 --- a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/TstUtils.java +++ b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/TstUtils.java @@ -32,10 +32,12 @@ import io.deephaven.engine.testutil.sources.CharTestSource; import io.deephaven.engine.testutil.sources.ImmutableDoubleTestSource; import io.deephaven.engine.testutil.sources.ImmutableFloatTestSource; +import io.deephaven.engine.testutil.sources.ImmutableInstantTestSource; import io.deephaven.engine.testutil.sources.ImmutableIntTestSource; import io.deephaven.engine.testutil.sources.ImmutableLongTestSource; import io.deephaven.engine.testutil.sources.ImmutableObjectTestSource; import io.deephaven.engine.testutil.sources.ImmutableShortTestSource; +import io.deephaven.engine.testutil.sources.InstantTestSource; import io.deephaven.engine.testutil.sources.IntTestSource; import io.deephaven.engine.testutil.sources.LongTestSource; import io.deephaven.engine.testutil.sources.ObjectTestSource; @@ -63,6 +65,7 @@ import java.lang.reflect.Array; import java.math.BigDecimal; import java.nio.charset.StandardCharsets; +import java.time.Instant; import java.util.*; import java.util.function.BiConsumer; @@ -624,6 +627,9 @@ private static ColumnSource getTestColumnSourceFromChunk( } else if (unboxedType == 
DateTime.class) { // noinspection unchecked result = (AbstractColumnSource) new ImmutableDateTimeTestSource(rowSet, chunkData); + } else if (unboxedType == Instant.class) { + // noinspection unchecked + result = (AbstractColumnSource) new ImmutableInstantTestSource(rowSet, chunkData); } else { result = new ImmutableObjectTestSource<>(columnHolder.dataType, rowSet, chunkData); } @@ -653,6 +659,9 @@ private static ColumnSource getTestColumnSourceFromChunk( } else if (unboxedType == DateTime.class) { // noinspection unchecked result = (AbstractColumnSource) new DateTimeTestSource(rowSet, chunkData); + } else if (unboxedType == Instant.class) { + // noinspection unchecked + result = (AbstractColumnSource) new InstantTestSource(rowSet, chunkData); } else { result = new ObjectTestSource<>(columnHolder.dataType, rowSet, chunkData); } diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableByteTestSource.java b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableByteTestSource.java index 4e5dd8342d8..e9ed4f64c5a 100644 --- a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableByteTestSource.java +++ b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableByteTestSource.java @@ -26,8 +26,8 @@ /** * A test column source that ignores modifications, throws on removals, and adds when requested. *

- * It uses a fastutil open addressed hash map from long RowSet keys to byte values. If an uninitialized key is accessed; - * then an IllegalStateException is thrown. If the test framework attempts to remove or shift values, then an + * It uses a fastutil open addressed hash map from long RowSet keys to column values. If an uninitialized key is + * accessed; then an IllegalStateException is thrown. If the test framework attempts to remove or shift values, then an * UnsupportedOperationException is thrown. */ public class ImmutableByteTestSource extends AbstractColumnSource diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableCharTestSource.java b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableCharTestSource.java index 6c4fd634a9c..41bbeaad95c 100644 --- a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableCharTestSource.java +++ b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableCharTestSource.java @@ -21,8 +21,8 @@ /** * A test column source that ignores modifications, throws on removals, and adds when requested. *

- * It uses a fastutil open addressed hash map from long RowSet keys to char values. If an uninitialized key is accessed; - * then an IllegalStateException is thrown. If the test framework attempts to remove or shift values, then an + * It uses a fastutil open addressed hash map from long RowSet keys to column values. If an uninitialized key is + * accessed; then an IllegalStateException is thrown. If the test framework attempts to remove or shift values, then an * UnsupportedOperationException is thrown. */ public class ImmutableCharTestSource extends AbstractColumnSource diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableDoubleTestSource.java b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableDoubleTestSource.java index 3732169ff0f..1c79583675f 100644 --- a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableDoubleTestSource.java +++ b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableDoubleTestSource.java @@ -26,7 +26,7 @@ /** * A test column source that ignores modifications, throws on removals, and adds when requested. *

- * It uses a fastutil open addressed hash map from long RowSet keys to double values. If an uninitialized key is + * It uses a fastutil open addressed hash map from long RowSet keys to column values. If an uninitialized key is * accessed; then an IllegalStateException is thrown. If the test framework attempts to remove or shift values, then an * UnsupportedOperationException is thrown. */ diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableFloatTestSource.java b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableFloatTestSource.java index 290d8a98a97..47480743bf7 100644 --- a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableFloatTestSource.java +++ b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableFloatTestSource.java @@ -26,7 +26,7 @@ /** * A test column source that ignores modifications, throws on removals, and adds when requested. *

- * It uses a fastutil open addressed hash map from long RowSet keys to float values. If an uninitialized key is + * It uses a fastutil open addressed hash map from long RowSet keys to column values. If an uninitialized key is * accessed; then an IllegalStateException is thrown. If the test framework attempts to remove or shift values, then an * UnsupportedOperationException is thrown. */ diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableInstantTestSource.java b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableInstantTestSource.java new file mode 100644 index 00000000000..46649a132d2 --- /dev/null +++ b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableInstantTestSource.java @@ -0,0 +1,134 @@ +/** + * Copyright (c) 2016-2022 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.testutil.sources; + +import io.deephaven.chunk.Chunk; +import io.deephaven.chunk.ChunkType; +import io.deephaven.chunk.LongChunk; +import io.deephaven.chunk.ObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.impl.AbstractColumnSource; +import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; +import io.deephaven.time.DateTime; +import io.deephaven.engine.rowset.RowSet; +import io.deephaven.time.DateTimeUtils; +import io.deephaven.util.QueryConstants; +import io.deephaven.util.type.TypeUtils; +import org.jetbrains.annotations.NotNull; + +import java.time.Instant; + +/** + * Instant column source that wraps and delegates the storage to an {@code ImmutableLongTestSource}. This also + * provides an interface so this column can be interpreted as a long column (through UnboxedInstantTestSource). 
+ */ +public class ImmutableInstantTestSource extends AbstractColumnSource + implements MutableColumnSourceGetDefaults.ForObject, TestColumnSource { + + private final ImmutableLongTestSource longTestSource; + private final UnboxedInstantTestSource alternateColumnSource; + + /** + * Create a new ImmutableInstantTestSource with no initial data. + */ + public ImmutableInstantTestSource() { + super(Instant.class); + this.longTestSource = new ImmutableLongTestSource(); + this.alternateColumnSource = new UnboxedInstantTestSource(this, longTestSource); + } + + /** + * Create a new ImmutableInstantTestSource with the given rowSet and data. + * + * @param rowSet The row indexes for the initial data + * @param data The initial data + */ + public ImmutableInstantTestSource(RowSet rowSet, Chunk data) { + super(Instant.class); + if (data.getChunkType() == ChunkType.Long) { + this.longTestSource = new ImmutableLongTestSource(rowSet, data.asLongChunk()); + } else { + this.longTestSource = new ImmutableLongTestSource(rowSet, mapData(data.asObjectChunk())); + } + this.alternateColumnSource = new UnboxedInstantTestSource(this, longTestSource); + } + + private LongChunk mapData(ObjectChunk data) { + final long[] result = new long[data.size()]; + if (result.length > 0 && data.get(0) instanceof Long) { + final ObjectChunk boxedLongChunk = data.asObjectChunk(); + for (int ii = 0; ii < result.length; ++ii) { + result[ii] = TypeUtils.unbox(boxedLongChunk.get(ii)); + } + } else { + final ObjectChunk dtc = data.asObjectChunk(); + for (int ii = 0; ii < result.length; ++ii) { + final Instant dt = dtc.get(ii); + result[ii] = dt == null ? 
QueryConstants.NULL_LONG : DateTimeUtils.toEpochNano(dt); + } + } + return LongChunk.chunkWrap(result); + } + + @Override + public void add(RowSet rowSet, Chunk data) { + if (data.getChunkType() == ChunkType.Long) { + longTestSource.add(rowSet, data.asLongChunk()); + } else if (data.getChunkType() == ChunkType.Object) { + longTestSource.add(rowSet, mapData(data.asObjectChunk())); + } else { + throw new IllegalArgumentException(); + } + } + + @Override + public void remove(RowSet rowSet) { + longTestSource.remove(rowSet); + } + + public void shift(long startKeyInclusive, long endKeyInclusive, long shiftDelta) { + longTestSource.shift(startKeyInclusive, endKeyInclusive, shiftDelta); + } + + @Override + public Instant get(long index) { + final Long v = longTestSource.get(index); + return v == null ? null : DateTimeUtils.makeInstant(v); + } + + @Override + public boolean isImmutable() { + return false; + } + + @Override + public long getLong(long index) { + return longTestSource.getLong(index); + } + + @Override + public Instant getPrev(long index) { + final Long v = longTestSource.getPrev(index); + return v == null ? 
null : DateTimeUtils.makeInstant(v); + } + + @Override + public long getPrevLong(long rowKey) { + return longTestSource.getPrevLong(rowKey); + } + + @Override + public boolean allowsReinterpret( + @NotNull final Class alternateDataType) { + return alternateDataType == long.class; + } + + @Override + public ColumnSource doReinterpret( + @NotNull final Class alternateDataType) throws IllegalArgumentException { + // noinspection unchecked + return (ColumnSource) alternateColumnSource; + } +} diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableIntTestSource.java b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableIntTestSource.java index 4693de41a71..cc38e216e4e 100644 --- a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableIntTestSource.java +++ b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableIntTestSource.java @@ -26,8 +26,8 @@ /** * A test column source that ignores modifications, throws on removals, and adds when requested. *

- * It uses a fastutil open addressed hash map from long RowSet keys to int values. If an uninitialized key is accessed; - * then an IllegalStateException is thrown. If the test framework attempts to remove or shift values, then an + * It uses a fastutil open addressed hash map from long RowSet keys to column values. If an uninitialized key is + * accessed; then an IllegalStateException is thrown. If the test framework attempts to remove or shift values, then an * UnsupportedOperationException is thrown. */ public class ImmutableIntTestSource extends AbstractColumnSource diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableLongTestSource.java b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableLongTestSource.java index 526e2be970f..498623430ef 100644 --- a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableLongTestSource.java +++ b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableLongTestSource.java @@ -26,8 +26,8 @@ /** * A test column source that ignores modifications, throws on removals, and adds when requested. *

- * It uses a fastutil open addressed hash map from long RowSet keys to long values. If an uninitialized key is accessed; - * then an IllegalStateException is thrown. If the test framework attempts to remove or shift values, then an + * It uses a fastutil open addressed hash map from long RowSet keys to column values. If an uninitialized key is + * accessed; then an IllegalStateException is thrown. If the test framework attempts to remove or shift values, then an * UnsupportedOperationException is thrown. */ public class ImmutableLongTestSource extends AbstractColumnSource diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableObjectTestSource.java b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableObjectTestSource.java index 30328cf13f0..8f60593c8e8 100644 --- a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableObjectTestSource.java +++ b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableObjectTestSource.java @@ -25,7 +25,7 @@ /** * A test column source that ignores modifications, throws on removals, and adds when requested. *

- * It uses a fastutil open addressed hash map from long RowSet keys to Object values. If an uninitialized key is + * It uses a fastutil open addressed hash map from long RowSet keys to column values. If an uninitialized key is * accessed; then an IllegalStateException is thrown. If the test framework attempts to remove or shift values, then an * UnsupportedOperationException is thrown. */ diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableShortTestSource.java b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableShortTestSource.java index aa377873859..ce9166074b7 100644 --- a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableShortTestSource.java +++ b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ImmutableShortTestSource.java @@ -26,7 +26,7 @@ /** * A test column source that ignores modifications, throws on removals, and adds when requested. *

- * It uses a fastutil open addressed hash map from long RowSet keys to short values. If an uninitialized key is + * It uses a fastutil open addressed hash map from long RowSet keys to column values. If an uninitialized key is * accessed; then an IllegalStateException is thrown. If the test framework attempts to remove or shift values, then an * UnsupportedOperationException is thrown. */ diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/InstantTestSource.java b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/InstantTestSource.java new file mode 100644 index 00000000000..3f491f0b7cd --- /dev/null +++ b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/InstantTestSource.java @@ -0,0 +1,157 @@ +package io.deephaven.engine.testutil.sources; + +import io.deephaven.chunk.Chunk; +import io.deephaven.chunk.ChunkType; +import io.deephaven.chunk.LongChunk; +import io.deephaven.chunk.ObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.rowset.RowSet; +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.impl.AbstractColumnSource; +import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; +import io.deephaven.time.DateTimeUtils; +import io.deephaven.util.QueryConstants; +import io.deephaven.util.type.TypeUtils; +import org.jetbrains.annotations.NotNull; + +import java.time.Instant; + +/** + * Instant column source that wraps and delegates the storage to an {@code TestSource}. This also provides an + * interface so this column can be interpreted as a long column (through UnboxedInstantTestSource). + */ +public class InstantTestSource extends AbstractColumnSource + implements MutableColumnSourceGetDefaults.ForObject, TestColumnSource { + + private final LongTestSource longTestSource; + private final UnboxedInstantTestSource alternateColumnSource; + + /** + * Create a new InstantTestSource with no initial data. 
+ */ + public InstantTestSource() { + super(Instant.class); + this.longTestSource = new LongTestSource(); + this.alternateColumnSource = new UnboxedInstantTestSource(this, longTestSource); + } + + /** + * Create a new InstantTestSource with the given rowSet and data. + * + * @param rowSet The row keys for the initial data + * @param data The initial data + */ + public InstantTestSource(RowSet rowSet, Instant[] data) { + super(Instant.class); + this.longTestSource = new LongTestSource(rowSet, mapData(data)); + this.alternateColumnSource = new UnboxedInstantTestSource(this, longTestSource); + } + + /** + * Create a new InstantTestSource with the given rowSet and data. + * + * @param rowSet The row keys for the initial data + * @param data The initial data + */ + public InstantTestSource(RowSet rowSet, Chunk data) { + super(Instant.class); + if (data.getChunkType() == ChunkType.Long) { + this.longTestSource = new LongTestSource(rowSet, data.asLongChunk()); + } else { + this.longTestSource = new LongTestSource(rowSet, mapData(data.asObjectChunk())); + } + + this.alternateColumnSource = new UnboxedInstantTestSource(this, longTestSource); + } + + private LongChunk mapData(Instant[] data) { + final long[] result = new long[data.length]; + for (int ii = 0; ii < result.length; ++ii) { + final Instant dt = data[ii]; + result[ii] = dt == null ? QueryConstants.NULL_LONG : DateTimeUtils.toEpochNano(dt); + } + return LongChunk.chunkWrap(result); + } + + private LongChunk mapData(ObjectChunk data) { + final long[] result = new long[data.size()]; + if (result.length > 0 && data.get(0) instanceof Long) { + final ObjectChunk boxedLongChunk = data.asObjectChunk(); + for (int ii = 0; ii < result.length; ++ii) { + result[ii] = TypeUtils.unbox(boxedLongChunk.get(ii)); + } + } else { + final ObjectChunk dtc = data.asObjectChunk(); + for (int ii = 0; ii < result.length; ++ii) { + final Instant dt = dtc.get(ii); + result[ii] = dt == null ? 
QueryConstants.NULL_LONG : DateTimeUtils.toEpochNano(dt); + } + } + return LongChunk.chunkWrap(result); + } + + public void add(RowSet rowSet, Instant[] data) { + longTestSource.add(rowSet, mapData(data)); + } + + @Override + public void add(RowSet rowSet, Chunk data) { + if (data.getChunkType() == ChunkType.Long) { + longTestSource.add(rowSet, data.asLongChunk()); + } else if (data.getChunkType() == ChunkType.Object) { + longTestSource.add(rowSet, mapData(data.asObjectChunk())); + } else { + throw new IllegalArgumentException(); + } + } + + @Override + public void remove(RowSet rowSet) { + longTestSource.remove(rowSet); + } + + @Override + public void shift(long startKeyInclusive, long endKeyInclusive, long shiftDelta) { + longTestSource.shift(startKeyInclusive, endKeyInclusive, shiftDelta); + } + + @Override + public Instant get(long rowKey) { + final Long v = longTestSource.get(rowKey); + return v == null ? null : DateTimeUtils.makeInstant(v); + } + + @Override + public boolean isImmutable() { + return false; + } + + @Override + public long getLong(long rowKey) { + return longTestSource.getLong(rowKey); + } + + @Override + public Instant getPrev(long rowKey) { + final Long v = longTestSource.getPrev(rowKey); + return v == null ? 
null : DateTimeUtils.makeInstant(v); + } + + @Override + public long getPrevLong(long rowKey) { + return longTestSource.getPrevLong(rowKey); + } + + @Override + public boolean allowsReinterpret( + @NotNull final Class alternateDataType) { + return alternateDataType == long.class; + } + + @Override + public ColumnSource doReinterpret( + @NotNull final Class alternateDataType) throws IllegalArgumentException { + // noinspection unchecked + return (ColumnSource) alternateColumnSource; + } +} diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/UnboxedInstantTestSource.java b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/UnboxedInstantTestSource.java new file mode 100644 index 00000000000..dfcfe298f81 --- /dev/null +++ b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/UnboxedInstantTestSource.java @@ -0,0 +1,43 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.testutil.sources; + +import io.deephaven.chunk.Chunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.rowset.RowSet; +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.impl.sources.UnboxedLongBackedColumnSource; + +import java.time.Instant; + +/** + * Wrap a regular {@link TestColumnSource} to make it reinterpretable as a long column source. 
+ */ +public class UnboxedInstantTestSource extends UnboxedLongBackedColumnSource + implements TestColumnSource { + + // the actual data storage + private final TestColumnSource longTestSource; + + public UnboxedInstantTestSource(ColumnSource alternateColumnSource, + TestColumnSource testColumnSource) { + super(alternateColumnSource); + this.longTestSource = testColumnSource; + } + + @Override + public void add(RowSet rowSet, Chunk data) { + longTestSource.add(rowSet, data); + } + + @Override + public void remove(RowSet rowSet) { + longTestSource.remove(rowSet); + } + + @Override + public void shift(long startKeyInclusive, long endKeyInclusive, long shiftDelta) { + longTestSource.shift(startKeyInclusive, endKeyInclusive, shiftDelta); + } +} diff --git a/engine/time/src/main/java/io/deephaven/time/DateTime.java b/engine/time/src/main/java/io/deephaven/time/DateTime.java index 8fc3fa71828..77464b07be0 100644 --- a/engine/time/src/main/java/io/deephaven/time/DateTime.java +++ b/engine/time/src/main/java/io/deephaven/time/DateTime.java @@ -7,6 +7,7 @@ import io.deephaven.util.QueryConstants; import io.deephaven.util.annotations.ReflexiveUse; import io.deephaven.util.type.TypeUtils; +import org.jetbrains.annotations.NotNull; import org.joda.time.DateTimeZone; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; @@ -16,6 +17,8 @@ import java.io.ObjectInput; import java.io.ObjectOutput; import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalTime; import java.time.ZoneId; import java.time.ZonedDateTime; import java.util.Date; @@ -26,8 +29,8 @@ * An object representing a timepoint in Deephaven. * *

- * The DateTime object is a timepoint, that is a precise instance in time without respect to timezones. The timepoint is - * stored as a signed 64-bit long nanoseconds since the epoch (January 1, 1970, 00:00:00 GMT). This provides a range + * The DateTime represents a zone-less, precise timepoint without respect to timezones. The instant is stored as a + * signed 64-bit long, representing nanoseconds since the epoch (January 1, 1970, 00:00:00 GMT). This provides a range * from 1677-09-21T00:12:43.146-775807 UTC to 2262-04-11T23:47:16.854775807 UTC. The minimum long value is reserved for * {@link QueryConstants#NULL_LONG} and therefore is not permitted as a valid DateTime. *

@@ -37,12 +40,12 @@ public final class DateTime implements Comparable, Externalizable { private static final long serialVersionUID = -9077991715632523353L; + private static final DateTimeFormatter JODA_DATE_TIME_FORMAT = + DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSS"); + private static final DateTimeFormatter JODA_DATE_FORMAT = DateTimeFormat.forPattern("yyyy-MM-dd"); private long nanos; - private static final DateTimeFormatter dateTimeFormat = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSS"); - private static final DateTimeFormatter dateFormat = DateTimeFormat.forPattern("yyyy-MM-dd"); - public static DateTime of(Instant instant) { return new DateTime(DateTimeUtils.nanos(instant)); } @@ -76,13 +79,6 @@ public static DateTime ofMillis(Clock clock) { return new DateTime(Math.multiplyExact(clock.currentTimeMillis(), 1_000_000)); } - /** - * Create a new DateTime initialized to the epoch. - */ - public DateTime() { - // for Externalizable - } - /** * Create a new DateTime initialized to the current system time. Based on {@link Clock#system()}. Equivalent to * {@code of(Clock.system())}. @@ -115,6 +111,13 @@ public static DateTime nowMillis() { return ofMillis(Clock.system()); } + /** + * Create a new DateTime initialized to the epoch. + */ + public DateTime() { + // for Externalizable + } + /** * Create a new DateTime initialized to the provided nanoseconds since the epoch. 
 *
@@ -124,6 +127,7 @@ public DateTime(long nanos) {
         this.nanos = nanos;
     }
 
+    // region Numeric representations
     /**
      * Get this time represented as nanoseconds since the epoch
      *
@@ -135,7 +139,7 @@ public long getNanos() {
 
     /**
      * Get this time represented as microseconds since the epoch
-     *
+     *
      * @return the number of microseconds since the epoch
      */
     public long getMicros() {
@@ -144,7 +148,7 @@ public long getMicros() {
 
     /**
      * Get this time represented as milliseconds since the epoch
-     *
+     *
      * @return the number of milliseconds since the epoch
      */
     public long getMillis() {
@@ -159,25 +163,19 @@ public long getMillis() {
     public long getNanosPartial() {
         return nanos % 1000000;
     }
+    // endregion
 
-    /**
-     * Convert this DateTime to a Java Date.
-     *
-     * This DateTime will be truncated to milliseconds.
-     *
-     * @return a Java Date representing this DateTime
-     */
-    public Date getDate() {
-        return new Date(getMillis());
-    }
-
+    // region Mutations to other DateTime types
+    // region Joda DateTime flavors
     /**
      * Convert this DateTime to a Joda DateTime.
      *
     * This DateTime will be truncated to milliseconds.
      *
      * @return a Joda DateTime representing this DateTime
+     * @deprecated use {@link #toZonedDateTime(ZoneId)} instead
      */
+    @Deprecated
     public org.joda.time.DateTime getJodaDateTime() {
         return new org.joda.time.DateTime(getMillis());
     }
@@ -190,20 +188,165 @@ public org.joda.time.DateTime getJodaDateTime() {
      * @param timeZone the timezone for the created Joda DateTime
      *
      * @return a Joda DateTime representing this DateTime
+     * @deprecated use {@link #toZonedDateTime(ZoneId)} instead
      */
+    @Deprecated
     public org.joda.time.DateTime getJodaDateTime(TimeZone timeZone) {
         return new org.joda.time.DateTime(getMillis(), timeZone.getTimeZone());
     }
+    // endregion
+
+    // region Java DateTime flavors
+    /**
+     * Get a {@link ZonedDateTime} version of this {@link DateTime} at the {@link ZoneId#systemDefault() system default}
+     * time zone.
+ * + * @return a {@link ZonedDateTime} + */ + @NotNull + public ZonedDateTime toZonedDateTime() { + return toZonedDateTime(ZoneId.systemDefault()); + } + + /** + * Get a {@link ZonedDateTime} version of this {@link DateTime} at the specified time zone. + * + * @return a {@link ZonedDateTime} + */ + @NotNull + public ZonedDateTime toZonedDateTime(@NotNull final String zone) { + return toZonedDateTime(ZoneId.of(zone)); + } + + /** + * Get a {@link ZonedDateTime} version of this {@link DateTime} at the specified time zone. + * + * @return a {@link ZonedDateTime} + */ + @NotNull + public ZonedDateTime toZonedDateTime(@NotNull final TimeZone zone) { + return toZonedDateTime(zone.getZoneId()); + } + + /** + * Get a {@link ZonedDateTime} version of this {@link DateTime} at the specified time zone. + * + * @return a {@link ZonedDateTime} + */ + @NotNull + public ZonedDateTime toZonedDateTime(@NotNull final ZoneId zone) { + return ZonedDateTime.ofInstant(getInstant(), zone); + } + + /** + * Get a {@link LocalDate} representing the date of this {@link DateTime} at the {@link ZoneId#systemDefault() + * system default} time zone. + * + * @return the {@link LocalDate} + */ + @NotNull + public LocalDate toLocalDate() { + return toLocalDate(ZoneId.systemDefault()); + } + + /** + * Get a {@link LocalDate} representing the date of this {@link DateTime} at the specified time zone. + * + * @return the {@link LocalDate} + */ + @NotNull + public LocalDate toLocalDate(@NotNull final String zone) { + return toLocalDate(ZoneId.of(zone)); + } + + /** + * Get a {@link LocalDate} representing the date of this {@link DateTime} at the specified time zone. + * + * @return the {@link LocalDate} + */ + @NotNull + public LocalDate toLocalDate(@NotNull final TimeZone zone) { + return toLocalDate(zone.getZoneId()); + } + + /** + * Get a {@link LocalDate} representing the date of this {@link DateTime} at the specified time zone. 
+ * + * @return the {@link LocalDate} + */ + @NotNull + public LocalDate toLocalDate(@NotNull final ZoneId zone) { + return toZonedDateTime(zone).toLocalDate(); + } + + /** + * Get a {@link LocalTime} representing the time of day of this {@link DateTime} at the + * {@link ZoneId#systemDefault() system default} time zone. + * + * @return the {@link LocalTime} + */ + @NotNull + public LocalTime toLocalTime() { + return toLocalTime(ZoneId.systemDefault()); + } + + /** + * Get a {@link LocalTime} representing the time of day of this {@link DateTime} at the specified time zone. + * + * @return the {@link LocalTime} + */ + @NotNull + public LocalTime toLocalTime(@NotNull final String zone) { + return toLocalTime(ZoneId.of(zone)); + } + + /** + * Get a {@link LocalTime} representing the time of day of this {@link DateTime} at the specified time zone. + * + * @return the {@link LocalTime} + */ + @NotNull + public LocalTime toLocalTime(@NotNull final TimeZone zone) { + return toLocalTime(zone.getZoneId()); + } + + /** + * Get a {@link LocalTime} representing the time of day of this {@link DateTime} at the specified time zone. + * + * @return the {@link LocalTime} + */ + @NotNull + public LocalTime toLocalTime(@NotNull final ZoneId zone) { + return toZonedDateTime(zone).toLocalTime(); + } + + /** + * Convert this DateTime to a Java Date. + * + * This DateTime will be truncated to milliseconds. + * + * @return a Java Date representing this DateTime + * @deprecated use {@link #toZonedDateTime()} instead. + */ + @Deprecated + @NotNull + public Date getDate() { + return new Date(getMillis()); + } /** * Convert this DateTime to a Java Instant. 
* * @return a Java Instant representing this DateTime */ + @NotNull public Instant getInstant() { return Instant.ofEpochSecond(0, nanos); } + // endregion + // endregion + // region Object hashing / Comparison @Override public boolean equals(final Object that) { if (this == that) { @@ -225,7 +368,9 @@ public int hashCode() { public int compareTo(DateTime dateTime) { return (nanos < dateTime.nanos ? -1 : (nanos == dateTime.nanos ? 0 : 1)); } + // endregion + // region String formatting @Override public String toString() { return toString(TimeZone.TZ_DEFAULT); @@ -242,8 +387,9 @@ public String toString() { * @param timeZone the timezone for formatting the string * @return a String representation of this DateTime */ - public String toString(TimeZone timeZone) { - return dateTimeFormat.withZone(timeZone.getTimeZone()).print(getMillis()) + @NotNull + public String toString(@NotNull final TimeZone timeZone) { + return JODA_DATE_TIME_FORMAT.withZone(timeZone.getTimeZone()).print(getMillis()) + DateTimeUtils.pad(String.valueOf(getNanosPartial()), 6) + " " + timeZone.toString().substring(3); } @@ -252,6 +398,7 @@ public String toString(TimeZone timeZone) { * * @return The date (yyyy-MM-dd) represented by this {@code DateTime} in the default {@link TimeZone}. */ + @NotNull public String toDateString() { return toDateString(TimeZone.TZ_DEFAULT); } @@ -262,47 +409,61 @@ public String toDateString() { * @param timeZone a TimeZone * @return The date (yyyy-MM-dd) represented by this {@code DateTime} in the given timeZone. 
 */
-    public String toDateString(TimeZone timeZone) {
-        return dateFormat.withZone(timeZone.getTimeZone()).print(getMillis());
+    @NotNull
+    public String toDateString(@NotNull final TimeZone timeZone) {
+        // noinspection ConstantConditions
+        if (timeZone == null) {
+            throw new IllegalArgumentException("timeZone cannot be null");
+        }
+        return JODA_DATE_FORMAT.withZone(timeZone.getTimeZone()).print(getMillis());
     }
 
     /**
-     * Get the date represented by this DateTime in the given joda {@code DateTimeZone}.
+     * Get the date represented by this DateTime in the given Joda {@code DateTimeZone} in ISO date format yyyy-MM-dd.
      *
      * @param timeZone A joda DateTimeZone
      * @return The date (yyyy-MM-dd) represented by this {@code DateTime} in the given {@code timeZone}
      */
-    public String toDateString(DateTimeZone timeZone) {
+    @NotNull
+    public String toDateString(@NotNull final DateTimeZone timeZone) {
+        // noinspection ConstantConditions
         if (timeZone == null) {
             throw new IllegalArgumentException("timeZone cannot be null");
         }
-        return dateFormat.withZone(timeZone).print(getMillis());
+        return JODA_DATE_FORMAT.withZone(timeZone).print(getMillis());
     }
 
     /**
-     * Get the date represented by this DateTime in the time zone specified by {@code zoneId}
+     * Get the date represented by this DateTime in the time zone specified by {@code zoneId} in
+     * {@link java.time.format.DateTimeFormatter#ISO_LOCAL_DATE ISO} date format.
      *
      * @param zoneId A java time zone ID string
     * @return The date (yyyy-MM-dd) represented by this {@code DateTime} in time zone represented by the given
      *         {@code zoneId}
      */
-    public String toDateString(String zoneId) {
+    @NotNull
+    public String toDateString(@NotNull final String zoneId) {
        return toDateString(ZoneId.of(zoneId));
    }
 
     /**
-     * Get the date represented by this DateTime in the given java {@code ZoneId}.
+     * Get the date represented by this DateTime in the given java {@code ZoneId} in
+     * {@link java.time.format.DateTimeFormatter#ISO_LOCAL_DATE ISO} date format.
* * @param timeZone A java {@link ZoneId time zone ID}. * @return The date (yyyy-MM-dd) represented by this {@code DateTime} in the given {@code timeZone} */ - public String toDateString(ZoneId timeZone) { + @NotNull + public String toDateString(@NotNull final ZoneId timeZone) { + // noinspection ConstantConditions if (timeZone == null) { throw new IllegalArgumentException("timeZone cannot be null"); } return ISO_LOCAL_DATE.format(ZonedDateTime.ofInstant(getInstant(), timeZone)); } + // endregion + // region Externalizable public void writeExternal(ObjectOutput out) throws IOException { out.writeLong(nanos); } @@ -310,4 +471,5 @@ public void writeExternal(ObjectOutput out) throws IOException { public void readExternal(ObjectInput in) throws IOException { nanos = in.readLong(); } + // endregion } diff --git a/engine/time/src/main/java/io/deephaven/time/DateTimeUtils.java b/engine/time/src/main/java/io/deephaven/time/DateTimeUtils.java index 567ff3a1ee1..993d360620c 100644 --- a/engine/time/src/main/java/io/deephaven/time/DateTimeUtils.java +++ b/engine/time/src/main/java/io/deephaven/time/DateTimeUtils.java @@ -11,31 +11,28 @@ import io.deephaven.configuration.Configuration; import io.deephaven.function.Numeric; import io.deephaven.util.QueryConstants; -import io.deephaven.time.calendar.BusinessCalendar; -import io.deephaven.time.calendar.Calendars; import io.deephaven.util.annotations.ScriptApi; import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; import org.joda.time.DateMidnight; -import org.joda.time.DateTimeZone; import org.joda.time.DurationFieldType; -import java.text.SimpleDateFormat; import java.time.*; import java.time.format.DateTimeFormatter; import java.time.format.DateTimeParseException; import java.time.temporal.ChronoField; -import java.util.Calendar; -import java.util.Date; import java.util.HashMap; import java.util.Objects; import java.util.concurrent.TimeUnit; import java.util.regex.Matcher; import 
java.util.regex.Pattern; +import static io.deephaven.util.QueryConstants.NULL_LONG; + /** * Utilities for Deephaven date/time storage and manipulation. */ -@SuppressWarnings("UnusedDeclaration") +@SuppressWarnings("unused") public class DateTimeUtils { public static final DateTime[] ZERO_LENGTH_DATETIME_ARRAY = new DateTime[0]; @@ -70,10 +67,6 @@ public class DateTimeUtils { // DateTime literals private static final Pattern DATETIME_PATTERN = Pattern.compile( "[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9](T[0-9][0-9]?:[0-9][0-9](:[0-9][0-9])?(\\.[0-9][0-9]?[0-9]?[0-9]?[0-9]?[0-9]?[0-9]?[0-9]?[0-9]?)?)? [a-zA-Z]+"); - private static final Pattern JIM_DATETIME_PATTERN = Pattern.compile( - "[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]T[0-9][0-9]:[0-9][0-9]:[0-9][0-9].[0-9][0-9][0-9][\\+-][0-9][0-9][0-9][0-9]"); - private static final Pattern JIM_MICROS_DATETIME_PATTERN = Pattern.compile( - "[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]T[0-9][0-9]:[0-9][0-9]:[0-9][0-9].[0-9][0-9][0-9][0-9][0-9][0-9][\\+-][0-9][0-9][0-9][0-9]"); private static final Pattern TIME_AND_DURATION_PATTERN = Pattern.compile( "\\-?([0-9]+T)?([0-9]+):([0-9]+)(:[0-9]+)?(\\.[0-9][0-9]?[0-9]?[0-9]?[0-9]?[0-9]?[0-9]?[0-9]?[0-9]?)?"); private static final Pattern PERIOD_PATTERN = Pattern.compile( @@ -156,19 +149,6 @@ private enum DateGroupId { private static final double YEARS_PER_NANO = 1. / (double) YEAR; - /** - * Allows setting an alternate date instead of "today" to be returned from {@link #currentDateNy}. This is mainly - * used when setting up for a replay simulation. - */ - public static String currentDateNyOverride; - - /** - * Allows setting an alternate date instead of the business day before "today" to be returned from - * {@link #lastBusinessDateNy}. This is mainly used when setting up for a replay simulation. 
- */ - @SuppressWarnings("WeakerAccess") - public static String lastBusinessDayNyOverride; - // TODO(deephaven-core#3044): Improve scaffolding around full system replay /** * Allows setting a custom clock instead of actual current time. This is mainly used when setting up for a replay @@ -185,7 +165,7 @@ private enum DateGroupId { */ public static long millis(DateTime dateTime) { if (dateTime == null) { - return io.deephaven.util.QueryConstants.NULL_LONG; + return NULL_LONG; } return dateTime.getMillis(); @@ -199,19 +179,26 @@ public static long millis(DateTime dateTime) { */ public static long nanos(DateTime dateTime) { if (dateTime == null) { - return io.deephaven.util.QueryConstants.NULL_LONG; + return NULL_LONG; } return dateTime.getNanos(); } + /** + * Returns nanoseconds since Epoch for an {@link Instant} value. + * + * @param instant The {@link Instant} for which the nanoseconds offset should be returned. + * @return A long value of nanoseconds since Epoch, or a NULL_LONG value if the {@link Instant} is null. + */ public static long nanos(Instant instant) { if (instant == null) { - return io.deephaven.util.QueryConstants.NULL_LONG; + return NULL_LONG; } return Math.addExact(TimeUnit.SECONDS.toNanos(instant.getEpochSecond()), instant.getNano()); } + // region Comparisons /** * Evaluates whether one {@link DateTime} value is earlier than a second {@link DateTime} value. * @@ -255,7 +242,7 @@ public static boolean isAfter(DateTime d1, DateTime d2) { * nanoseconds from Epoch. */ public static DateTime plus(DateTime dateTime, long nanos) { - if (dateTime == null || nanos == io.deephaven.util.QueryConstants.NULL_LONG) { + if (dateTime == null || nanos == NULL_LONG) { return null; } @@ -273,7 +260,7 @@ public static DateTime plus(DateTime dateTime, long nanos) { * nanoseconds from Epoch. 
*/ public static DateTime minus(DateTime dateTime, long nanos) { - if (dateTime == null || -nanos == io.deephaven.util.QueryConstants.NULL_LONG) { + if (dateTime == null || -nanos == NULL_LONG) { return null; } @@ -343,7 +330,7 @@ public static DateTime minus(DateTime dateTime, Period period) { */ public static long minus(DateTime d1, DateTime d2) { if (d1 == null || d2 == null) { - return io.deephaven.util.QueryConstants.NULL_LONG; + return NULL_LONG; } return checkUnderflowMinus(d1.getNanos(), d2.getNanos(), true); @@ -432,6 +419,7 @@ public static double diffDay(DateTime start, DateTime end) { return (double) diffNanos(start, end) / DAY; } + // endregion /** * Returns a {@link DateTime} for the requested {@link DateTime} at midnight in the specified time zone. @@ -465,7 +453,7 @@ public static DateTime millisToDateAtMidnightNy(final long millis) { } /** - * Returns a {@link DateTime} representing midnight in a selected time zone on the date specified by the a number of + * Returns a {@link DateTime} representing midnight in a selected time zone on the date specified by a number of * milliseconds from Epoch. * * @param millis A long value of the number of milliseconds from Epoch for which the {@link DateTime} is to be @@ -476,13 +464,14 @@ public static DateTime millisToDateAtMidnightNy(final long millis) { */ @SuppressWarnings("WeakerAccess") public static DateTime millisToDateAtMidnight(final long millis, final TimeZone timeZone) { - if (millis == io.deephaven.util.QueryConstants.NULL_LONG) { + if (millis == NULL_LONG) { return null; } return new DateTime(millisToNanos(new DateMidnight(millis, timeZone.getTimeZone()).getMillis())); } + // region Formatting /** * Returns a String date/time representation. 
* @@ -586,6 +575,7 @@ public static String format(long nanos) { return buf.toString(); } + // endregion static String pad(@NotNull final String str, final int length) { if (length <= str.length()) { @@ -594,6 +584,7 @@ static String pad(@NotNull final String str, final int length) { return "0".repeat(length - str.length()) + str; } + // region Chronology Getters /** * Returns an int value of the day of the month for a {@link DateTime} and specified time zone. * @@ -782,7 +773,7 @@ public static int millisOfSecondNy(DateTime dateTime) { @SuppressWarnings("WeakerAccess") public static long nanosOfDay(DateTime dateTime, TimeZone timeZone) { if (dateTime == null || timeZone == null) { - return io.deephaven.util.QueryConstants.NULL_LONG; + return NULL_LONG; } return millisToNanos(dateTime.getJodaDateTime(timeZone).getMillisOfDay()) + dateTime.getNanosPartial(); @@ -813,7 +804,7 @@ public static long nanosOfDayNy(DateTime dateTime) { @SuppressWarnings("WeakerAccess") public static long nanosOfSecond(DateTime dateTime, TimeZone timeZone) { if (dateTime == null || timeZone == null) { - return io.deephaven.util.QueryConstants.NULL_LONG; + return NULL_LONG; } return millisToNanos(dateTime.getJodaDateTime(timeZone).getMillisOfSecond()) + dateTime.getNanosPartial(); @@ -1069,7 +1060,9 @@ public static int yearOfCentury(DateTime dateTime, TimeZone timeZone) { public static int yearOfCenturyNy(DateTime dateTime) { return yearOfCentury(dateTime, TimeZone.TZ_NY); } + // endregion + // region Base and Unit conversion /** * Returns the Excel double time format representation of a {@link DateTime}. * @@ -1122,8 +1115,8 @@ public static double getExcelDateTime(DateTime dateTime) { * containing the equivalent number of nanoseconds for the input in microseconds. 
*/ public static long microsToNanos(long micros) { - if (micros == io.deephaven.util.QueryConstants.NULL_LONG) { - return io.deephaven.util.QueryConstants.NULL_LONG; + if (micros == NULL_LONG) { + return NULL_LONG; } if (Math.abs(micros) > MAX_CONVERTIBLE_MICROS) { throw new DateTimeOverflowException("Converting " + micros + " micros to nanos would overflow"); @@ -1140,8 +1133,8 @@ public static long microsToNanos(long micros) { */ @SuppressWarnings("WeakerAccess") public static long nanosToMicros(long nanos) { - if (nanos == io.deephaven.util.QueryConstants.NULL_LONG) { - return io.deephaven.util.QueryConstants.NULL_LONG; + if (nanos == NULL_LONG) { + return NULL_LONG; } return nanos / 1000; } @@ -1166,8 +1159,8 @@ public static DateTime microsToTime(long micros) { * the equivalent number of nanoseconds to the input. */ public static long millisToNanos(long millis) { - if (millis == io.deephaven.util.QueryConstants.NULL_LONG) { - return io.deephaven.util.QueryConstants.NULL_LONG; + if (millis == NULL_LONG) { + return NULL_LONG; } if (Math.abs(millis) > MAX_CONVERTIBLE_MILLIS) { throw new DateTimeOverflowException("Converting " + millis + " millis to nanos would overflow"); @@ -1184,8 +1177,8 @@ public static long millisToNanos(long millis) { * containing the equivalent number of nanoseconds for the input in seconds. */ public static long secondsToNanos(long seconds) { - if (seconds == io.deephaven.util.QueryConstants.NULL_LONG) { - return io.deephaven.util.QueryConstants.NULL_LONG; + if (seconds == NULL_LONG) { + return NULL_LONG; } if (Math.abs(seconds) > MAX_CONVERTIBLE_SECONDS) { throw new DateTimeOverflowException("Converting " + seconds + " seconds to nanos would overflow"); @@ -1202,8 +1195,8 @@ public static long secondsToNanos(long seconds) { * equivalent number of milliseconds for the input in nanoseconds. 
*/ public static long nanosToMillis(long nanos) { - if (nanos == io.deephaven.util.QueryConstants.NULL_LONG) { - return io.deephaven.util.QueryConstants.NULL_LONG; + if (nanos == NULL_LONG) { + return NULL_LONG; } return nanos / 1000000; @@ -1231,6 +1224,7 @@ public static DateTime secondsToTime(long seconds) { return nanosToTime(secondsToNanos(seconds)); } + /** * Returns the current clock. The current clock is {@link #clock} if set, otherwise {@link Clock#system()}. * @@ -1240,128 +1234,182 @@ public static Clock currentClock() { return Objects.requireNonNullElse(clock, Clock.system()); } + private static long safeComputeNanos(long epochSecond, long nanoOfSecond) { + if (epochSecond >= MAX_CONVERTIBLE_SECONDS) { + throw new IllegalArgumentException("Numeric overflow detected during conversion of " + epochSecond + + " to nanoseconds"); + } + + return epochSecond * 1_000_000_000L + nanoOfSecond; + } + /** - * Equivalent to {@code DateTime.of(currentClock())}. + * Convert the specified instant to nanoseconds since epoch, or {@link QueryConstants#NULL_LONG null}. * - * @return the current date time + * @param value the instant to convert + * + * @return nanoseconds since epoch or {@link QueryConstants#NULL_LONG null} */ - @ScriptApi - public static DateTime currentTime() { - return DateTime.of(currentClock()); + public static long toEpochNano(@Nullable final Instant value) { + if (value == null) { + return NULL_LONG; + } + + return safeComputeNanos(value.getEpochSecond(), value.getNano()); } /** - * Equivalent to {@code DateTime.ofMillis(currentClock())}. + * Convert the specified {@link ZonedDateTime} to nanoseconds since epoch, or {@link QueryConstants#NULL_LONG null}. 
* - * @return the current date time + * @param value the instant to convert + * + * @return nanoseconds since epoch or {@link QueryConstants#NULL_LONG null} */ - public static DateTime currentTimeMillis() { - return DateTime.ofMillis(currentClock()); - } - - // TODO: Revoke public access to these fields and retire them! Use getCurrentDate(), maybe hold on to the - // CachedCurrentDate to skip a map lookup. - public static String currentDateNy = null; + public static long toEpochNano(@Nullable final ZonedDateTime value) { + if (value == null) { + return NULL_LONG; + } - public static long endOfCurrentDateNy = 0; + return safeComputeNanos(value.toEpochSecond(), value.getNano()); + } /** - * Provides a String representing the current date in the New York time zone or, if a custom - * {@link #currentDateNyOverride} has been set, the date provided by that override. + * Convert nanos since epoch to an {@link Instant} value. * - * @return A String in yyyy-MM-dd format. + * @param nanos nanoseconds since epoch + * @return a new {@link Instant} or null if nanos was {@link QueryConstants#NULL_LONG}. */ - public static String currentDateNy() { - if (currentDateNyOverride != null) { - return currentDateNyOverride; - } - final long currentTimeMillis = System.currentTimeMillis(); - if (currentTimeMillis > endOfCurrentDateNy) { - final SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd"); - format.setTimeZone(TimeZones.TZ_NEWYORK); - currentDateNy = format.format(new Date(currentTimeMillis)); + @Nullable + public static Instant makeInstant(final long nanos) { + return nanos == NULL_LONG ? null : Instant.ofEpochSecond(nanos / 1_000_000_000L, nanos % 1_000_000_000L); + } - // Calculate when this cached value expires - endOfCurrentDateNy = getMillisAtMidnightNy(currentTimeMillis); - } - return currentDateNy; + /** + * Converts nanos of epoch to a {@link ZonedDateTime} using the {@link TimeZone#TZ_DEFAULT default} time zone. 
+ * + * @param nanos nanoseconds since epoch + * @return a new {@link ZonedDateTime} or null if nanos was {@link QueryConstants#NULL_LONG}. + */ + @Nullable + public static ZonedDateTime makeZonedDateTime(final long nanos) { + return makeZonedDateTime(nanos, TimeZone.TZ_DEFAULT.getZoneId()); } /** - * Sets the {@link #lastBusinessDayNyOverride} to the previous business day from a currently set - * {@link #currentDateNyOverride} value. If {@link #currentDateNyOverride} has not been set, this method has no - * effect. + * Converts nanos of epoch to a {@link ZonedDateTime}. + * + * @param nanos nanoseconds since epoch + * @param timeZone the {@link TimeZone time zone} + * + * @return a new {@link ZonedDateTime} or null if nanos was {@link QueryConstants#NULL_LONG}. */ - public static void overrideLastBusinessDateNyFromCurrentDateNy() { - if (currentDateNyOverride != null) { - final BusinessCalendar bc = Calendars.calendar("USNYSE"); - lastBusinessDayNyOverride = bc.previousBusinessDay(currentDateNyOverride.substring(0, 10)); - } + @Nullable + public static ZonedDateTime makeZonedDateTime(final long nanos, @NotNull final TimeZone timeZone) { + return makeZonedDateTime(nanos, timeZone.getZoneId()); } /** - * Cached value of lastBusinessDateNy, which expires after milliseconds from Epoch value of - * {@link #endOfCurrentDateNyLastBusinessDay} + * Converts nanos of epoch to a {@link ZonedDateTime}. + * + * @param nanos nanoseconds since epoch + * @param zone the {@link ZoneId time zone} + * + * @return a new {@link ZonedDateTime} or null if nanos was {@link QueryConstants#NULL_LONG}. */ - public static String lastBusinessDateNy = null; + @Nullable + public static ZonedDateTime makeZonedDateTime(final long nanos, ZoneId zone) { + // noinspection ConstantConditions + return nanos == NULL_LONG ? null : ZonedDateTime.ofInstant(makeInstant(nanos), zone); + } + /** - * Expiration for cached {@link #lastBusinessDateNy} as milliseconds from Epoch. 
+ * Converts a {@link DateTime} to a {@link ZonedDateTime}. + * + * @param dateTime The a {@link DateTime} to convert. + * @return A {@link ZonedDateTime} using the default time zone for the session as indicated by + * {@link TimeZone#TZ_DEFAULT}. */ - public static long endOfCurrentDateNyLastBusinessDay = 0; + @Nullable + public static ZonedDateTime getZonedDateTime(final @Nullable DateTime dateTime) { + return getZonedDateTime(dateTime, TimeZone.TZ_DEFAULT); + } /** - * Provides a String representing the previous business date in the New York time zone using the NYSE calendar, or, - * if a custom {@link #lastBusinessDayNyOverride} has been set, the date provided by that override. + * Converts a {@link DateTime} to a {@link ZonedDateTime}. * - * @return A String in yyyy-MM-dd format. + * @param dateTime The a {@link DateTime} to convert. + * @param timeZone The {@link TimeZone} to use for the conversion. + * @return A {@link ZonedDateTime} using the specified time zone. or null if dateTime was null */ - public static String lastBusinessDateNy() { - return lastBusinessDateNy(System.currentTimeMillis()); + @Nullable + public static ZonedDateTime getZonedDateTime(@Nullable final DateTime dateTime, @NotNull final TimeZone timeZone) { + if (dateTime == null) { + return null; + } + + final ZoneId zone = timeZone.getTimeZone().toTimeZone().toZoneId(); + return dateTime.toZonedDateTime(zone); } /** - * Provides a String representing the previous business date in the New York time zone using the NYSE calendar, or, - * if a custom {@link #lastBusinessDayNyOverride} has been set, the date provided by that override. + * Converts a {@link DateTime} to a {@link ZonedDateTime}. * - * @param currentTimeMillis The current date/time in milliseconds from Epoch to be used when determining the - * previous business date. Typically this is System.currentTimeMillis() and is passed in by calling the - * niladic variant of this method. - * @return A String in yyyy-MM-dd format. 
+ * @param dateTime The a {@link DateTime} to convert. + * @param timeZone The {@link ZoneId} to use for the conversion. + * @return A {@link ZonedDateTime} using the specified time zone. or null if dateTime was null */ - public static String lastBusinessDateNy(final long currentTimeMillis) { - if (lastBusinessDayNyOverride != null) { - return lastBusinessDayNyOverride; + @Nullable + public static ZonedDateTime getZonedDateTime(@Nullable final DateTime dateTime, @NotNull final ZoneId timeZone) { + if (dateTime == null) { + return null; } - if (currentTimeMillis > endOfCurrentDateNyLastBusinessDay) { - final BusinessCalendar bc = Calendars.calendar("USNYSE"); + return dateTime.toZonedDateTime(timeZone); + } + + /** + * Converts a {@link ZonedDateTime} to a {@link DateTime}. + * + * @param zonedDateTime The a {@link ZonedDateTime} to convert. + * @throws DateTimeOverflowException if the input is out of the range for a {@link DateTime}, otherwise, a + * {@link DateTime} version of the input. 
+ */ + @Nullable + public static DateTime toDateTime(@Nullable final ZonedDateTime zonedDateTime) { + if (zonedDateTime == null) { + return null; + } - lastBusinessDateNy = bc.previousBusinessDay(DateTimeUtils.millisToTime(currentTimeMillis)); + int nanos = zonedDateTime.getNano(); + long seconds = zonedDateTime.toEpochSecond(); - // Calculate when this cached value expires - endOfCurrentDateNyLastBusinessDay = getMillisAtMidnightNy(currentTimeMillis); + long limit = (Long.MAX_VALUE - nanos) / DateTimeUtils.SECOND; + if (seconds >= limit) { + throw new DateTimeOverflowException("Overflow: cannot convert " + zonedDateTime + " to new DateTime"); } - return lastBusinessDateNy; + + return new DateTime(nanos + (seconds * DateTimeUtils.SECOND)); } + // endregion + // region Query Helper Methods /** - * Returns the number of milliseconds from Epoch for midnight at the beginning of the next day in the New York time - * zone relative to the date represented by a passed milliseconds from Epoch date/time. + * Equivalent to {@code DateTime.of(currentClock())}. * - * @param currentTimeMillis A long value of milliseconds from Epoch which is the date/time from which the next New - * York time zone midnight value should be calculated. - * @return A long of milliseconds from Epoch for midnight at the beginning of the next day in the New York time - * zone. 
+ * @return the current date time */ - private static long getMillisAtMidnightNy(final long currentTimeMillis) { - final Calendar calendar = Calendar.getInstance(TimeZones.TZ_NEWYORK); - calendar.setTimeInMillis(currentTimeMillis); - calendar.set(Calendar.MILLISECOND, 0); - calendar.set(Calendar.SECOND, 0); - calendar.set(Calendar.MINUTE, 0); - calendar.set(Calendar.HOUR_OF_DAY, 0); - calendar.add(Calendar.DAY_OF_YEAR, 1); // should handle daylight savings - return calendar.getTimeInMillis(); + @ScriptApi + public static DateTime currentTime() { + return DateTime.of(currentClock()); + } + + /** + * Equivalent to {@code DateTime.ofMillis(currentClock())}. + * + * @return the current date time + */ + public static DateTime currentTimeMillis() { + return DateTime.ofMillis(currentClock()); } private abstract static class CachedDate { @@ -1437,7 +1485,7 @@ public static String currentDate(TimeZone timeZone) { * {@link QueryConstants#NULL_LONG NULL_LONG}. */ public static DateTime nanosToTime(long nanos) { - return nanos == io.deephaven.util.QueryConstants.NULL_LONG ? null : new DateTime(nanos); + return nanos == NULL_LONG ? null : new DateTime(nanos); } /** @@ -1468,7 +1516,7 @@ public static DateTime autoEpochToTime(long epoch) { * corresponding to the passed in epoch value. */ public static long autoEpochToNanos(final long epoch) { - if (epoch == io.deephaven.util.QueryConstants.NULL_LONG) { + if (epoch == NULL_LONG) { return epoch; } final long absEpoch = Math.abs(epoch); @@ -1512,7 +1560,7 @@ public static DateTime cappedTimeOffset(DateTime original, Period period, DateTi * @return Null if either input is null, otherwise a {@link DateTime} representing the start of the window. 
*/ public static DateTime lowerBin(DateTime dateTime, long intervalNanos) { - if (dateTime == null || intervalNanos == io.deephaven.util.QueryConstants.NULL_LONG) { + if (dateTime == null || intervalNanos == NULL_LONG) { return null; } @@ -1531,7 +1579,7 @@ public static DateTime lowerBin(DateTime dateTime, long intervalNanos) { * @return Null if either input is null, otherwise a {@link DateTime} representing the start of the window. */ public static DateTime lowerBin(DateTime dateTime, long intervalNanos, long offset) { - if (dateTime == null || intervalNanos == QueryConstants.NULL_LONG || offset == QueryConstants.NULL_LONG) { + if (dateTime == null || intervalNanos == NULL_LONG || offset == NULL_LONG) { return null; } @@ -1548,7 +1596,7 @@ public static DateTime lowerBin(DateTime dateTime, long intervalNanos, long offs * @return Null if either input is null, otherwise a {@link DateTime} representing the end of the window. */ public static DateTime upperBin(DateTime dateTime, long intervalNanos) { - if (dateTime == null || intervalNanos == io.deephaven.util.QueryConstants.NULL_LONG) { + if (dateTime == null || intervalNanos == NULL_LONG) { return null; } @@ -1567,15 +1615,14 @@ public static DateTime upperBin(DateTime dateTime, long intervalNanos) { * @return Null if either input is null, otherwise a {@link DateTime} representing the end of the window. 
*/ public static DateTime upperBin(DateTime dateTime, long intervalNanos, long offset) { - if (dateTime == null || intervalNanos == io.deephaven.util.QueryConstants.NULL_LONG - || offset == io.deephaven.util.QueryConstants.NULL_LONG) { + if (dateTime == null || intervalNanos == NULL_LONG + || offset == NULL_LONG) { return null; } return nanosToTime(Numeric.upperBin(dateTime.getNanos() - offset, intervalNanos) + offset); } - - // ------------------------------------------------------------------------------------------------------------------------------------------------------------------ + // endregion // + can only result in flow if both positive or both negative private static long checkOverflowPlus(final long l1, final long l2, final boolean minusOperation) { @@ -1624,7 +1671,7 @@ public static Result convertExpression(String formula) throws Exception { // TOD final StringBuilder instanceVariablesString = new StringBuilder(); final HashMap> newVariables = new HashMap<>(); - final StringBuffer convertedFormula = new StringBuffer(); + final StringBuilder convertedFormula = new StringBuilder(); int localDateIndex = 0; int dateTimeIndex = 0; @@ -1656,7 +1703,7 @@ public static Result convertExpression(String formula) throws Exception { // TOD .append("\");\n"); newVariables.put("_localDate" + localDateIndex, LocalDate.class); localDateIndex++; - } else if (convertTimeQuiet(s) != io.deephaven.util.QueryConstants.NULL_LONG) { + } else if (convertTimeQuiet(s) != NULL_LONG) { matcher.appendReplacement(convertedFormula, "_time" + timeIndex); instanceVariablesString.append(" private long _time").append(timeIndex) .append("=DateTimeUtils.convertTime(\"").append(formula, matcher.start() + 1, matcher.end() - 1) @@ -1723,7 +1770,7 @@ public static long expressionToNanos(String formula) { return dateTime.getNanos(); } long time = convertTimeQuiet(s); - if (time != io.deephaven.util.QueryConstants.NULL_LONG) { + if (time != NULL_LONG) { return time; } final Period period = 
convertPeriodQuiet(s); @@ -1788,7 +1835,7 @@ public static DateTime convertDateTime(String s) { public static long convertTime(String s) { long ret = convertTimeQuiet(s); - if (ret == io.deephaven.util.QueryConstants.NULL_LONG) { + if (ret == NULL_LONG) { throw new RuntimeException("Cannot parse time : " + s); } @@ -1952,42 +1999,6 @@ public static LocalDate convertDateQuiet(String s, DateStyle dateStyle) { return null; } - /* - * This version assumes you know what date it is and that the format is correct and just want the time, so we can - * save time (e.g. 2010-09-02T08:17:17.502-0400) 0123456789012345678901234567 1 2 - */ - - @SuppressWarnings("WeakerAccess") - public static DateTime convertJimDateTimeQuiet(String s) { - int year = extractFourDigitNum(s, 0); - int month = extractTwoDigitNum(s, 5); - int day = extractTwoDigitNum(s, 8); - int hour = extractTwoDigitNum(s, 11); - int min = extractTwoDigitNum(s, 14); - int sec = extractTwoDigitNum(s, 17); - int millis = extractThreeDigitNum(s, 20); - int tzHours = (s.charAt(23) == '-' ? -1 : 1) * extractTwoDigitNum(s, 24); - DateTimeZone timeZone = DateTimeZone.forOffsetHours(tzHours); - org.joda.time.DateTime d = new org.joda.time.DateTime(year, month, day, hour, min, sec, millis, timeZone); - return new DateTime(millisToNanos(d.getMillis())); - } - - @SuppressWarnings("WeakerAccess") - public static DateTime convertJimMicrosDateTimeQuiet(String s) { - int year = extractFourDigitNum(s, 0); - int month = extractTwoDigitNum(s, 5); - int day = extractTwoDigitNum(s, 8); - int hour = extractTwoDigitNum(s, 11); - int min = extractTwoDigitNum(s, 14); - int sec = extractTwoDigitNum(s, 17); - int micros = extractSixDigitNum(s, 20); - int tzHours = (s.charAt(26) == '-' ? 
-1 : 1) * extractTwoDigitNum(s, 27); - DateTimeZone timeZone = DateTimeZone.forOffsetHours(tzHours); - org.joda.time.DateTime d = - new org.joda.time.DateTime(year, month, day, hour, min, sec, micros / 1000, timeZone); - return new DateTime(millisToNanos(d.getMillis()) + (micros % 1000) * 1000); - } - /** * Converts a {@link DateTime} String from a few specific zoned formats to a {@link DateTime}. * @@ -2014,10 +2025,6 @@ public static DateTime convertDateTimeQuiet(final String s) { } timeZone = TimeZone.valueOf("TZ_" + s.substring(spaceIndex + 1).trim().toUpperCase()); dateTimeString = s.substring(0, spaceIndex); - } else if (JIM_DATETIME_PATTERN.matcher(s).matches()) { - return convertJimDateTimeQuiet(s); - } else if (JIM_MICROS_DATETIME_PATTERN.matcher(s).matches()) { - return convertJimMicrosDateTimeQuiet(s); } if (timeZone == null) { @@ -2066,28 +2073,6 @@ private static long parseNanos(@NotNull final String input) { return result; } - // This function and the next are FAR faster than convertJimMicrosDateTimeQuiet provided you can reuse the time zone - // across calls. Helpful for log file parsing. - public static DateTime convertJimMicrosDateTimeQuietFast(String s, DateTimeZone timeZone) { - int year = extractFourDigitNum(s, 0); - int month = extractTwoDigitNum(s, 5); - int day = extractTwoDigitNum(s, 8); - int hour = extractTwoDigitNum(s, 11); - int min = extractTwoDigitNum(s, 14); - int sec = extractTwoDigitNum(s, 17); - int micros = extractSixDigitNum(s, 20); - org.joda.time.DateTime d = - new org.joda.time.DateTime(year, month, day, hour, min, sec, micros / 1000, timeZone); - return new DateTime(millisToNanos(d.getMillis()) + (micros % 1000) * 1000); - } - - // This function is very slow. If you can call it once and reuse the result across many calls to the above, this is - // FAR faster than convertJimMicrosDateTimeQuiet - public static DateTimeZone convertJimMicrosDateTimeQuietFastTz(String s) { - int tzHours = (s.charAt(26) == '-' ? 
-1 : 1) * extractTwoDigitNum(s, 27); - return DateTimeZone.forOffsetHours(tzHours); - } - /** * Converts a time String in the form hh:mm:ss[.nnnnnnnnn] to a long nanoseconds offset from Epoch. * @@ -2140,7 +2125,7 @@ public static long convertTimeQuiet(String s) { // shouldn't get here too often, but somehow something snuck through. we'll just return null below... } - return io.deephaven.util.QueryConstants.NULL_LONG; + return NULL_LONG; } /** @@ -2166,49 +2151,6 @@ public static Period convertPeriodQuiet(String s) { return null; } - /** - * Converts a {@link DateTime} to a {@link ZonedDateTime}. - * - * @param dateTime The a {@link DateTime} to convert. - * @return A {@link ZonedDateTime} using the default time zone for the session as indicated by - * {@link TimeZone#TZ_DEFAULT}. - */ - public static ZonedDateTime getZonedDateTime(DateTime dateTime) { - return getZonedDateTime(dateTime, TimeZone.TZ_DEFAULT); - } - - /** - * Converts a {@link DateTime} to a {@link ZonedDateTime}. - * - * @param dateTime The a {@link DateTime} to convert. - * @param timeZone The {@link TimeZone} to use for the conversion. - * @return A {@link ZonedDateTime} using the specified time zone. - */ - public static ZonedDateTime getZonedDateTime(DateTime dateTime, TimeZone timeZone) { - Instant millis = dateTime.getInstant(); - ZoneId zone = timeZone.getTimeZone().toTimeZone().toZoneId(); - return ZonedDateTime.ofInstant(millis, zone); - } - - /** - * Converts a {@link ZonedDateTime} to a {@link DateTime}. - * - * @param zonedDateTime The a {@link ZonedDateTime} to convert. - * @throws DateTimeOverflowException if the input is out of the range for a {@link DateTime}, otherwise, a - * {@link DateTime} version of the input. 
- */ - public static DateTime toDateTime(ZonedDateTime zonedDateTime) { - int nanos = zonedDateTime.getNano(); - long seconds = zonedDateTime.toEpochSecond(); - - long limit = (Long.MAX_VALUE - nanos) / DateTimeUtils.SECOND; - if (seconds >= limit) { - throw new DateTimeOverflowException("Overflow: cannot convert " + zonedDateTime + " to new DateTime"); - } - - return new DateTime(nanos + (seconds * DateTimeUtils.SECOND)); - } - /** * Returns a {@link ChronoField} indicating the level of precision in a String time value. * @@ -2299,7 +2241,7 @@ public static DateTimeFormatter createFormatter(final String timeZoneName) { */ public static String getPartitionFromTimestampMillis(@NotNull final DateTimeFormatter dateTimeFormatter, final long timestampMillis) { - if (timestampMillis == io.deephaven.util.QueryConstants.NULL_LONG) { + if (timestampMillis == NULL_LONG) { return dateTimeFormatter.format(Instant.ofEpochMilli(System.currentTimeMillis())); } return dateTimeFormatter.format(Instant.ofEpochMilli(timestampMillis)); @@ -2315,7 +2257,7 @@ public static String getPartitionFromTimestampMillis(@NotNull final DateTimeForm */ public static String getPartitionFromTimestampMicros(@NotNull final DateTimeFormatter dateTimeFormatter, final long timestampMicros) { - if (timestampMicros == io.deephaven.util.QueryConstants.NULL_LONG) { + if (timestampMicros == NULL_LONG) { return dateTimeFormatter.format(Instant.ofEpochMilli(System.currentTimeMillis())); } return dateTimeFormatter.format(Instant.ofEpochMilli(timestampMicros / 1_000)); @@ -2331,7 +2273,7 @@ public static String getPartitionFromTimestampMicros(@NotNull final DateTimeForm */ public static String getPartitionFromTimestampNanos(@NotNull final DateTimeFormatter dateTimeFormatter, final long timestampNanos) { - if (timestampNanos == io.deephaven.util.QueryConstants.NULL_LONG) { + if (timestampNanos == NULL_LONG) { return dateTimeFormatter.format(Instant.ofEpochMilli(System.currentTimeMillis())); } return 
dateTimeFormatter.format(Instant.ofEpochMilli(timestampNanos / 1_000_000)); @@ -2347,7 +2289,7 @@ public static String getPartitionFromTimestampNanos(@NotNull final DateTimeForma */ public static String getPartitionFromTimestampSeconds(@NotNull final DateTimeFormatter dateTimeFormatter, final long timestampSeconds) { - if (timestampSeconds == io.deephaven.util.QueryConstants.NULL_LONG) { + if (timestampSeconds == NULL_LONG) { return dateTimeFormatter.format(Instant.ofEpochMilli(System.currentTimeMillis())); } return dateTimeFormatter.format(Instant.ofEpochMilli(timestampSeconds * 1_000)); diff --git a/engine/time/src/main/java/io/deephaven/time/TimeZone.java b/engine/time/src/main/java/io/deephaven/time/TimeZone.java index f76b4949b9f..af09f201ba0 100644 --- a/engine/time/src/main/java/io/deephaven/time/TimeZone.java +++ b/engine/time/src/main/java/io/deephaven/time/TimeZone.java @@ -3,8 +3,10 @@ */ package io.deephaven.time; +import org.jetbrains.annotations.NotNull; import org.joda.time.DateTimeZone; +import java.time.ZoneId; import java.util.Arrays; import java.util.List; @@ -15,113 +17,115 @@ public enum TimeZone { /** * America/New_York */ - TZ_NY(DateTimeZone.forID("America/New_York")), + TZ_NY("America/New_York"), /** * America/New_York */ - TZ_ET(DateTimeZone.forID("America/New_York")), + TZ_ET("America/New_York"), /** * America/Chicago */ - TZ_MN(DateTimeZone.forID("America/Chicago")), + TZ_MN("America/Chicago"), /** * America/Chicago */ - TZ_CT(DateTimeZone.forID("America/Chicago")), + TZ_CT("America/Chicago"), /** * America/Denver */ - TZ_MT(DateTimeZone.forID("America/Denver")), + TZ_MT("America/Denver"), /** * America/Los_Angeles */ - TZ_PT(DateTimeZone.forID("America/Los_Angeles")), + TZ_PT("America/Los_Angeles"), /** * Pacific/Honolulu */ - TZ_HI(DateTimeZone.forID("Pacific/Honolulu")), + TZ_HI("Pacific/Honolulu"), /** * America/Sao_Paulo */ - TZ_BT(DateTimeZone.forID("America/Sao_Paulo")), + TZ_BT("America/Sao_Paulo"), /** * Asia/Seoul */ - 
TZ_KR(DateTimeZone.forID("Asia/Seoul")), + TZ_KR("Asia/Seoul"), /** * Asia/Hong_Kong */ - TZ_HK(DateTimeZone.forID("Asia/Hong_Kong")), + TZ_HK("Asia/Hong_Kong"), /** * Asia/Tokyo */ - TZ_JP(DateTimeZone.forID("Asia/Tokyo")), + TZ_JP("Asia/Tokyo"), /** * Canada/Atlantic */ - TZ_AT(DateTimeZone.forID("Canada/Atlantic")), + TZ_AT("Canada/Atlantic"), /** * Canada/Newfoundland */ - TZ_NF(DateTimeZone.forID("Canada/Newfoundland")), + TZ_NF("Canada/Newfoundland"), /** * America/Anchorage */ - TZ_AL(DateTimeZone.forID("America/Anchorage")), + TZ_AL("America/Anchorage"), /** * Asia/Kolkata */ - TZ_IN(DateTimeZone.forID("Asia/Kolkata")), + TZ_IN("Asia/Kolkata"), /** * Europe/Berlin */ - TZ_CE(DateTimeZone.forID("Europe/Berlin")), + TZ_CE("Europe/Berlin"), /** * Asia/Singapore */ - TZ_SG(DateTimeZone.forID("Asia/Singapore")), + TZ_SG("Asia/Singapore"), /** * Europe/London */ - TZ_LON(DateTimeZone.forID("Europe/London")), + TZ_LON("Europe/London"), /** * Europe/Moscow */ - TZ_MOS(DateTimeZone.forID("Europe/Moscow")), + TZ_MOS("Europe/Moscow"), /** * Asia/Shanghai */ - TZ_SHG(DateTimeZone.forID("Asia/Shanghai")), + TZ_SHG("Asia/Shanghai"), /** * Europe/Zurich */ - TZ_CH(DateTimeZone.forID("Europe/Zurich")), + TZ_CH("Europe/Zurich"), /** * Europe/Amsterdam */ - TZ_NL(DateTimeZone.forID("Europe/Amsterdam")), + TZ_NL("Europe/Amsterdam"), /** * Asia/Taipei */ - TZ_TW(DateTimeZone.forID("Asia/Taipei")), + TZ_TW("Asia/Taipei"), /** * Australia/Sydney */ - TZ_SYD(DateTimeZone.forID("Australia/Sydney")), + TZ_SYD("Australia/Sydney"), /** * UTC */ - TZ_UTC(DateTimeZone.UTC); + TZ_UTC("UTC"); /** * The default time zone for display purposes. 
*/ public static TimeZone TZ_DEFAULT = TZ_NY; - private DateTimeZone timeZone; + private final DateTimeZone timeZone; + private final ZoneId zoneId; - TimeZone(DateTimeZone timeZone) { - this.timeZone = timeZone; + TimeZone(final @NotNull String timeZone) { + this.timeZone = DateTimeZone.forID(timeZone); + this.zoneId = ZoneId.of(timeZone); } /** @@ -133,6 +137,15 @@ public DateTimeZone getTimeZone() { return timeZone; } + /** + * Returns the Java ZoneID for this DBTimeZone; + * + * @return the ZoneId + */ + public ZoneId getZoneId() { + return zoneId; + } + /** * Find the corresponding TimeZone for a given Joda DateTimeZone. * @@ -193,7 +206,7 @@ public static TimeZone getTzDefault() { /** * Set the default time zone. - * + * * @param tzDefault the {@link TimeZone} to be used as the default. */ public static void setTzDefault(TimeZone tzDefault) { diff --git a/engine/time/src/test/java/io/deephaven/time/TestDateTime.java b/engine/time/src/test/java/io/deephaven/time/TestDateTime.java index 463cc90b18f..38eefb8657c 100644 --- a/engine/time/src/test/java/io/deephaven/time/TestDateTime.java +++ b/engine/time/src/test/java/io/deephaven/time/TestDateTime.java @@ -3,9 +3,7 @@ */ package io.deephaven.time; -import io.deephaven.base.clock.TimeZones; import io.deephaven.base.testing.BaseArrayTestCase; -import io.deephaven.time.calendar.Calendars; import junit.framework.TestCase; import org.joda.time.DateTimeZone; @@ -67,87 +65,6 @@ private long getMillisFromDateStr(SimpleDateFormat format, String dateStr) { } } - public void testLastBusinessDateNy() { - SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd"); - format.setTimeZone(TimeZones.TZ_NEWYORK); // stick with one timezone to ensure the test works properly - - String today; - String dayBefore; - - // Test that the overloaded methods match (this will break if we manage to straddle midnight while it's run!) 
- TestCase.assertEquals(DateTimeUtils.lastBusinessDateNy(), - DateTimeUtils.lastBusinessDateNy(System.currentTimeMillis())); - DateTimeUtils.endOfCurrentDateNyLastBusinessDay = 0; - - // Test Monday-Friday - today = "2013-11-18"; - dayBefore = "2013-11-15"; - TestCase.assertEquals(dayBefore, DateTimeUtils.lastBusinessDateNy(getMillisFromDateStr(format, today))); - DateTimeUtils.endOfCurrentDateNyLastBusinessDay = 0; - - // Test end of month - today = "2013-11-01"; - dayBefore = "2013-10-31"; - TestCase.assertEquals(dayBefore, DateTimeUtils.lastBusinessDateNy(getMillisFromDateStr(format, today))); - DateTimeUtils.endOfCurrentDateNyLastBusinessDay = 0; - - // Test end of year - today = "2012-01-01"; - dayBefore = "2011-12-30"; - TestCase.assertEquals(dayBefore, DateTimeUtils.lastBusinessDateNy(getMillisFromDateStr(format, today))); - DateTimeUtils.endOfCurrentDateNyLastBusinessDay = 0; - - // Test a holiday (2013 thanksgivig) - today = "2013-11-28"; - dayBefore = "2013-11-27"; - TestCase.assertEquals(dayBefore, DateTimeUtils.lastBusinessDateNy(getMillisFromDateStr(format, today))); - DateTimeUtils.endOfCurrentDateNyLastBusinessDay = 0; - today = "2013-11-29"; - dayBefore = "2013-11-27"; - TestCase.assertEquals(dayBefore, DateTimeUtils.lastBusinessDateNy(getMillisFromDateStr(format, today))); - DateTimeUtils.endOfCurrentDateNyLastBusinessDay = 0; - - // Now test the current time - // (Straight business calendar and the DBUtils codepath) - String fromCal = - Calendars.calendar().previousBusinessDay(DateTimeUtils.millisToTime(System.currentTimeMillis())); - TestCase.assertEquals(DateTimeUtils.lastBusinessDateNy(), fromCal); - // Test it a second time, since its cached - TestCase.assertEquals(DateTimeUtils.lastBusinessDateNy(), fromCal); - DateTimeUtils.endOfCurrentDateNyLastBusinessDay = 0; - - // Test cache rollover given times that advance a day - today = "2013-11-26"; - dayBefore = "2013-11-25"; - TestCase.assertEquals(dayBefore, 
DateTimeUtils.lastBusinessDateNy(getMillisFromDateStr(format, today))); - today = "2013-11-27"; - dayBefore = "2013-11-26"; - // Make sure it advances just past midnight - TestCase.assertEquals(dayBefore, DateTimeUtils.lastBusinessDateNy(getMillisFromDateStr(format, today) + 1)); - - // Rolling back should not work -- we have cached a later day - today = "2013-11-26"; - String expected = "2013-11-26"; - TestCase.assertEquals(expected, DateTimeUtils.lastBusinessDateNy(getMillisFromDateStr(format, today))); - DateTimeUtils.endOfCurrentDateNyLastBusinessDay = 0; - - // Try the date time overrides - String override = "2013-11-27"; - DateTimeUtils.lastBusinessDayNyOverride = override; - TestCase.assertEquals(DateTimeUtils.lastBusinessDateNy(), override); - DateTimeUtils.endOfCurrentDateNyLastBusinessDay = 0; - - // Now set the current date and see if the helper function workos - DateTimeUtils.currentDateNyOverride = override; - TestCase.assertEquals(DateTimeUtils.currentDateNy(), override); - DateTimeUtils.endOfCurrentDateNyLastBusinessDay = 0; - - DateTimeUtils.overrideLastBusinessDateNyFromCurrentDateNy(); - TestCase.assertEquals(DateTimeUtils.lastBusinessDateNy(), "2013-11-26"); - DateTimeUtils.endOfCurrentDateNyLastBusinessDay = 0; - - } - public void testToDateString() { DateTime dateTime = convertDateTime("2016-11-06T04:00 UTC"); // 11/6 is the last day of DST diff --git a/engine/time/src/test/java/io/deephaven/time/TestDateTimeUtils.java b/engine/time/src/test/java/io/deephaven/time/TestDateTimeUtils.java index a2a0a78879a..71825308d52 100644 --- a/engine/time/src/test/java/io/deephaven/time/TestDateTimeUtils.java +++ b/engine/time/src/test/java/io/deephaven/time/TestDateTimeUtils.java @@ -593,13 +593,6 @@ public void testUpperBinWithOffset() { DateTimeUtils.upperBin(DateTimeUtils.upperBin(time, second, second), second, second)); } - public void testConvertJimDateTimeQuiet() { - String s = "2010-09-02T08:17:17.502-0400"; - DateTime known = 
DateTimeUtils.convertDateTimeQuiet(s); - DateTime trial = DateTimeUtils.convertJimDateTimeQuiet(s); - TestCase.assertEquals(known, trial); - } - public void testGetExcelDate() { DateTime time = DateTimeUtils.convertDateTime("2010-06-15T16:00:00 NY"); TestCase.assertTrue(CompareUtils.doubleEquals(40344.666666666664, DateTimeUtils.getExcelDateTime(time))); @@ -610,7 +603,7 @@ public void testGetExcelDate() { /** * Test autoEpcohTime with the given epoch time. - * + * * @param epoch Epoch time (in seconds) * @return The year (in the New York timezone) in which the given time falls. */ diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java index 392de1a7914..eac637aa7c0 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java @@ -28,6 +28,8 @@ import java.io.IOException; import java.math.BigDecimal; import java.math.BigInteger; +import java.time.Instant; +import java.time.ZonedDateTime; import java.util.Iterator; public interface ChunkInputStreamGenerator extends SafeCloseable { @@ -101,6 +103,30 @@ static ChunkInputStreamGenerator makeInputStreamGenerator( } return new LongChunkInputStreamGenerator(outChunk, Long.BYTES, rowOffset); } + if (type == Instant.class) { + // This code path is utilized for arrays and vectors of Instant, which cannot be reinterpreted. 
+ ObjectChunk objChunk = chunk.asObjectChunk(); + WritableLongChunk outChunk = WritableLongChunk.makeWritableChunk(objChunk.size()); + for (int i = 0; i < objChunk.size(); ++i) { + outChunk.set(i, DateTimeUtils.toEpochNano(objChunk.get(i))); + } + if (chunk instanceof PoolableChunk) { + ((PoolableChunk) chunk).close(); + } + return new LongChunkInputStreamGenerator(outChunk, Long.BYTES, rowOffset); + } + if (type == ZonedDateTime.class) { + // This code path is utilized for arrays and vectors of Instant, which cannot be reinterpreted. + ObjectChunk objChunk = chunk.asObjectChunk(); + WritableLongChunk outChunk = WritableLongChunk.makeWritableChunk(objChunk.size()); + for (int i = 0; i < objChunk.size(); ++i) { + outChunk.set(i, DateTimeUtils.toEpochNano(objChunk.get(i))); + } + if (chunk instanceof PoolableChunk) { + ((PoolableChunk) chunk).close(); + } + return new LongChunkInputStreamGenerator(outChunk, Long.BYTES, rowOffset); + } if (type == Boolean.class) { return BooleanChunkInputStreamGenerator.convertBoxed(chunk.asObjectChunk(), rowOffset); } @@ -231,6 +257,18 @@ static WritableChunk extractChunkFromInputStream( fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows ); } + if (type == Instant.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Long.BYTES, options, io -> DateTimeUtils.makeInstant(io.readLong()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows + ); + } + if (type == ZonedDateTime.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Long.BYTES, options, io -> DateTimeUtils.makeZonedDateTime(io.readLong()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows + ); + } if (type == Byte.class) { return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( Byte.BYTES, options, io -> TypeUtils.box(io.readByte()), diff --git 
a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java index 90b15f95323..7ec05158c09 100755 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java @@ -55,8 +55,10 @@ import java.math.BigDecimal; import java.math.BigInteger; +import java.time.Instant; import java.time.LocalDate; import java.time.LocalTime; +import java.time.ZonedDateTime; import java.util.*; import java.util.function.*; import java.util.stream.Collectors; @@ -570,7 +572,7 @@ private static ArrowType arrowTypeFor(Class type) { || type == BigInteger.class) { return Types.MinorType.VARBINARY.getType(); } - if (type == DateTime.class) { + if (type == DateTime.class || type == Instant.class || type == ZonedDateTime.class) { return NANO_SINCE_EPOCH_TYPE; } diff --git a/open-api/lang-tools/src/test/groovy/io/deephaven/lang/completion/ColumnExpressionCompletionHandlerTest.groovy b/open-api/lang-tools/src/test/groovy/io/deephaven/lang/completion/ColumnExpressionCompletionHandlerTest.groovy index c026e92ee9b..214ead79601 100644 --- a/open-api/lang-tools/src/test/groovy/io/deephaven/lang/completion/ColumnExpressionCompletionHandlerTest.groovy +++ b/open-api/lang-tools/src/test/groovy/io/deephaven/lang/completion/ColumnExpressionCompletionHandlerTest.groovy @@ -1,5 +1,6 @@ package io.deephaven.lang.completion +import io.deephaven.base.clock.Clock; import io.deephaven.engine.context.TestExecutionContext import io.deephaven.engine.table.Table import io.deephaven.engine.table.TableDefinition @@ -15,7 +16,7 @@ import spock.lang.Unroll class ColumnExpressionCompletionHandlerTest extends Specification implements ChunkerCompleterMixin { - private static String src_(String methodName = 't', String columnName = 'Date', String completion = "las") { + private static 
String src_(String methodName = 't', String columnName = 'Date', String completion = "cur") { return """u = ${methodName}.update('$columnName = $completion""" } @@ -33,7 +34,7 @@ class ColumnExpressionCompletionHandlerTest extends Specification implements Chu def "Completion at #position should find typesafe column completion for partially completed column expressions"(int position, Set completions) { given: -//u = t.update('Date=las +//u = t.update('Date=cur String src = src_() CompletionParser p = new CompletionParser() doc = p.parse(src) @@ -42,7 +43,7 @@ class ColumnExpressionCompletionHandlerTest extends Specification implements Chu VariableProvider variables = Mock(VariableProvider) { (0..1) * getVariableNames() >> ['t'] (0..1) * getVariableType('t') >> Table - (0..1) * getTableDefinition('t') >> TableDefinition.from(['Date', 'DateTime'], [String, DateTime] + (0..1) * getTableDefinition('t') >> TableDefinition.from(['Date', 'DateClock'], [DateTime, Clock] ) } @@ -53,6 +54,7 @@ class ColumnExpressionCompletionHandlerTest extends Specification implements Chu .collect { this.doCompletion(src, it) } then: "Expect the completion result to suggest all namespaces" + result.forEach(System.out::println) result.size() == completions.size() // with() inside a then will assert on each removal with(result){ @@ -64,15 +66,15 @@ class ColumnExpressionCompletionHandlerTest extends Specification implements Chu where: position | completions - // between `e=`, expect method name completions, and a single column name completion, for DateTime + // between `e=`, expect method name completions, and a single column name completion, for Clock 19 | [ - src_('t', 'Date', "lastBusinessDateNy()'"), - src_('t', 'Date', 'lastBusinessDateNy('), + src_('t', 'Date', "currentTime()'"), + src_('t', 'Date', "currentTimeMillis()'"), ] 18 | [ - src_('t', 'Date', "lastBusinessDateNy()'"), - src_('t', 'Date', 'lastBusinessDateNy('), - src_('t', 'DateTime', 'las'), + src_('t', 'Date', 
"currentTime()'"), + src_('t', 'Date', "currentTimeMillis()'"), + src_('t', 'DateClock', "cur"), ] } diff --git a/replication/static/src/main/java/io/deephaven/replicators/ReplicateDupCompactKernel.java b/replication/static/src/main/java/io/deephaven/replicators/ReplicateDupCompactKernel.java index 40f1abeb981..86da6ebe60a 100644 --- a/replication/static/src/main/java/io/deephaven/replicators/ReplicateDupCompactKernel.java +++ b/replication/static/src/main/java/io/deephaven/replicators/ReplicateDupCompactKernel.java @@ -122,7 +122,7 @@ public static void nanFixup(String path, String type, boolean ascending) throws lines = ReplicateSortKernel.fixupNanComparisons(lines, type, ascending); - lines = simpleFixup(lines, "eq", "lhs == rhs", type + "Comparisons.eq(lhs, rhs)"); + lines = simpleFixup(lines, "equality", "lhs == rhs", type + "Comparisons.eq(lhs, rhs)"); FileUtils.writeLines(file, lines); } diff --git a/replication/static/src/main/java/io/deephaven/replicators/ReplicateFreezeBy.java b/replication/static/src/main/java/io/deephaven/replicators/ReplicateFreezeBy.java index 5efb7d27af8..1efd135cf58 100644 --- a/replication/static/src/main/java/io/deephaven/replicators/ReplicateFreezeBy.java +++ b/replication/static/src/main/java/io/deephaven/replicators/ReplicateFreezeBy.java @@ -11,18 +11,14 @@ import java.nio.charset.Charset; import java.util.Collections; import java.util.List; -import java.util.Optional; import static io.deephaven.replication.ReplicatePrimitiveCode.*; public class ReplicateFreezeBy { public static void main(String[] args) throws IOException { - final List results = charToAllButBoolean( + charToAllButBoolean( "engine/table/src/main/java/io/deephaven/engine/table/impl/util/freezeby/CharFreezeByHelper.java"); - final Optional longResult = results.stream().filter(s -> s.contains("Long")).findFirst(); - // noinspection OptionalGetWithoutIsPresent - fixupLong(longResult.get()); final String objectResult = charToObject( 
"engine/table/src/main/java/io/deephaven/engine/table/impl/util/freezeby/CharFreezeByHelper.java"); fixupObject(objectResult); @@ -48,12 +44,4 @@ private static void fixupBoolean(String booleanResult) throws IOException { "final ObjectChunk asBoolean = values.asObjectChunk"); FileUtils.writeLines(booleanFile, newLines); } - - private static void fixupLong(String longResult) throws IOException { - final File longFile = new File(longResult); - final List lines = FileUtils.readLines(longFile, Charset.defaultCharset()); - final List newLines = - ReplicationUtils.globalReplacements(0, lines, "LongArraySource", "AbstractLongArraySource"); - FileUtils.writeLines(longFile, newLines); - } } diff --git a/replication/static/src/main/java/io/deephaven/replicators/ReplicateOperators.java b/replication/static/src/main/java/io/deephaven/replicators/ReplicateOperators.java index 8a4b228f6bc..56f398fb232 100644 --- a/replication/static/src/main/java/io/deephaven/replicators/ReplicateOperators.java +++ b/replication/static/src/main/java/io/deephaven/replicators/ReplicateOperators.java @@ -64,21 +64,39 @@ private static void replicateObjectAddOnlyMinMax() throws IOException { FileUtils.writeLines(objectAddOnlyMinMaxFile, lines); } + private static final String resultInitReplacementForLong = "" + + " if (type == DateTime.class) {\n" + + " actualResult = new DateTimeArraySource();\n" + + " resultColumn = ((NanosBasedTimeArraySource)actualResult).toEpochNano();\n" + + " } else if (type == Instant.class) {\n" + + " actualResult = new InstantArraySource();\n" + + " resultColumn = ((NanosBasedTimeArraySource)actualResult).toEpochNano();\n" + + " } else {\n" + + " actualResult = resultColumn = new LongArraySource();\n" + + " }"; + private static void fixupLongAddOnlyMinMax() throws IOException { final File longAddOnlyMinMaxFile = new File( "engine/table/src/main/java/io/deephaven/engine/table/impl/by/LongChunkedAddOnlyMinMaxOperator.java"); List lines = ReplicationUtils 
.fixupChunkAttributes(FileUtils.readLines(longAddOnlyMinMaxFile, Charset.defaultCharset())); - lines = ReplicationUtils.globalReplacements(lines, "LongArraySource", "AbstractLongArraySource"); + lines = ReplicationUtils.replaceRegion(lines, "actualResult", Collections.singletonList( + " private final ArrayBackedColumnSource actualResult;")); lines = ReplicationUtils.replaceRegion(lines, "extra constructor params", Collections.singletonList(" Class type,")); - lines = ReplicationUtils.replaceRegion(lines, "resultColumn initialization", Collections.singletonList( - " resultColumn = type == DateTime.class ? new DateTimeArraySource() : new LongArraySource();")); lines = ReplicationUtils.addImport(lines, + "import java.time.Instant;", "import io.deephaven.time.DateTime;", + "import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource;", "import io.deephaven.engine.table.impl.sources.DateTimeArraySource;", - "import io.deephaven.engine.table.impl.sources.LongArraySource;"); + "import io.deephaven.engine.table.impl.sources.InstantArraySource;", + "import io.deephaven.engine.table.impl.sources.LongArraySource;", + "import io.deephaven.engine.table.impl.sources.NanosBasedTimeArraySource;"); + lines = ReplicationUtils.replaceRegion(lines, "resultColumn initialization", + Collections.singletonList(resultInitReplacementForLong)); + lines = ReplicationUtils.replaceRegion(lines, "getResultColumns", Collections.singletonList( + " return Collections.>singletonMap(name, actualResult);")); FileUtils.writeLines(longAddOnlyMinMaxFile, lines); } diff --git a/replication/static/src/main/java/io/deephaven/replicators/ReplicateSourcesAndChunks.java b/replication/static/src/main/java/io/deephaven/replicators/ReplicateSourcesAndChunks.java index 5e42dacfc14..9d4bc590d8a 100644 --- a/replication/static/src/main/java/io/deephaven/replicators/ReplicateSourcesAndChunks.java +++ b/replication/static/src/main/java/io/deephaven/replicators/ReplicateSourcesAndChunks.java @@ -3,6 +3,7 @@ 
*/ package io.deephaven.replicators; +import io.deephaven.base.verify.Require; import io.deephaven.replication.ReplicatePrimitiveCode; import io.deephaven.replication.ReplicationUtils; import org.apache.commons.io.FileUtils; @@ -10,10 +11,18 @@ import java.io.File; import java.io.IOException; import java.nio.charset.Charset; -import java.nio.file.Files; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.ZonedDateTime; import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.function.LongFunction; +import java.util.function.ToLongFunction; +import java.util.stream.Collectors; +import java.util.stream.Stream; import static io.deephaven.replication.ReplicatePrimitiveCode.*; import static io.deephaven.replication.ReplicationUtils.*; @@ -21,11 +30,10 @@ public class ReplicateSourcesAndChunks { public static void main(String... args) throws IOException { + replicateArraySources(); replicateSparseArraySources(); - replicateSingleValues(); - charToAllButBooleanAndLong( - "engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterArraySource.java"); + charToAllButBoolean( "engine/table/src/main/java/io/deephaven/engine/table/impl/sources/aggregate/CharAggregateColumnSource.java"); charToAllButBoolean( @@ -39,7 +47,7 @@ public static void main(String... args) throws IOException { charToAllButBooleanAndLong( "engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableCharArraySource.java"); - fixupLongReinterpret(charToLong( + fixupImmutableLongArraySource(charToLong( "engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableCharArraySource.java")); fixupByteReinterpret( "engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableByteArraySource.java"); @@ -47,7 +55,7 @@ public static void main(String... 
args) throws IOException { charToAllButBooleanAndLong( "engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DCharArraySource.java"); - fixupLongReinterpret(charToLong( + fixupImmutable2DLongArraySource(charToLong( "engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DCharArraySource.java")); fixupByteReinterpret( "engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DByteArraySource.java"); @@ -55,7 +63,7 @@ public static void main(String... args) throws IOException { charToAllButBooleanAndLong( "engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantCharSource.java"); - fixupLongReinterpret(charToLong( + fixupImmutableConstantLongSource(charToLong( "engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantCharSource.java")); fixupByteReinterpret( "engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantByteSource.java"); @@ -98,21 +106,230 @@ private static void replicateObjectSizedChunk() throws IOException { FileUtils.writeLines(classFile, lines); } - private static void fixupLongReinterpret(String longImmutableSource) throws IOException { + private static void fixupImmutableLongArraySource(String longImmutableSource) throws IOException { final File resultClassJavaFile = new File(longImmutableSource); List lines = FileUtils.readLines(resultClassJavaFile, Charset.defaultCharset()); lines = addImport(lines, "import io.deephaven.time.DateTime;"); lines = addImport(lines, "import io.deephaven.engine.table.ColumnSource;"); - lines = replaceRegion(lines, "reinterpret", Arrays.asList(" @Override", - " public boolean allowsReinterpret(", - " @NotNull final Class alternateDataType) {", - " return alternateDataType == DateTime.class;", + lines = addImport(lines, LongFunction.class, ToLongFunction.class, Instant.class, ZonedDateTime.class, + LocalDate.class, 
LocalTime.class, Require.class, ZoneId.class); + lines = standardCleanups(lines); + lines = globalReplacements(lines, "/\\*\\s*MIXIN_IMPLS\\s*\\*/", ", ConvertableTimeSource"); + lines = replaceRegion(lines, "fillChunkByRanges", l -> addLongToBoxedAdapter(l, "LongFunction", + "WritableObjectChunk", "asWritableObjectChunk")); + lines = replaceRegion(lines, "fillChunkByKeys", l -> addLongToBoxedAdapter(l, "LongFunction", + "WritableObjectChunk", "asWritableObjectChunk")); + lines = replaceRegion(lines, "fillChunkUnordered", l -> addLongToBoxedAdapter(l, "LongFunction", + "WritableObjectChunk", "asWritableObjectChunk")); + lines = replaceRegion(lines, "fillFromChunkByRanges", + l -> addLongToBoxedAdapter(l, "ToLongFunction", "ObjectChunk", + "asObjectChunk")); + lines = replaceRegion(lines, "fillFromChunkByKeys", + l -> addLongToBoxedAdapter(l, "ToLongFunction", "ObjectChunk", + "asObjectChunk")); + lines = replaceRegion(lines, "fillFromChunkUnordered", + l -> addLongToBoxedAdapter(l, "ToLongFunction", "ObjectChunk", + "asObjectChunk")); + lines = replaceRegion(lines, "reinterpretation", Arrays.asList( + " @Override", + " public boolean allowsReinterpret(@NotNull final Class alternateDataType) {", + " return alternateDataType == long.class || alternateDataType == Instant.class || alternateDataType == DateTime.class;", " }", "", - " protected ColumnSource doReinterpret(", - " @NotNull Class alternateDataType) {", - " //noinspection unchecked", - " return (ColumnSource) new LongAsDateTimeColumnSource(this);", + " @SuppressWarnings(\"unchecked\")", + " @Override", + " protected ColumnSource doReinterpret(@NotNull Class alternateDataType) {", + " if (alternateDataType == this.getType()) {", + " return (ColumnSource) this;", + " } else if(alternateDataType == DateTime.class) {", + " return (ColumnSource) toDateTime();", + " } else if (alternateDataType == Instant.class) {", + " return (ColumnSource) toInstant();", + " }", + "", + " throw new 
IllegalArgumentException(\"Cannot reinterpret `\" + getType().getName() + \"` to `\" + alternateDataType.getName() + \"`\");", + " }", + "", + " @Override", + " public boolean supportsTimeConversion() {", + " return true;", + " }", + "", + " @Override", + " public ColumnSource toZonedDateTime(final @NotNull ZoneId zone) {", + " return new ImmutableZonedDateTimeArraySource(Require.neqNull(zone, \"zone\"), this);", + " }", + "", + " @Override", + " public ColumnSource toLocalDate(final @NotNull ZoneId zone) {", + " return new LocalDateWrapperSource(toZonedDateTime(zone), zone);", + " }", + "", + " @Override", + " public ColumnSource toLocalTime(final @NotNull ZoneId zone) {", + " return new LocalTimeWrapperSource(toZonedDateTime(zone), zone);", + " }", + "", + " @Override", + " public ColumnSource toDateTime() {", + " return new ImmutableDateTimeArraySource(this);", + " }", + "", + " @Override", + " public ColumnSource toInstant() {", + " return new ImmutableInstantArraySource(this);", + " }", + "", + " @Override", + " public ColumnSource toEpochNano() {", + " return this;", + " }")); + FileUtils.writeLines(resultClassJavaFile, lines); + } + + private static void fixupImmutableConstantLongSource(String longImmutableSource) throws IOException { + final File resultClassJavaFile = new File(longImmutableSource); + List lines = FileUtils.readLines(resultClassJavaFile, Charset.defaultCharset()); + lines = addImport(lines, "import io.deephaven.time.DateTime;"); + lines = addImport(lines, "import io.deephaven.engine.table.ColumnSource;"); + lines = addImport(lines, Instant.class, ZonedDateTime.class, LocalDate.class, LocalTime.class, Require.class, + ZoneId.class); + lines = standardCleanups(lines); + lines = globalReplacements(lines, "/\\*\\s*MIXIN_IMPLS\\s*\\*/", ", ConvertableTimeSource"); + lines = replaceRegion(lines, "reinterpretation", Arrays.asList( + " @Override", + " public boolean allowsReinterpret(@NotNull final Class alternateDataType) {", + " return 
alternateDataType == long.class || alternateDataType == Instant.class || alternateDataType == DateTime.class;", + " }", + "", + " @SuppressWarnings(\"unchecked\")", + " @Override", + " protected ColumnSource doReinterpret(@NotNull Class alternateDataType) {", + " if (alternateDataType == this.getType()) {", + " return (ColumnSource) this;", + " } else if(alternateDataType == DateTime.class) {", + " return (ColumnSource) toDateTime();", + " } else if (alternateDataType == Instant.class) {", + " return (ColumnSource) toInstant();", + " }", + "", + " throw new IllegalArgumentException(\"Cannot reinterpret `\" + getType().getName() + \"` to `\" + alternateDataType.getName() + \"`\");", + " }", + "", + " @Override", + " public boolean supportsTimeConversion() {", + " return true;", + " }", + "", + " @Override", + " public ColumnSource toZonedDateTime(final @NotNull ZoneId zone) {", + " return new ImmutableConstantZonedDateTimeSource(Require.neqNull(zone, \"zone\"), this);", + " }", + "", + " @Override", + " public ColumnSource toLocalDate(final @NotNull ZoneId zone) {", + " return new LocalDateWrapperSource(toZonedDateTime(zone), zone);", + " }", + "", + " @Override", + " public ColumnSource toLocalTime(final @NotNull ZoneId zone) {", + " return new LocalTimeWrapperSource(toZonedDateTime(zone), zone);", + " }", + "", + " @Override", + " public ColumnSource toDateTime() {", + " return new ImmutableConstantDateTimeSource(this);", + " }", + "", + " @Override", + " public ColumnSource toInstant() {", + " return new ImmutableConstantInstantSource(this);", + " }", + "", + " @Override", + " public ColumnSource toEpochNano() {", + " return this;", + " }")); + FileUtils.writeLines(resultClassJavaFile, lines); + } + + private static void fixupImmutable2DLongArraySource(String longImmutableSource) throws IOException { + final File resultClassJavaFile = new File(longImmutableSource); + List lines = FileUtils.readLines(resultClassJavaFile, Charset.defaultCharset()); + lines = 
addImport(lines, "import io.deephaven.time.DateTime;"); + lines = addImport(lines, "import io.deephaven.engine.table.ColumnSource;"); + lines = addImport(lines, LongFunction.class, ToLongFunction.class, Instant.class, ZonedDateTime.class, + LocalDate.class, LocalTime.class, Require.class, ZoneId.class); + lines = standardCleanups(lines); + lines = globalReplacements(lines, "/\\*\\s*MIXIN_IMPLS\\s*\\*/", ", ConvertableTimeSource"); + lines = replaceRegion(lines, "fillChunkByRanges", l -> addLongToBoxedAdapter(l, "LongFunction", + "WritableObjectChunk", "asWritableObjectChunk")); + lines = replaceRegion(lines, "fillChunkByKeys", l -> addLongToBoxedAdapter(l, "LongFunction", + "WritableObjectChunk", "asWritableObjectChunk")); + lines = replaceRegion(lines, "fillChunkUnordered", l -> addLongToBoxedAdapter(l, "LongFunction", + "WritableObjectChunk", "asWritableObjectChunk")); + lines = replaceRegion(lines, "fillFromChunkByRanges", + l -> addLongToBoxedAdapter(l, "ToLongFunction", "ObjectChunk", + "asObjectChunk")); + lines = replaceRegion(lines, "fillFromChunkByKeys", + l -> addLongToBoxedAdapter(l, "ToLongFunction", "ObjectChunk", + "asObjectChunk")); + lines = replaceRegion(lines, "fillFromChunkUnordered", + l -> addLongToBoxedAdapter(l, "ToLongFunction", "ObjectChunk", + "asObjectChunk")); + lines = replaceRegion(lines, "reinterpretation", Arrays.asList( + " @Override", + " public boolean allowsReinterpret(@NotNull final Class alternateDataType) {", + " return alternateDataType == long.class || alternateDataType == Instant.class || alternateDataType == DateTime.class;", + " }", + "", + " @SuppressWarnings(\"unchecked\")", + " @Override", + " protected ColumnSource doReinterpret(@NotNull Class alternateDataType) {", + " if (alternateDataType == this.getType()) {", + " return (ColumnSource) this;", + " } else if(alternateDataType == DateTime.class) {", + " return (ColumnSource) toDateTime();", + " } else if (alternateDataType == Instant.class) {", + " return 
(ColumnSource) toInstant();", + " }", + "", + " throw new IllegalArgumentException(\"Cannot reinterpret `\" + getType().getName() + \"` to `\" + alternateDataType.getName() + \"`\");", + " }", + "", + " @Override", + " public boolean supportsTimeConversion() {", + " return true;", + " }", + "", + " @Override", + " public ColumnSource toZonedDateTime(final @NotNull ZoneId zone) {", + " return new Immutable2DZonedDateTimeArraySource(Require.neqNull(zone, \"zone\"), this);", + " }", + "", + " @Override", + " public ColumnSource toLocalDate(final @NotNull ZoneId zone) {", + " return new LocalDateWrapperSource(toZonedDateTime(zone), zone);", + " }", + "", + " @Override", + " public ColumnSource toLocalTime(final @NotNull ZoneId zone) {", + " return new LocalTimeWrapperSource(toZonedDateTime(zone), zone);", + " }", + "", + " @Override", + " public ColumnSource toDateTime() {", + " return new Immutable2DDateTimeArraySource(this);", + " }", + "", + " @Override", + " public ColumnSource toInstant() {", + " return new Immutable2DInstantArraySource(this);", + " }", + "", + " @Override", + " public ColumnSource toEpochNano() {", + " return this;", " }")); FileUtils.writeLines(resultClassJavaFile, lines); } @@ -121,7 +338,7 @@ private static void fixupByteReinterpret(String byteImmutableSource) throws IOEx final File resultClassJavaFile = new File(byteImmutableSource); List lines = FileUtils.readLines(resultClassJavaFile, Charset.defaultCharset()); lines = addImport(lines, "import io.deephaven.engine.table.ColumnSource;"); - lines = replaceRegion(lines, "reinterpret", Arrays.asList(" @Override", + lines = replaceRegion(lines, "reinterpretation", Arrays.asList(" @Override", " public boolean allowsReinterpret(", " @NotNull final Class alternateDataType) {", " return alternateDataType == Boolean.class;", @@ -323,15 +540,20 @@ private static List genericObjectColumnSourceReplacements(List l return lines; } + private static void replicateArraySources() throws IOException { + 
charToAllButBooleanAndLong( + "engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterArraySource.java"); + replicateLongArraySource(); + } + private static void replicateSparseArraySources() throws IOException { replicateOneOrN(); charToAllButBooleanAndLong( "engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterSparseArraySource.java"); - replicateSparseLongSource(); - - replicateSparseBooleanSource(); - replicateSparseObjectSource(); + replicateLongSparseArraySource(); + replicateBooleanSparseArraySource(); + replicateObjectSparseArraySource(); } private static void replicateChunks() throws IOException { @@ -475,10 +697,10 @@ private static void replicateWritableObjectChunks() throws IOException { lines = ReplicationUtils.removeRegion(lines, "BufferImports"); lines = expandDowncast(lines, "WritableObjectChunk"); lines = ReplicationUtils.replaceRegion(lines, "fillWithBoxedValue", Arrays.asList( - " @Override\n" + - " public final void fillWithBoxedValue(int offset, int size, Object value) {\n" + - " fillWithValue(offset,size, (T)value);\n" + - " }")); + " @Override", + " public final void fillWithBoxedValue(int offset, int size, Object value) {", + " fillWithValue(offset,size, (T)value);", + " }")); lines = ReplicationUtils.addImport(lines, "import io.deephaven.util.compare.ObjectComparisons;", "import java.util.Comparator;"); @@ -656,42 +878,281 @@ private static void replicateObjectChunkFiller() throws IOException { FileUtils.writeLines(classFile, lines); } - private static void replicateSparseLongSource() throws IOException { - final File longSparseArraySourceFile = new File( - "engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongSparseArraySource.java"); - final File abstractLongSparseArraySourceFile = new File( - "engine/table/src/main/java/io/deephaven/engine/table/impl/sources/AbstractSparseLongArraySource.java"); - - - final String longSparseCode = 
FileUtils.readFileToString(longSparseArraySourceFile, Charset.defaultCharset()); - charToLong("engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterSparseArraySource.java", - Collections.emptyMap()); - - Files.delete(abstractLongSparseArraySourceFile.toPath()); - Files.move(longSparseArraySourceFile.toPath(), abstractLongSparseArraySourceFile.toPath()); + private static void replicateLongArraySource() throws IOException { + final String className = charToLong( + "engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterArraySource.java"); + final File classFile = new File(className); + List lines = FileUtils.readLines(classFile, Charset.defaultCharset()); + lines = addImport(lines, + "import io.deephaven.time.DateTime;", + "import io.deephaven.engine.table.impl.util.copy.CopyKernel;"); + lines = addImport(lines, LongFunction.class, ToLongFunction.class, Instant.class, ZonedDateTime.class, + LocalDate.class, LocalTime.class, Require.class, ZoneId.class); + lines = standardCleanups(lines); + lines = globalReplacements(lines, "/\\*\\s*MIXIN_IMPLS\\s*\\*/", ", ConvertableTimeSource"); + lines = replaceRegion(lines, "getAndAddUnsafe", Arrays.asList( + " public final long getAndAddUnsafe(long index, long addend) {", + " final int blockIndex = (int) (index >> LOG_BLOCK_SIZE);", + " final int indexWithinBlock = (int) (index & INDEX_MASK);", + " final long oldValue = blocks[blockIndex][indexWithinBlock];", + " if (addend != 0) {", + " if (shouldRecordPrevious(index, prevBlocks, recycler)) {", + " prevBlocks[blockIndex][indexWithinBlock] = oldValue;", + " }", + " blocks[blockIndex][indexWithinBlock] = oldValue + addend;", + " }", + " return oldValue;", + " }")); + lines = replaceRegion(lines, "fillChunk", l -> addLongToBoxedAdapter(l, "LongFunction", + "WritableObjectChunk", "asWritableObjectChunk")); + lines = replaceRegion(lines, "fillPrevChunk", l -> addLongToBoxedAdapter(l, "LongFunction", + "WritableObjectChunk", 
"asWritableObjectChunk")); + lines = replaceRegion(lines, "fillSparseChunk", l -> addLongToBoxedAdapter(l, "LongFunction", + "WritableObjectChunk", "asWritableObjectChunk")); + lines = replaceRegion(lines, "fillSparsePrevChunk", l -> addLongToBoxedAdapter(l, "LongFunction", + "WritableObjectChunk", "asWritableObjectChunk")); + lines = replaceRegion(lines, "fillSparseChunkUnordered", l -> addLongToBoxedAdapter(l, "LongFunction", + "WritableObjectChunk", "asWritableObjectChunk")); + lines = replaceRegion(lines, "fillSparsePrevChunkUnordered", l -> addLongToBoxedAdapter(l, "LongFunction", + "WritableObjectChunk", "asWritableObjectChunk")); + lines = replaceRegion(lines, "copyFromChunk", + l -> addLongToBoxedAdapter(l, "ToLongFunction", "ObjectChunk", + "asObjectChunk")); + lines = replaceRegion(lines, "fillFromChunkByRanges", + l -> addLongToBoxedAdapter(l, "ToLongFunction", "ObjectChunk", + "asObjectChunk")); + lines = replaceRegion(lines, "fillFromChunkByKeys", + l -> addLongToBoxedAdapter(l, "ToLongFunction", "ObjectChunk", + "asObjectChunk")); + lines = replaceRegion(lines, "fillFromChunkUnordered", + l -> addLongToBoxedAdapter(l, "ToLongFunction", "ObjectChunk", + "asObjectChunk")); + lines = replaceRegion(lines, "reinterpretation", Arrays.asList( + " @Override", + " public boolean allowsReinterpret(@NotNull final Class alternateDataType) {", + " return alternateDataType == long.class || alternateDataType == Instant.class || alternateDataType == DateTime.class;", + " }", + "", + " @SuppressWarnings(\"unchecked\")", + " @Override", + " protected ColumnSource doReinterpret(@NotNull Class alternateDataType) {", + " if (alternateDataType == this.getType()) {", + " return (ColumnSource) this;", + " } else if(alternateDataType == DateTime.class) {", + " return (ColumnSource) toDateTime();", + " } else if (alternateDataType == Instant.class) {", + " return (ColumnSource) toInstant();", + " }", + "", + " throw new IllegalArgumentException(\"Cannot reinterpret `\" + 
getType().getName() + \"` to `\" + alternateDataType.getName() + \"`\");", + " }", + "", + " @Override", + " public boolean supportsTimeConversion() {", + " return true;", + " }", + "", + " @Override", + " public ColumnSource toZonedDateTime(final @NotNull ZoneId zone) {", + " return new ZonedDateTimeArraySource(Require.neqNull(zone, \"zone\"), this);", + " }", + "", + " @Override", + " public ColumnSource toLocalDate(final @NotNull ZoneId zone) {", + " return new LocalDateWrapperSource(toZonedDateTime(zone), zone);", + " }", + "", + " @Override", + " public ColumnSource toLocalTime(final @NotNull ZoneId zone) {", + " return new LocalTimeWrapperSource(toZonedDateTime(zone), zone);", + " }", + "", + " @Override", + " public ColumnSource toDateTime() {", + " return new DateTimeArraySource(this);", + " }", + "", + " @Override", + " public ColumnSource toInstant() {", + " return new InstantArraySource(this);", + " }", + "", + " @Override", + " public ColumnSource toEpochNano() {", + " return this;", + " }")); + FileUtils.writeLines(classFile, lines); + } - FileUtils.writeStringToFile(longSparseArraySourceFile, longSparseCode, Charset.defaultCharset()); + private static void replicateLongSparseArraySource() throws IOException { + final String className = charToLong( + "engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterSparseArraySource.java"); + final File classFile = new File(className); + List lines = FileUtils.readLines(classFile, Charset.defaultCharset()); + lines = addImport(lines, "import io.deephaven.time.DateTime;"); + lines = addImport(lines, LongFunction.class, ToLongFunction.class, Instant.class, ZonedDateTime.class, + LocalDate.class, LocalTime.class, Require.class, ZoneId.class); + lines = standardCleanups(lines); + lines = globalReplacements(lines, "/\\*\\s*MIXIN_IMPLS\\s*\\*/", ", ConvertableTimeSource"); + lines = replaceRegion(lines, "fillByRanges", l -> addLongToBoxedAdapter(l, "LongFunction", + "WritableObjectChunk", 
"asWritableObjectChunk")); + lines = replaceRegion(lines, "fillByKeys", l -> addLongToBoxedAdapter(l, "LongFunction", + "WritableObjectChunk", "asWritableObjectChunk")); + lines = replaceRegion(lines, "fillByUnRowSequence", l -> addLongToBoxedAdapter(l, "LongFunction", + "WritableObjectChunk", "asWritableObjectChunk")); + lines = replaceRegion(lines, "fillFromChunkByRanges", + l -> addLongToBoxedAdapter(l, "ToLongFunction", "ObjectChunk", + "asObjectChunk")); + lines = replaceRegion(lines, "fillFromChunkByKeys", + l -> addLongToBoxedAdapter(l, "ToLongFunction", "ObjectChunk", + "asObjectChunk")); + lines = replaceRegion(lines, "fillFromChunkUnordered", + l -> addLongToBoxedAdapter(l, "ToLongFunction", "ObjectChunk", + "asObjectChunk")); + lines = replaceRegion(lines, "reinterpretation", Arrays.asList( + " @Override", + " public boolean allowsReinterpret(@NotNull final Class alternateDataType) {", + " return alternateDataType == long.class || alternateDataType == Instant.class || alternateDataType == DateTime.class;", + " }", + "", + " @SuppressWarnings(\"unchecked\")", + " @Override", + " protected ColumnSource doReinterpret(@NotNull Class alternateDataType) {", + " if (alternateDataType == this.getType()) {", + " return (ColumnSource) this;", + " } else if(alternateDataType == DateTime.class) {", + " return (ColumnSource) toDateTime();", + " } else if (alternateDataType == Instant.class) {", + " return (ColumnSource) toInstant();", + " }", + "", + " throw new IllegalArgumentException(\"Cannot reinterpret `\" + getType().getName() + \"` to `\" + alternateDataType.getName() + \"`\");", + " }", + "", + " @Override", + " public boolean supportsTimeConversion() {", + " return true;", + " }", + "", + " @Override", + " public ColumnSource toZonedDateTime(final @NotNull ZoneId zone) {", + " return new ZonedDateTimeSparseArraySource(Require.neqNull(zone, \"zone\"), this);", + " }", + "", + " @Override", + " public ColumnSource toLocalDate(final @NotNull ZoneId zone) {", + 
" return new LocalDateWrapperSource(toZonedDateTime(zone), zone);", + " }", + "", + " @Override", + " public ColumnSource toLocalTime(final @NotNull ZoneId zone) {", + " return new LocalTimeWrapperSource(toZonedDateTime(zone), zone);", + " }", + "", + " @Override", + " public ColumnSource toDateTime() {", + " return new DateTimeSparseArraySource(this);", + " }", + "", + " @Override", + " public ColumnSource toInstant() {", + " return new InstantSparseArraySource(this);", + " }", + "", + " @Override", + " public ColumnSource toEpochNano() {", + " return this;", + " }")); + FileUtils.writeLines(classFile, lines); + } - List abstractLines = FileUtils.readLines(abstractLongSparseArraySourceFile, Charset.defaultCharset()); - abstractLines = globalReplacements(abstractLines, "LongSparseArraySource", "AbstractSparseLongArraySource", - "public class AbstractSparseLongArraySource extends SparseArrayColumnSource implements MutableColumnSourceGetDefaults.ForLong", - "abstract public class AbstractSparseLongArraySource extends SparseArrayColumnSource implements MutableColumnSourceGetDefaults.LongBacked", - "ColumnSource", "ColumnSource"); - abstractLines = replaceRegion(abstractLines, "constructor", Arrays.asList( - " AbstractSparseLongArraySource(Class type) {", - " super(type);", - " blocks = new LongOneOrN.Block0();", " }")); - abstractLines = replaceRegion(abstractLines, "boxed methods", Collections.emptyList()); - abstractLines = replaceRegion(abstractLines, "copy method", Collections.emptyList()); - abstractLines = simpleFixup(abstractLines, "getChunk", "LongChunk getChunk", "Chunk getChunk"); - abstractLines = simpleFixup(abstractLines, "getPrevChunk", "LongChunk getPrevChunk", - "Chunk getPrevChunk"); - abstractLines = standardCleanups(abstractLines); - - FileUtils.writeLines(abstractLongSparseArraySourceFile, abstractLines); + private static List addLongToBoxedAdapter( + final List inputLines, String convertType, String chunkClass, String chunkMethod) { + List 
permuted = globalReplacements(inputLines, + "/\\*\\s*TYPE_MIXIN\\s*\\*/", "", + "/\\*\\s*CONVERTER\\s*\\*/", ", " + convertType + " converter", + "/\\*\\s*CONVERTER_ARG\\s*\\*/", ", converter", + "@Override", "", + "NULL_LONG", "null"); + + permuted = applyFixup(permuted, "chunkDecl", "^(\\s+)[^=]+=\\s+([^.]+)(.*)", (m) -> Collections.singletonList( + m.group(1) + "final " + chunkClass + " chunk = " + m.group(2) + "." + chunkMethod + "();")); + permuted = replaceRegion(permuted, "copyFromTypedArray", Arrays.asList( + " for (int ii = 0; ii < length; ii++) {", + " chunk.set(offset + ii, converter.apply(block[sIndexWithinBlock + ii]));", + " }")); + permuted = replaceRegion(permuted, "copyToTypedArray", Arrays.asList( + " for (int jj = 0; jj < length; jj++) {", + " block[jj + sIndexWithinBlock] = converter.applyAsLong(chunk.get(offset + jj));", + " }")); + permuted = replaceRegion(permuted, "copyFromTypedArrayImmutable", Arrays.asList( + " final int offset = destPosition.getAndAdd(length);", + " for (int ii = 0; ii < length; ii++) {", + " chunk.set(offset + ii, converter.apply(data[(int)start + ii]));", + " }")); + permuted = replaceRegion(permuted, "copyToTypedArrayImmutable", Arrays.asList( + " final int offset = srcPos.getAndAdd(length);", + " for (int jj = 0; jj < length; jj++) {", + " data[(int)start + jj] = converter.applyAsLong(chunk.get(offset + jj));", + " }")); + permuted = replaceRegion(permuted, "copyFromTypedArrayImmutable2D", Arrays.asList( + " final int destOffset = destPosition.getAndAdd(length);", + " for (int ii = 0; ii < length; ii++) {", + " chunk.set(destOffset + ii, converter.apply(data[segment][offset + ii]));", + " }")); + permuted = replaceRegion(permuted, "copyToTypedArrayImmutable2D", Arrays.asList( + " final int offset = srcPos.getAndAdd(length);", + " for (int jj = 0; jj < length; jj++) {", + " data[segment][destOffset + jj] = converter.applyAsLong(chunk.get(offset + jj));", + " }")); + permuted = replaceRegion(permuted, "conditionalCopy", 
Arrays.asList( + " long[] baseInput = (long[]) getBlock(blockNo);", + " long[] overInput = (long[]) getPrevBlock(blockNo);", + " effectiveContext.copyKernel.conditionalCopy(destination, baseInput, overInput,", + " inUse, srcOffset, destOffset.intValue(), length);", + "", + " int bitsSet = 0;", + " final int bitsetLen = (length + 63) >> 6;", + " final int bitsetOffset = srcOffset >> 6;", + " for (int i = 0; i < bitsetLen; ++i) {", + " bitsSet += Long.bitCount(inUse[i + bitsetOffset]);", + " }", + " final int totalBits = bitsetLen << 6;", + " final boolean flipBase = bitsSet > totalBits / 2;", + "", + " // mem-copy from baseline", + " for (int ii = 0; ii < length; ++ii) {", + " chunk.set(destOffset.intValue() + ii, converter.apply((flipBase ? overInput : baseInput)[srcOffset + ii]));", + " }", + "", + " final int srcEndOffset = srcOffset + length;", + " for (int ii = CopyKernel.Utils.nextSetBit(inUse, srcOffset, srcEndOffset, flipBase);", + " ii < srcEndOffset;", + " ii = CopyKernel.Utils.nextSetBit(inUse, ii + 1, srcEndOffset, flipBase)) {", + " chunk.set(destOffset.intValue() + ii - srcOffset,", + " converter.apply(flipBase ? 
baseInput[ii] : overInput[ii]));", + " }")); + permuted = applyFixup(permuted, "copyFromArray", + "^(\\s+).+\\.copyFromArray\\(([^,]+), ([^,]+), ([^,]+), ([^)]+)\\).*", (m) -> Arrays.asList( + m.group(1) + "{", + m.group(1) + " long[] block = (long[])" + m.group(2) + ";", + m.group(1) + " for (int ii = 0; ii < " + m.group(5) + "; ii++) {", + m.group(1) + " chunk.set(ii + " + m.group(4) + ", converter.apply(block[ii + " + + m.group(3) + "]));", + m.group(1) + " }", + m.group(1) + "}")); + permuted = applyFixup(permuted, "conversion", "^(\\s+chunk\\.set\\([^,]+,)([^;]+);", + (m) -> Collections.singletonList(m.group(1) + "converter.apply(" + m.group(2) + ");")); + permuted = applyFixup(permuted, "conversion", "^([^=]+=\\s+)([^;]+);", + (m) -> Collections.singletonList(m.group(1) + "converter.applyAsLong(" + m.group(2) + ");")); + permuted = applyFixup(permuted, "conversion", "^(\\s+set\\([^,]+,)([^;]+);", + (m) -> Collections.singletonList(m.group(1) + "converter.applyAsLong(" + m.group(2) + ");")); + + // Stick them together. 
+ return Stream.concat(inputLines.stream(), permuted.stream()).collect(Collectors.toList()); } - private static void replicateSparseBooleanSource() throws IOException { + private static void replicateBooleanSparseArraySource() throws IOException { final String booleanPath = charToBooleanAsByte( "engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterSparseArraySource.java", Collections.emptyMap()); @@ -730,7 +1191,7 @@ private static void replicateSparseBooleanSource() throws IOException { lines = applyFixup(lines, "fillByKeys", "(.*chunk.set\\(.*, )(ctx\\.block.*)(\\);.*)", m -> Collections .singletonList(m.group(1) + "BooleanUtils.byteAsBoolean(" + m.group(2) + ")" + m.group(3))); - lines = applyFixup(lines, "fillByUnRowSequence", "(.*byteChunk.set\\(.*, )(block.*)(\\);.*)", m -> Collections + lines = applyFixup(lines, "fillByUnRowSequence", "(.*chunk.set\\(.*, )(block.*)(\\);.*)", m -> Collections .singletonList(m.group(1) + "BooleanUtils.byteAsBoolean(" + m.group(2) + ")" + m.group(3))); lines = applyFixup(lines, "fillFromChunkByKeys", "(.*)(chunk.get\\(.*\\));", m -> Collections.singletonList(m.group(1) + "BooleanUtils.booleanAsByte(" + m.group(2) + ");")); @@ -756,9 +1217,8 @@ private static void replicateSparseBooleanSource() throws IOException { " return getPrevChunkByFilling(context, RowSequence).asObjectChunk();", " }")); - lines = simpleFixup(lines, "fillByUnRowSequence", "WritableObjectChunk byteChunk", - "WritableObjectChunk byteChunk"); - lines = simpleFixup(lines, "fillByUnRowSequence", "byteChunk", "booleanObjectChunk"); + lines = simpleFixup(lines, "fillByUnRowSequence", "WritableObjectChunk chunk", + "WritableObjectChunk chunk"); lines = simpleFixup(lines, "fillByUnRowSequence", "BooleanUtils\\.byteAsBoolean\\(blockToUse == null \\? NULL_BOOLEAN : blockToUse\\[indexWithinBlock\\]\\)", "blockToUse == null ? 
NULL_BOOLEAN : BooleanUtils.byteAsBoolean(blockToUse[indexWithinBlock])"); @@ -1038,7 +1498,7 @@ private static void replicateSparseBooleanSource() throws IOException { FileUtils.writeLines(booleanFile, lines); } - private static void replicateSparseObjectSource() throws IOException { + private static void replicateObjectSparseArraySource() throws IOException { final String objectPath = charToObject( "engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterSparseArraySource.java"); final File objectFile = new File(objectPath); @@ -1050,8 +1510,10 @@ private static void replicateSparseObjectSource() throws IOException { lines = globalReplacements(lines, "ObjectOneOrN.Block([0-2])", "ObjectOneOrN.Block$1"); lines = globalReplacements(lines, - "public class ObjectSparseArraySource extends SparseArrayColumnSource implements MutableColumnSourceGetDefaults.ForObject", - "public class ObjectSparseArraySource extends SparseArrayColumnSource implements MutableColumnSourceGetDefaults.ForObject", + "public class ObjectSparseArraySource extends SparseArrayColumnSource", + "public class ObjectSparseArraySource extends SparseArrayColumnSource", + "implements MutableColumnSourceGetDefaults.ForObject", + "implements MutableColumnSourceGetDefaults.ForObject", "Object[ ]?\\[\\]", "T []", "NULL_OBJECT", "null", "getObject", "get", diff --git a/replication/util/src/main/java/io/deephaven/replication/ReplicatePrimitiveCode.java b/replication/util/src/main/java/io/deephaven/replication/ReplicatePrimitiveCode.java index 1716a6aa594..27014bd897b 100644 --- a/replication/util/src/main/java/io/deephaven/replication/ReplicatePrimitiveCode.java +++ b/replication/util/src/main/java/io/deephaven/replication/ReplicatePrimitiveCode.java @@ -117,6 +117,12 @@ public static String charToLong(String sourceClassJavaPath, Map se "Long", "Long", "long", "long", "LONG"); } + public static String charToInstant(String sourceClassJavaPath, Map serialVersionUIDs, + String... 
exemptions) throws IOException { + return replicateCodeBasedOnChar(sourceClassJavaPath, serialVersionUIDs, exemptions, + "Instant", "Instant", "Instant", "Instant", "INSTANT"); + } + public static String charToShort(String sourceClassJavaPath, Map serialVersionUIDs, String... exemptions) throws IOException { return replicateCodeBasedOnChar(sourceClassJavaPath, serialVersionUIDs, exemptions, @@ -311,6 +317,10 @@ public static String charToLong(String sourceClassJavaPath, String... exemptions return charToLong(sourceClassJavaPath, null, exemptions); } + public static String charToInstant(String sourceClassJavaPath, String... exemptions) throws IOException { + return charToInstant(sourceClassJavaPath, null, exemptions); + } + public static void charToAllButBooleanAndLong(String sourceClassJavaPath, String... exemptions) throws IOException { charToAllButBooleanAndLong(sourceClassJavaPath, null, exemptions); diff --git a/replication/util/src/main/java/io/deephaven/replication/ReplicationUtils.java b/replication/util/src/main/java/io/deephaven/replication/ReplicationUtils.java index e83e4f112f5..5a9cda2ed06 100644 --- a/replication/util/src/main/java/io/deephaven/replication/ReplicationUtils.java +++ b/replication/util/src/main/java/io/deephaven/replication/ReplicationUtils.java @@ -32,8 +32,8 @@ public static List applyFixup(List lines, final String region, f final Function> replacer) { final List newLines = new ArrayList<>(); - final Pattern startPattern = Pattern.compile("// region " + region); - final Pattern endPattern = Pattern.compile("// endregion " + region); + final Pattern startPattern = constructRegionStartPattern(region); + final Pattern endPattern = constructRegionEndPattern(region); final Pattern replacePattern = Pattern.compile(searchPattern); @@ -73,7 +73,7 @@ public static List applyFixup(List lines, final String region, f /** * Take a list of lines; and apply a given fixup expressed as a code region and replacements - * + * * @param lines the input 
lines * @param region the name of the region started by "// region <name>" and ended by "// endregion <name>" * @param replacements an array with an even number of elements, even elements are a thing to replace, the next @@ -84,8 +84,8 @@ public static List applyFixup(List lines, final String region, f public static List simpleFixup(List lines, final String region, final String... replacements) { final List newLines = new ArrayList<>(); - final Pattern startPattern = Pattern.compile("// region " + region); - final Pattern endPattern = Pattern.compile("// endregion " + region); + final Pattern startPattern = constructRegionStartPattern(region); + final Pattern endPattern = constructRegionEndPattern(region); boolean inRegion = false; for (String line : lines) { @@ -146,8 +146,8 @@ public static List standardCleanups(List lines) { public static List insertRegion(List lines, final String region, List extraLines) { final List newLines = new ArrayList<>(); - final Pattern startPattern = Pattern.compile("// region " + region); - final Pattern endPattern = Pattern.compile("// endregion " + region); + final Pattern startPattern = constructRegionStartPattern(region); + final Pattern endPattern = constructRegionEndPattern(region); boolean inRegion = false; for (String line : lines) { @@ -199,11 +199,29 @@ public static List removeRegion(List lines, final String region) */ @NotNull public static List replaceRegion(List lines, final String region, List replacement) { + return replaceRegion(lines, region, l -> replacement); + } + + /** + * Locates the region demarked by "// region <name>" and ended by "// endregion <name>" and replaces the + * text with the contents of replacement. 
+ * + * @param lines the lines to process + * @param region the name of the region + * @param replacement the lines to insert + * @return a new list of lines + */ + @NotNull + public static List replaceRegion( + List lines, + final String region, + Function, List> replacement) { final List newLines = new ArrayList<>(); - final Pattern startPattern = Pattern.compile("//\\s*region " + region); - final Pattern endPattern = Pattern.compile("//\\s*endregion " + region); + final Pattern startPattern = constructRegionStartPattern(region); + final Pattern endPattern = constructRegionEndPattern(region); + final List currentRegion = new ArrayList<>(); boolean inRegion = false; for (String line : lines) { if (startPattern.matcher(line).find()) { @@ -211,17 +229,18 @@ public static List replaceRegion(List lines, final String region throw new IllegalStateException(); } newLines.add(line); - newLines.addAll(replacement); inRegion = true; - } - if (endPattern.matcher(line).find()) { + } else if (endPattern.matcher(line).find()) { if (!inRegion) { throw new IllegalStateException(); } inRegion = false; - } - if (!inRegion) { + newLines.addAll(replacement.apply(currentRegion)); + newLines.add(line); + } else if (!inRegion) { newLines.add(line); + } else { + currentRegion.add(line); } } @@ -232,6 +251,16 @@ public static List replaceRegion(List lines, final String region return newLines; } + @NotNull + private static Pattern constructRegionStartPattern(String region) { + return Pattern.compile("//\\s*region " + region + "(?=\\s|$)"); + } + + @NotNull + private static Pattern constructRegionEndPattern(String region) { + return Pattern.compile("//\\s*endregion " + region + "(?=\\s|$)"); + } + public static List globalReplacements(int skip, List lines, String... 
replacements) { if (replacements.length == 0 || replacements.length % 2 != 0) { throw new IllegalArgumentException("Bad replacement length: " + replacements.length); @@ -306,7 +335,7 @@ private static List removeAnyImports(List lines, List p return newLines; } - static private String doLineReplacements(String x, String... replacements) { + private static String doLineReplacements(String x, String... replacements) { if (replacements.length % 2 != 0) { throw new IllegalStateException("Replacmement length is not even!"); } From 9bf67ac93c292215bc6447c4ccfde8cc793d112a Mon Sep 17 00:00:00 2001 From: Nathaniel Bauernfeind Date: Tue, 31 Jan 2023 14:25:48 -0700 Subject: [PATCH 02/14] Port for RegionedColumnSources --- .../regioned/RegionedColumnSourceBoolean.java | 10 +- .../regioned/RegionedColumnSourceByte.java | 43 +++---- .../regioned/RegionedColumnSourceChar.java | 31 +---- .../RegionedColumnSourceDateTime.java | 60 ++++++++- .../regioned/RegionedColumnSourceDouble.java | 31 +---- .../regioned/RegionedColumnSourceFloat.java | 31 +---- .../regioned/RegionedColumnSourceInstant.java | 96 +++++++++++++++ .../regioned/RegionedColumnSourceInt.java | 31 +---- .../regioned/RegionedColumnSourceLong.java | 94 +++++++++++---- .../RegionedColumnSourceReferencing.java | 88 ++++++-------- .../regioned/RegionedColumnSourceShort.java | 31 +---- .../RegionedColumnSourceZonedDateTime.java | 113 +++++++++++++++++ .../RegionedTableComponentFactoryImpl.java | 5 +- .../TestRegionedColumnSourceBoolean.java | 6 +- .../TestRegionedColumnSourceByte.java | 3 +- .../TestRegionedColumnSourceChar.java | 3 +- .../TestRegionedColumnSourceDateTime.java | 6 +- .../TestRegionedColumnSourceDouble.java | 3 +- .../TestRegionedColumnSourceFloat.java | 3 +- .../regioned/TestRegionedColumnSourceInt.java | 3 +- .../TestRegionedColumnSourceLong.java | 3 +- .../TestRegionedColumnSourceShort.java | 3 +- .../TstRegionedColumnSourceObject.java | 2 +- .../TstRegionedColumnSourcePrimitive.java | 25 ++-- 
.../TstRegionedColumnSourceReferencing.java | 31 ++--- .../ReplicateRegionsAndRegionedSources.java | 114 +++++++++++++++++- 26 files changed, 568 insertions(+), 301 deletions(-) create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceInstant.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceZonedDateTime.java diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceBoolean.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceBoolean.java index 137891918cf..351a97d8908 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceBoolean.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceBoolean.java @@ -8,6 +8,7 @@ import io.deephaven.engine.table.impl.ColumnSourceGetDefaults; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.RowSequence; +import org.jetbrains.annotations.NotNull; /** * Regioned column source implementation for columns of Booleans. @@ -17,8 +18,11 @@ final class RegionedColumnSourceBoolean implements ColumnSourceGetDefaults.ForBoolean { public RegionedColumnSourceBoolean() { - super(ColumnRegionByte.createNull(PARAMETERS.regionMask), Boolean.class, - RegionedColumnSourceByte.NativeType.AsValues::new); + this(new RegionedColumnSourceByte.AsValues()); + } + + public RegionedColumnSourceBoolean(final @NotNull RegionedColumnSourceByte inner) { + super(ColumnRegionByte.createNull(PARAMETERS.regionMask), Boolean.class, inner); } @Override @@ -39,6 +43,6 @@ public void convertRegion(WritableChunk destination, @Override public Boolean get(long rowKey) { return rowKey == RowSequence.NULL_ROW_KEY ? 
null : - BooleanUtils.byteAsBoolean(lookupRegion(rowKey).getReferencedRegion().getByte(rowKey)); + BooleanUtils.byteAsBoolean(getNativeSource().lookupRegion(rowKey).getByte(rowKey)); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceByte.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceByte.java index d6d8531302f..d617073347a 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceByte.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceByte.java @@ -8,6 +8,8 @@ */ package io.deephaven.engine.table.impl.sources.regioned; +import io.deephaven.engine.table.ColumnSource; + import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.table.ColumnDefinition; import io.deephaven.engine.table.impl.locations.ColumnLocation; @@ -24,7 +26,7 @@ */ abstract class RegionedColumnSourceByte extends RegionedColumnSourceArray> - implements ColumnSourceGetDefaults.ForByte { + implements ColumnSourceGetDefaults.ForByte /* MIXIN_INTERFACES */ { RegionedColumnSourceByte(@NotNull final ColumnRegionByte nullRegion, @NotNull final MakeDeferred> makeDeferred) { @@ -48,35 +50,22 @@ default ColumnRegionByte makeRegion(@NotNull final ColumnDefinition c } } - static final class AsValues extends RegionedColumnSourceByte implements MakeRegionDefault { - AsValues() { - super(ColumnRegionByte.createNull(PARAMETERS.regionMask), DeferredColumnRegionByte::new); - } + // region reinterpretation + @Override + public boolean allowsReinterpret(@NotNull Class alternateDataType) { + return alternateDataType == boolean.class || alternateDataType == Boolean.class || super.allowsReinterpret(alternateDataType); } - /** - * These are used by {@link RegionedColumnSourceReferencing} subclass who want a native byte type. 
This class does - * not hold an array of regions, but rather derives from {@link RegionedColumnSourceBase}, accessing its - * regions by looking into the delegate instance's region array. - */ - @SuppressWarnings("unused") - static abstract class NativeType - extends RegionedColumnSourceReferencing.NativeColumnSource> - implements ColumnSourceGetDefaults.ForByte { - - NativeType(@NotNull final RegionedColumnSourceBase>> outerColumnSource) { - super(Byte.class, outerColumnSource); - } - - @Override - public byte getByte(final long rowKey) { - return (rowKey == RowSequence.NULL_ROW_KEY ? getNullRegion() : lookupRegion(rowKey)).getByte(rowKey); - } + @Override + protected ColumnSource doReinterpret(@NotNull Class alternateDataType) { + //noinspection unchecked + return (ColumnSource) new RegionedColumnSourceBoolean((RegionedColumnSourceByte)this); + } + // endregion reinterpretation - static final class AsValues extends NativeType implements MakeRegionDefault { - AsValues(@NotNull final RegionedColumnSourceBase>> outerColumnSource) { - super(outerColumnSource); - } + static final class AsValues extends RegionedColumnSourceByte implements MakeRegionDefault { + AsValues() { + super(ColumnRegionByte.createNull(PARAMETERS.regionMask), DeferredColumnRegionByte::new); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceChar.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceChar.java index 99fbddcc771..0316daefc43 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceChar.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceChar.java @@ -19,7 +19,7 @@ */ abstract class RegionedColumnSourceChar extends RegionedColumnSourceArray> - implements ColumnSourceGetDefaults.ForChar { + implements ColumnSourceGetDefaults.ForChar /* MIXIN_INTERFACES */ { 
RegionedColumnSourceChar(@NotNull final ColumnRegionChar nullRegion, @NotNull final MakeDeferred> makeDeferred) { @@ -43,38 +43,15 @@ default ColumnRegionChar makeRegion(@NotNull final ColumnDefinition c } } + // region reinterpretation + // endregion reinterpretation + static final class AsValues extends RegionedColumnSourceChar implements MakeRegionDefault { AsValues() { super(ColumnRegionChar.createNull(PARAMETERS.regionMask), DeferredColumnRegionChar::new); } } - /** - * These are used by {@link RegionedColumnSourceReferencing} subclass who want a native char type. This class does - * not hold an array of regions, but rather derives from {@link RegionedColumnSourceBase}, accessing its - * regions by looking into the delegate instance's region array. - */ - @SuppressWarnings("unused") - static abstract class NativeType - extends RegionedColumnSourceReferencing.NativeColumnSource> - implements ColumnSourceGetDefaults.ForChar { - - NativeType(@NotNull final RegionedColumnSourceBase>> outerColumnSource) { - super(Character.class, outerColumnSource); - } - - @Override - public char getChar(final long rowKey) { - return (rowKey == RowSequence.NULL_ROW_KEY ? 
getNullRegion() : lookupRegion(rowKey)).getChar(rowKey); - } - - static final class AsValues extends NativeType implements MakeRegionDefault { - AsValues(@NotNull final RegionedColumnSourceBase>> outerColumnSource) { - super(outerColumnSource); - } - } - } - static final class Partitioning extends RegionedColumnSourceChar { Partitioning() { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceDateTime.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceDateTime.java index e000c4f9f0f..0eb4edd9eb0 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceDateTime.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceDateTime.java @@ -4,23 +4,34 @@ package io.deephaven.engine.table.impl.sources.regioned; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.impl.sources.ConvertableTimeSource; import io.deephaven.time.DateTime; import io.deephaven.time.DateTimeUtils; import io.deephaven.engine.table.impl.ColumnSourceGetDefaults; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.RowSequence; +import org.jetbrains.annotations.NotNull; + +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.ZonedDateTime; /** * Regioned column source implementation for columns of {@link DateTime}s. 
*/ final class RegionedColumnSourceDateTime - extends - RegionedColumnSourceReferencing> - implements ColumnSourceGetDefaults.ForObject { + extends RegionedColumnSourceReferencing> + implements ColumnSourceGetDefaults.ForObject, ConvertableTimeSource { public RegionedColumnSourceDateTime() { - super(ColumnRegionLong.createNull(PARAMETERS.regionMask), DateTime.class, - RegionedColumnSourceLong.NativeType.AsValues::new); + this(new RegionedColumnSourceLong.AsValues()); + } + + public RegionedColumnSourceDateTime(@NotNull final RegionedColumnSourceLong inner) { + super(ColumnRegionLong.createNull(PARAMETERS.regionMask), DateTime.class, inner); } @Override @@ -41,6 +52,43 @@ public void convertRegion(WritableChunk destination, @Override public DateTime get(long rowKey) { return rowKey == RowSequence.NULL_ROW_KEY ? null - : DateTimeUtils.nanosToTime(lookupRegion(rowKey).getReferencedRegion().getLong(rowKey)); + : DateTimeUtils.nanosToTime(getNativeSource().lookupRegion(rowKey).getLong(rowKey)); + } + + @Override + public boolean supportsTimeConversion() { + return true; + } + + @Override + public ColumnSource toInstant() { + return new RegionedColumnSourceInstant((RegionedColumnSourceLong) getNativeSource()); + } + + @Override + public ColumnSource toZonedDateTime(@NotNull final ZoneId zone) { + return new RegionedColumnSourceZonedDateTime(zone, (RegionedColumnSourceLong) getNativeSource()); + } + + @Override + public ColumnSource toLocalDate(@NotNull final ZoneId zone) { + return RegionedColumnSourceZonedDateTime.asLocalDate(zone, + (RegionedColumnSourceLong) getNativeSource()); + } + + @Override + public ColumnSource toLocalTime(@NotNull final ZoneId zone) { + return RegionedColumnSourceZonedDateTime.asLocalTime(zone, + (RegionedColumnSourceLong) getNativeSource()); + } + + @Override + public ColumnSource toDateTime() { + return this; + } + + @Override + public ColumnSource toEpochNano() { + return getNativeSource(); } } diff --git 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceDouble.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceDouble.java index 6f031254366..6c50a7cc3a0 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceDouble.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceDouble.java @@ -24,7 +24,7 @@ */ abstract class RegionedColumnSourceDouble extends RegionedColumnSourceArray> - implements ColumnSourceGetDefaults.ForDouble { + implements ColumnSourceGetDefaults.ForDouble /* MIXIN_INTERFACES */ { RegionedColumnSourceDouble(@NotNull final ColumnRegionDouble nullRegion, @NotNull final MakeDeferred> makeDeferred) { @@ -48,38 +48,15 @@ default ColumnRegionDouble makeRegion(@NotNull final ColumnDefinition } } + // region reinterpretation + // endregion reinterpretation + static final class AsValues extends RegionedColumnSourceDouble implements MakeRegionDefault { AsValues() { super(ColumnRegionDouble.createNull(PARAMETERS.regionMask), DeferredColumnRegionDouble::new); } } - /** - * These are used by {@link RegionedColumnSourceReferencing} subclass who want a native double type. This class does - * not hold an array of regions, but rather derives from {@link RegionedColumnSourceBase}, accessing its - * regions by looking into the delegate instance's region array. - */ - @SuppressWarnings("unused") - static abstract class NativeType - extends RegionedColumnSourceReferencing.NativeColumnSource> - implements ColumnSourceGetDefaults.ForDouble { - - NativeType(@NotNull final RegionedColumnSourceBase>> outerColumnSource) { - super(Double.class, outerColumnSource); - } - - @Override - public double getDouble(final long rowKey) { - return (rowKey == RowSequence.NULL_ROW_KEY ? 
getNullRegion() : lookupRegion(rowKey)).getDouble(rowKey); - } - - static final class AsValues extends NativeType implements MakeRegionDefault { - AsValues(@NotNull final RegionedColumnSourceBase>> outerColumnSource) { - super(outerColumnSource); - } - } - } - static final class Partitioning extends RegionedColumnSourceDouble { Partitioning() { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceFloat.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceFloat.java index 406e3818272..ae8bb9184dc 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceFloat.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceFloat.java @@ -24,7 +24,7 @@ */ abstract class RegionedColumnSourceFloat extends RegionedColumnSourceArray> - implements ColumnSourceGetDefaults.ForFloat { + implements ColumnSourceGetDefaults.ForFloat /* MIXIN_INTERFACES */ { RegionedColumnSourceFloat(@NotNull final ColumnRegionFloat nullRegion, @NotNull final MakeDeferred> makeDeferred) { @@ -48,38 +48,15 @@ default ColumnRegionFloat makeRegion(@NotNull final ColumnDefinition } } + // region reinterpretation + // endregion reinterpretation + static final class AsValues extends RegionedColumnSourceFloat implements MakeRegionDefault { AsValues() { super(ColumnRegionFloat.createNull(PARAMETERS.regionMask), DeferredColumnRegionFloat::new); } } - /** - * These are used by {@link RegionedColumnSourceReferencing} subclass who want a native float type. This class does - * not hold an array of regions, but rather derives from {@link RegionedColumnSourceBase}, accessing its - * regions by looking into the delegate instance's region array. 
- */ - @SuppressWarnings("unused") - static abstract class NativeType - extends RegionedColumnSourceReferencing.NativeColumnSource> - implements ColumnSourceGetDefaults.ForFloat { - - NativeType(@NotNull final RegionedColumnSourceBase>> outerColumnSource) { - super(Float.class, outerColumnSource); - } - - @Override - public float getFloat(final long rowKey) { - return (rowKey == RowSequence.NULL_ROW_KEY ? getNullRegion() : lookupRegion(rowKey)).getFloat(rowKey); - } - - static final class AsValues extends NativeType implements MakeRegionDefault { - AsValues(@NotNull final RegionedColumnSourceBase>> outerColumnSource) { - super(outerColumnSource); - } - } - } - static final class Partitioning extends RegionedColumnSourceFloat { Partitioning() { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceInstant.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceInstant.java new file mode 100644 index 00000000000..6f1d1414fe5 --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceInstant.java @@ -0,0 +1,96 @@ +package io.deephaven.engine.table.impl.sources.regioned; + +import io.deephaven.chunk.Chunk; +import io.deephaven.chunk.LongChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.rowset.RowSequence; +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.impl.ColumnSourceGetDefaults; +import io.deephaven.engine.table.impl.sources.ConvertableTimeSource; +import io.deephaven.time.DateTime; +import io.deephaven.time.DateTimeUtils; +import org.jetbrains.annotations.NotNull; + +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.ZonedDateTime; + +/** + * Regioned column source implementation for 
columns of {@link Instant}s. + */ +final class RegionedColumnSourceInstant + extends RegionedColumnSourceReferencing> + implements ColumnSourceGetDefaults.ForObject, ConvertableTimeSource { + + public RegionedColumnSourceInstant() { + this(new RegionedColumnSourceLong.AsValues()); + } + + public RegionedColumnSourceInstant(final @NotNull RegionedColumnSourceLong inner) { + super(ColumnRegionLong.createNull(PARAMETERS.regionMask), Instant.class, inner); + } + + @Override + public void convertRegion( + WritableChunk destination, + Chunk source, + RowSequence rowSequence) { + WritableObjectChunk objectChunk = destination.asWritableObjectChunk(); + LongChunk longChunk = source.asLongChunk(); + + final int size = objectChunk.size(); + final int length = longChunk.size(); + + for (int i = 0; i < length; ++i) { + objectChunk.set(size + i, DateTimeUtils.makeInstant(longChunk.get(i))); + } + objectChunk.setSize(size + length); + } + + @Override + public Instant get(long elementIndex) { + return elementIndex == RowSequence.NULL_ROW_KEY ? 
null + : DateTimeUtils.makeInstant(getNativeSource().lookupRegion(elementIndex).getLong(elementIndex)); + } + + @Override + public boolean supportsTimeConversion() { + return true; + } + + @Override + public ColumnSource toInstant() { + return this; + } + + @Override + public ColumnSource toZonedDateTime(final @NotNull ZoneId zone) { + return new RegionedColumnSourceZonedDateTime(zone, (RegionedColumnSourceLong) getNativeSource()); + } + + @Override + public ColumnSource toLocalDate(ZoneId zone) { + return RegionedColumnSourceZonedDateTime.asLocalDate(zone, + (RegionedColumnSourceLong) getNativeSource()); + } + + @Override + public ColumnSource toLocalTime(ZoneId zone) { + return RegionedColumnSourceZonedDateTime.asLocalTime(zone, + (RegionedColumnSourceLong) getNativeSource()); + } + + @Override + public ColumnSource toDateTime() { + return new RegionedColumnSourceDateTime((RegionedColumnSourceLong) getNativeSource()); + } + + @Override + public ColumnSource toEpochNano() { + return getNativeSource(); + } +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceInt.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceInt.java index 74afce7f362..f07e2d05733 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceInt.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceInt.java @@ -24,7 +24,7 @@ */ abstract class RegionedColumnSourceInt extends RegionedColumnSourceArray> - implements ColumnSourceGetDefaults.ForInt { + implements ColumnSourceGetDefaults.ForInt /* MIXIN_INTERFACES */ { RegionedColumnSourceInt(@NotNull final ColumnRegionInt nullRegion, @NotNull final MakeDeferred> makeDeferred) { @@ -48,38 +48,15 @@ default ColumnRegionInt makeRegion(@NotNull final ColumnDefinition co } } + // region reinterpretation + // endregion reinterpretation + static final 
class AsValues extends RegionedColumnSourceInt implements MakeRegionDefault { AsValues() { super(ColumnRegionInt.createNull(PARAMETERS.regionMask), DeferredColumnRegionInt::new); } } - /** - * These are used by {@link RegionedColumnSourceReferencing} subclass who want a native int type. This class does - * not hold an array of regions, but rather derives from {@link RegionedColumnSourceBase}, accessing its - * regions by looking into the delegate instance's region array. - */ - @SuppressWarnings("unused") - static abstract class NativeType - extends RegionedColumnSourceReferencing.NativeColumnSource> - implements ColumnSourceGetDefaults.ForInt { - - NativeType(@NotNull final RegionedColumnSourceBase>> outerColumnSource) { - super(Integer.class, outerColumnSource); - } - - @Override - public int getInt(final long rowKey) { - return (rowKey == RowSequence.NULL_ROW_KEY ? getNullRegion() : lookupRegion(rowKey)).getInt(rowKey); - } - - static final class AsValues extends NativeType implements MakeRegionDefault { - AsValues(@NotNull final RegionedColumnSourceBase>> outerColumnSource) { - super(outerColumnSource); - } - } - } - static final class Partitioning extends RegionedColumnSourceInt { Partitioning() { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceLong.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceLong.java index 621493fc4ac..e303b00a395 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceLong.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceLong.java @@ -8,6 +8,18 @@ */ package io.deephaven.engine.table.impl.sources.regioned; +import java.time.Instant; +import java.time.ZonedDateTime; +import java.time.LocalDate; +import java.time.LocalTime; +import java.time.ZoneId; + +import io.deephaven.time.DateTime; +import 
io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.impl.sources.LocalDateWrapperSource; +import io.deephaven.engine.table.impl.sources.LocalTimeWrapperSource; +import io.deephaven.engine.table.impl.sources.ConvertableTimeSource; + import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.table.ColumnDefinition; import io.deephaven.engine.table.impl.locations.ColumnLocation; @@ -24,7 +36,7 @@ */ abstract class RegionedColumnSourceLong extends RegionedColumnSourceArray> - implements ColumnSourceGetDefaults.ForLong { + implements ColumnSourceGetDefaults.ForLong ,ConvertableTimeSource { RegionedColumnSourceLong(@NotNull final ColumnRegionLong nullRegion, @NotNull final MakeDeferred> makeDeferred) { @@ -48,35 +60,69 @@ default ColumnRegionLong makeRegion(@NotNull final ColumnDefinition c } } - static final class AsValues extends RegionedColumnSourceLong implements MakeRegionDefault { - AsValues() { - super(ColumnRegionLong.createNull(PARAMETERS.regionMask), DeferredColumnRegionLong::new); + // region reinterpretation + @Override + public boolean allowsReinterpret(@NotNull Class alternateDataType) { + if(super.allowsReinterpret(alternateDataType)) { + return true; } + + return alternateDataType == Instant.class || + alternateDataType == DateTime.class; } - /** - * These are used by {@link RegionedColumnSourceReferencing} subclass who want a native long type. This class does - * not hold an array of regions, but rather derives from {@link RegionedColumnSourceBase}, accessing its - * regions by looking into the delegate instance's region array. 
- */ - @SuppressWarnings("unused") - static abstract class NativeType - extends RegionedColumnSourceReferencing.NativeColumnSource> - implements ColumnSourceGetDefaults.ForLong { - - NativeType(@NotNull final RegionedColumnSourceBase>> outerColumnSource) { - super(Long.class, outerColumnSource); + @SuppressWarnings("unchecked") + @Override + protected ColumnSource doReinterpret(@NotNull Class alternateDataType) { + if(alternateDataType == Instant.class) { + return (ColumnSource) toInstant(); + } else if(alternateDataType == DateTime.class) { + return (ColumnSource) toDateTime(); } - @Override - public long getLong(final long rowKey) { - return (rowKey == RowSequence.NULL_ROW_KEY ? getNullRegion() : lookupRegion(rowKey)).getLong(rowKey); - } + return super.doReinterpret(alternateDataType); + } - static final class AsValues extends NativeType implements MakeRegionDefault { - AsValues(@NotNull final RegionedColumnSourceBase>> outerColumnSource) { - super(outerColumnSource); - } + @Override + public boolean supportsTimeConversion() { + return true; + } + + public ColumnSource toInstant() { + //noinspection unchecked + return new RegionedColumnSourceInstant((RegionedColumnSourceLong) this); + } + + public ColumnSource toDateTime() { + //noinspection unchecked + return new RegionedColumnSourceDateTime((RegionedColumnSourceLong) this); + } + + @Override + public ColumnSource toZonedDateTime(ZoneId zone) { + //noinspection unchecked + return new RegionedColumnSourceZonedDateTime(zone, (RegionedColumnSourceLong) this); + } + + @Override + public ColumnSource toLocalTime(ZoneId zone) { + return new LocalTimeWrapperSource(toZonedDateTime(zone), zone); + } + + @Override + public ColumnSource toLocalDate(ZoneId zone) { + return new LocalDateWrapperSource(toZonedDateTime(zone), zone); + } + + @Override + public ColumnSource toEpochNano() { + return this; + } + // endregion reinterpretation + + static final class AsValues extends RegionedColumnSourceLong implements 
MakeRegionDefault { + AsValues() { + super(ColumnRegionLong.createNull(PARAMETERS.regionMask), DeferredColumnRegionLong::new); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceReferencing.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceReferencing.java index 68d1dbc74ce..b963c238557 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceReferencing.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceReferencing.java @@ -7,10 +7,8 @@ import io.deephaven.engine.table.ColumnDefinition; import io.deephaven.engine.table.impl.locations.ColumnLocation; import io.deephaven.engine.table.ColumnSource; -import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.SharedContext; import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; import javax.annotation.OverridingMethodsMustInvokeSuper; @@ -19,23 +17,21 @@ * underlying native column and its resources. 
*/ abstract class RegionedColumnSourceReferencing> - extends RegionedColumnSourceArray> + extends RegionedColumnSourceBase> implements ColumnRegionReferencingImpl.Converter { - @FunctionalInterface - interface NativeSourceCreator> { - NativeColumnSource create( - RegionedColumnSourceBase> outerSource); - } + @NotNull + private final ColumnRegionReferencing.Null nullRegion; @NotNull - private final NativeColumnSource nativeSource; + private final RegionedColumnSourceBase nativeSource; RegionedColumnSourceReferencing(@NotNull final NATIVE_REGION_TYPE nullRegion, @NotNull Class type, - @NotNull NativeSourceCreator nativeSourceCreator) { - super(new ColumnRegionReferencing.Null<>(nullRegion), type, DeferredColumnRegionReferencing::new); - nativeSource = nativeSourceCreator.create(this); + @NotNull RegionedColumnSourceBase nativeSource) { + super(type); + this.nullRegion = new ColumnRegionReferencing.Null<>(nullRegion); + this.nativeSource = nativeSource; } @Override @@ -55,55 +51,43 @@ protected ColumnSource doReinterpret( } @Override - @Nullable - public ColumnRegionReferencing makeRegion(@NotNull ColumnDefinition columnDefinition, - @NotNull ColumnLocation columnLocation, int regionIndex) { - NATIVE_REGION_TYPE nativeRegionType = nativeSource.makeRegion(columnDefinition, columnLocation, regionIndex); - return nativeRegionType == null ? 
null : new ColumnRegionReferencingImpl<>(nativeRegionType); - } + public ColumnRegionReferencing getRegion(int regionIndex) { + final NATIVE_REGION_TYPE nativeRegion = nativeSource.getRegion(regionIndex); + if (nativeRegion == nativeSource.getNullRegion()) { + return nullRegion; + } - final ChunkSource.FillContext makeFillContext(ColumnRegionReferencing.Converter converter, int chunkCapacity, - SharedContext sharedContext) { - return new ColumnRegionReferencingImpl.FillContext<>(nativeSource, converter, chunkCapacity, sharedContext); + return new ColumnRegionReferencingImpl<>(nativeRegion); } @Override - public FillContext makeFillContext(int chunkCapacity, SharedContext sharedContext) { - return makeFillContext(this, chunkCapacity, sharedContext); + public int getRegionCount() { + return nativeSource.getRegionCount(); } - abstract static class NativeColumnSource> - extends - RegionedColumnSourceInner> - implements MakeRegion { - - NativeColumnSource(@NotNull Class type, - RegionedColumnSourceBase> outerColumnSource) { - super(type, outerColumnSource); - } + @Override + public int addRegion(@NotNull ColumnDefinition columnDefinition, @NotNull ColumnLocation columnLocation) { + return nativeSource.addRegion(columnDefinition, columnLocation); + } - @Override - @NotNull - NATIVE_REGION_TYPE getNullRegion() { - return getOuterColumnSource().getNullRegion().getReferencedRegion(); - } + @Override + int addRegionForUnitTests(OTHER_REGION_TYPE region) { + return nativeSource.addRegionForUnitTests(region); + } - @Override - public NATIVE_REGION_TYPE getRegion(int regionIndex) { - return getOuterColumnSource().getRegion(regionIndex).getReferencedRegion(); - } + @Override + public FillContext makeFillContext(int chunkCapacity, SharedContext sharedContext) { + return new ColumnRegionReferencingImpl.FillContext<>(nativeSource, this, chunkCapacity, sharedContext); + } - @Override - public final boolean allowsReinterpret( - @NotNull final Class alternateDataType) { - return 
getOuterColumnSource().getType() == alternateDataType; - } + @NotNull + @Override + public ColumnRegionReferencing.Null getNullRegion() { + return nullRegion; + } - @Override - protected final ColumnSource doReinterpret( - @NotNull final Class alternateDataType) { - // noinspection unchecked - return (ColumnSource) getOuterColumnSource(); - } + @NotNull + public RegionedColumnSourceBase getNativeSource() { + return nativeSource; } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceShort.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceShort.java index 9fa3bfe83c0..181984c3ea3 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceShort.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceShort.java @@ -24,7 +24,7 @@ */ abstract class RegionedColumnSourceShort extends RegionedColumnSourceArray> - implements ColumnSourceGetDefaults.ForShort { + implements ColumnSourceGetDefaults.ForShort /* MIXIN_INTERFACES */ { RegionedColumnSourceShort(@NotNull final ColumnRegionShort nullRegion, @NotNull final MakeDeferred> makeDeferred) { @@ -48,38 +48,15 @@ default ColumnRegionShort makeRegion(@NotNull final ColumnDefinition } } + // region reinterpretation + // endregion reinterpretation + static final class AsValues extends RegionedColumnSourceShort implements MakeRegionDefault { AsValues() { super(ColumnRegionShort.createNull(PARAMETERS.regionMask), DeferredColumnRegionShort::new); } } - /** - * These are used by {@link RegionedColumnSourceReferencing} subclass who want a native short type. This class does - * not hold an array of regions, but rather derives from {@link RegionedColumnSourceBase}, accessing its - * regions by looking into the delegate instance's region array. 
- */ - @SuppressWarnings("unused") - static abstract class NativeType - extends RegionedColumnSourceReferencing.NativeColumnSource> - implements ColumnSourceGetDefaults.ForShort { - - NativeType(@NotNull final RegionedColumnSourceBase>> outerColumnSource) { - super(Short.class, outerColumnSource); - } - - @Override - public short getShort(final long rowKey) { - return (rowKey == RowSequence.NULL_ROW_KEY ? getNullRegion() : lookupRegion(rowKey)).getShort(rowKey); - } - - static final class AsValues extends NativeType implements MakeRegionDefault { - AsValues(@NotNull final RegionedColumnSourceBase>> outerColumnSource) { - super(outerColumnSource); - } - } - } - static final class Partitioning extends RegionedColumnSourceShort { Partitioning() { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceZonedDateTime.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceZonedDateTime.java new file mode 100644 index 00000000000..8491d4e98ac --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceZonedDateTime.java @@ -0,0 +1,113 @@ +package io.deephaven.engine.table.impl.sources.regioned; + +import io.deephaven.chunk.Chunk; +import io.deephaven.chunk.LongChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.rowset.RowSequence; +import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.impl.ColumnSourceGetDefaults; +import io.deephaven.engine.table.impl.sources.ConvertableTimeSource; +import io.deephaven.engine.table.impl.sources.LocalDateWrapperSource; +import io.deephaven.engine.table.impl.sources.LocalTimeWrapperSource; +import io.deephaven.time.DateTime; +import io.deephaven.time.DateTimeUtils; +import org.jetbrains.annotations.NotNull; + +import java.time.Instant; +import 
java.time.LocalDate; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.ZonedDateTime; + +/** + * Regioned column source implementation for columns of {@link ZonedDateTime}s. + */ +final class RegionedColumnSourceZonedDateTime + extends RegionedColumnSourceReferencing> + implements ColumnSourceGetDefaults.ForObject, ConvertableTimeSource, + ConvertableTimeSource.Zoned { + private final ZoneId zone; + + public static ColumnSource asLocalDate(ZoneId zone, RegionedColumnSourceLong inner) { + return new LocalDateWrapperSource(new RegionedColumnSourceZonedDateTime(zone, inner), zone); + } + + public static ColumnSource asLocalTime(ZoneId zone, RegionedColumnSourceLong inner) { + return new LocalTimeWrapperSource(new RegionedColumnSourceZonedDateTime(zone, inner), zone); + } + + public RegionedColumnSourceZonedDateTime(final @NotNull ZoneId zone, + final @NotNull RegionedColumnSourceLong inner) { + super(ColumnRegionLong.createNull(PARAMETERS.regionMask), ZonedDateTime.class, inner); + this.zone = zone; + } + + @Override + public void convertRegion(WritableChunk destination, + Chunk source, RowSequence rowSequence) { + WritableObjectChunk objectChunk = destination.asWritableObjectChunk(); + LongChunk longChunk = source.asLongChunk(); + + final int size = objectChunk.size(); + final int length = longChunk.size(); + + for (int i = 0; i < length; ++i) { + objectChunk.set(size + i, DateTimeUtils.makeZonedDateTime(longChunk.get(i), zone)); + } + objectChunk.setSize(size + length); + } + + @Override + public ZonedDateTime get(long elementIndex) { + return elementIndex == RowSequence.NULL_ROW_KEY ? 
null + : DateTimeUtils.makeZonedDateTime(getNativeSource().lookupRegion(elementIndex).getLong(elementIndex), + zone); + } + + @Override + public ColumnSource toInstant() { + return new RegionedColumnSourceInstant((RegionedColumnSourceLong) getNativeSource()); + } + + @Override + public boolean supportsTimeConversion() { + return true; + } + + @Override + public ColumnSource toZonedDateTime(final @NotNull ZoneId zone) { + if (this.zone.equals(zone)) { + return this; + } + return new RegionedColumnSourceZonedDateTime(zone, (RegionedColumnSourceLong) getNativeSource()); + } + + @Override + public ColumnSource toLocalDate(ZoneId zone) { + return RegionedColumnSourceZonedDateTime.asLocalDate(zone, + (RegionedColumnSourceLong) getNativeSource()); + } + + @Override + public ColumnSource toLocalTime(ZoneId zone) { + return RegionedColumnSourceZonedDateTime.asLocalTime(zone, + (RegionedColumnSourceLong) getNativeSource()); + } + + @Override + public ColumnSource toDateTime() { + return new RegionedColumnSourceDateTime((RegionedColumnSourceLong) getNativeSource()); + } + + @Override + public ColumnSource toEpochNano() { + return getNativeSource(); + } + + @Override + public ZoneId getZone() { + return zone; + } +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedTableComponentFactoryImpl.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedTableComponentFactoryImpl.java index 2f42cf81a0f..be02ad6acb1 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedTableComponentFactoryImpl.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedTableComponentFactoryImpl.java @@ -4,12 +4,14 @@ package io.deephaven.engine.table.impl.sources.regioned; import io.deephaven.engine.table.ColumnDefinition; +import io.deephaven.engine.table.impl.locations.TableDataException; import io.deephaven.time.DateTime; import 
io.deephaven.engine.table.impl.ColumnSourceManager; import io.deephaven.engine.table.impl.ColumnToCodecMappings; import io.deephaven.util.type.TypeUtils; import org.jetbrains.annotations.NotNull; +import java.time.Instant; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -34,6 +36,7 @@ public class RegionedTableComponentFactoryImpl implements RegionedTableComponent typeToSupplier.put(Short.class, RegionedColumnSourceShort.AsValues::new); typeToSupplier.put(Boolean.class, RegionedColumnSourceBoolean::new); typeToSupplier.put(DateTime.class, RegionedColumnSourceDateTime::new); + typeToSupplier.put(Instant.class, RegionedColumnSourceInstant::new); SIMPLE_DATA_TYPE_TO_REGIONED_COLUMN_SOURCE_SUPPLIER = Collections.unmodifiableMap(typeToSupplier); } @@ -80,7 +83,7 @@ public RegionedColumnSource createRegionedColumnSource( return new RegionedColumnSourceObject.AsValues<>(dataType, columnDefinition.getComponentType()); } } catch (IllegalArgumentException except) { - throw new UnsupportedOperationException( + throw new TableDataException( "Can't create column for " + dataType + " in column definition " + columnDefinition, except); } } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceBoolean.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceBoolean.java index 7894e71fc0e..250ea22c08a 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceBoolean.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceBoolean.java @@ -36,9 +36,7 @@ private void assertLookup(final long elementIndex, final boolean boxed, final boolean reinterpreted) { checking(new Expectations() {{ - oneOf(cr[expectedRegionIndex]).getReferencedRegion(); - will(returnValue(cr_n[expectedRegionIndex])); - 
oneOf(cr_n[expectedRegionIndex]).getByte(elementIndex); + oneOf(cr[expectedRegionIndex]).getByte(elementIndex); will(returnValue(BooleanUtils.booleanAsByte(output))); }}); if (reinterpreted) { @@ -65,7 +63,7 @@ public void setUp() throws Exception { assertEquals(Boolean.class, SUT.getType()); SUT_AS_BYTE = SUT.reinterpret(byte.class); assertEquals(byte.class, SUT_AS_BYTE.getType()); - assertEquals(SUT, SUT_AS_BYTE.reinterpret(Boolean.class)); + assertEquals(RegionedColumnSourceBoolean.class, SUT_AS_BYTE.reinterpret(Boolean.class).getClass()); } @Override diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceByte.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceByte.java index 0d9de1b654f..de7f044b431 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceByte.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceByte.java @@ -16,7 +16,8 @@ * Test class for {@link RegionedColumnSourceByte}. 
*/ @SuppressWarnings("JUnit4AnnotatedMethodInJUnit3TestCase") -public class TestRegionedColumnSourceByte extends TstRegionedColumnSourcePrimitive> { +public class TestRegionedColumnSourceByte extends TstRegionedColumnSourcePrimitive< + Byte, Values, ColumnRegionByte, ColumnRegionByte> { public TestRegionedColumnSourceByte() { super(ColumnRegionByte.class); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceChar.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceChar.java index 74f91cb7669..2785427dfde 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceChar.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceChar.java @@ -11,7 +11,8 @@ * Test class for {@link RegionedColumnSourceChar}. */ @SuppressWarnings("JUnit4AnnotatedMethodInJUnit3TestCase") -public class TestRegionedColumnSourceChar extends TstRegionedColumnSourcePrimitive> { +public class TestRegionedColumnSourceChar extends TstRegionedColumnSourcePrimitive< + Character, Values, ColumnRegionChar, ColumnRegionChar> { public TestRegionedColumnSourceChar() { super(ColumnRegionChar.class); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceDateTime.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceDateTime.java index 53eb13a569c..97593d9916e 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceDateTime.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceDateTime.java @@ -41,9 +41,7 @@ private void assertLookup(final long elementIndex, final boolean reinterpreted) { checking(new Expectations() { { - oneOf(cr[expectedRegionIndex]).getReferencedRegion(); - 
will(returnValue(cr_n[expectedRegionIndex])); - oneOf(cr_n[expectedRegionIndex]).getLong(elementIndex); + oneOf(cr[expectedRegionIndex]).getLong(elementIndex); will(returnValue(output == null ? QueryConstants.NULL_LONG : output.getNanos())); } }); @@ -64,7 +62,7 @@ public void setUp() throws Exception { assertEquals(DateTime.class, SUT.getType()); SUT_AS_LONG = SUT.reinterpret(long.class); assertEquals(long.class, SUT_AS_LONG.getType()); - assertEquals(SUT, SUT_AS_LONG.reinterpret(DateTime.class)); + assertEquals(RegionedColumnSourceDateTime.class, SUT_AS_LONG.reinterpret(DateTime.class).getClass()); } @Override diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceDouble.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceDouble.java index bb38bfef16e..c5b882a92dc 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceDouble.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceDouble.java @@ -16,7 +16,8 @@ * Test class for {@link RegionedColumnSourceDouble}. 
*/ @SuppressWarnings("JUnit4AnnotatedMethodInJUnit3TestCase") -public class TestRegionedColumnSourceDouble extends TstRegionedColumnSourcePrimitive> { +public class TestRegionedColumnSourceDouble extends TstRegionedColumnSourcePrimitive< + Double, Values, ColumnRegionDouble, ColumnRegionDouble> { public TestRegionedColumnSourceDouble() { super(ColumnRegionDouble.class); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceFloat.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceFloat.java index 297503dd3e3..5f4431b0c4b 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceFloat.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceFloat.java @@ -16,7 +16,8 @@ * Test class for {@link RegionedColumnSourceFloat}. */ @SuppressWarnings("JUnit4AnnotatedMethodInJUnit3TestCase") -public class TestRegionedColumnSourceFloat extends TstRegionedColumnSourcePrimitive> { +public class TestRegionedColumnSourceFloat extends TstRegionedColumnSourcePrimitive< + Float, Values, ColumnRegionFloat, ColumnRegionFloat> { public TestRegionedColumnSourceFloat() { super(ColumnRegionFloat.class); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceInt.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceInt.java index a888d5bdd38..2791e6ad8e0 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceInt.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceInt.java @@ -16,7 +16,8 @@ * Test class for {@link RegionedColumnSourceInt}. 
*/ @SuppressWarnings("JUnit4AnnotatedMethodInJUnit3TestCase") -public class TestRegionedColumnSourceInt extends TstRegionedColumnSourcePrimitive> { +public class TestRegionedColumnSourceInt extends TstRegionedColumnSourcePrimitive< + Integer, Values, ColumnRegionInt, ColumnRegionInt> { public TestRegionedColumnSourceInt() { super(ColumnRegionInt.class); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceLong.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceLong.java index 48bd9b41120..f91469dbb50 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceLong.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceLong.java @@ -16,7 +16,8 @@ * Test class for {@link RegionedColumnSourceLong}. */ @SuppressWarnings("JUnit4AnnotatedMethodInJUnit3TestCase") -public class TestRegionedColumnSourceLong extends TstRegionedColumnSourcePrimitive> { +public class TestRegionedColumnSourceLong extends TstRegionedColumnSourcePrimitive< + Long, Values, ColumnRegionLong, ColumnRegionLong> { public TestRegionedColumnSourceLong() { super(ColumnRegionLong.class); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceShort.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceShort.java index a1ed4c7a62c..fbe68b57015 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceShort.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TestRegionedColumnSourceShort.java @@ -16,7 +16,8 @@ * Test class for {@link RegionedColumnSourceShort}. 
*/ @SuppressWarnings("JUnit4AnnotatedMethodInJUnit3TestCase") -public class TestRegionedColumnSourceShort extends TstRegionedColumnSourcePrimitive> { +public class TestRegionedColumnSourceShort extends TstRegionedColumnSourcePrimitive< + Short, Values, ColumnRegionShort, ColumnRegionShort> { public TestRegionedColumnSourceShort() { super(ColumnRegionShort.class); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TstRegionedColumnSourceObject.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TstRegionedColumnSourceObject.java index 85b0dbde8bf..55a04f0aa67 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TstRegionedColumnSourceObject.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TstRegionedColumnSourceObject.java @@ -11,7 +11,7 @@ */ @SuppressWarnings({"JUnit4AnnotatedMethodInJUnit3TestCase"}) public abstract class TstRegionedColumnSourceObject extends TstRegionedColumnSourcePrimitive> { + ColumnRegionObject, ColumnRegionObject> { TstRegionedColumnSourceObject(Value[] values) { super(ColumnRegionObject.class); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TstRegionedColumnSourcePrimitive.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TstRegionedColumnSourcePrimitive.java index a599ff7ba4f..fcc5ee682a5 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TstRegionedColumnSourcePrimitive.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TstRegionedColumnSourcePrimitive.java @@ -25,7 +25,7 @@ * Base class for testing {@link RegionedColumnSourceArray} implementations. 
*/ @SuppressWarnings({"AnonymousInnerClassMayBeStatic", "JUnit4AnnotatedMethodInJUnit3TestCase"}) -public abstract class TstRegionedColumnSourcePrimitive> +public abstract class TstRegionedColumnSourcePrimitive, CS_REGION_TYPE extends ColumnRegion> extends BaseCachedJMockTestCase { static final byte[] TEST_BYTES = @@ -46,7 +46,7 @@ public abstract class TstRegionedColumnSourcePrimitive SUT; + RegionedColumnSourceBase SUT; private final Class regionTypeClass; @@ -88,6 +88,11 @@ public void testOverflow() { } } + REGION_TYPE doLookupRegion(long elementRowKey) { + // noinspection unchecked + return (REGION_TYPE) SUT.lookupRegion(elementRowKey); + } + @Test public void testAddRegions() { // Test validity checks. @@ -99,31 +104,31 @@ public void testAddRegions() { // Add the 0th region. SUT.addRegionForUnitTests(cr[0]); - TestCase.assertEquals(cr[0], SUT.lookupRegion(getFirstRowKey(0))); - TestCase.assertEquals(cr[0], SUT.lookupRegion(getLastRowKey(0))); + TestCase.assertEquals(cr[0], doLookupRegion(getFirstRowKey(0))); + TestCase.assertEquals(cr[0], doLookupRegion(getLastRowKey(0))); // Add the 1st region. SUT.addRegionForUnitTests(cr[1]); - TestCase.assertEquals(cr[1], SUT.lookupRegion(getFirstRowKey(1))); - TestCase.assertEquals(cr[1], SUT.lookupRegion(getLastRowKey(1))); + TestCase.assertEquals(cr[1], doLookupRegion(getFirstRowKey(1))); + TestCase.assertEquals(cr[1], doLookupRegion(getLastRowKey(1))); // Prove that the 2nd region is missing. try { - TestCase.assertNull(SUT.lookupRegion(getFirstRowKey(2))); + TestCase.assertNull(doLookupRegion(getFirstRowKey(2))); } catch (ArrayIndexOutOfBoundsException expected) { } try { - TestCase.assertNull(SUT.lookupRegion(getLastRowKey(2))); + TestCase.assertNull(doLookupRegion(getLastRowKey(2))); } catch (ArrayIndexOutOfBoundsException expected) { } // Prove that 9th region is missing. 
try { - TestCase.assertNull(SUT.lookupRegion(getFirstRowKey(9))); + TestCase.assertNull(doLookupRegion(getFirstRowKey(9))); } catch (ArrayIndexOutOfBoundsException expected) { } try { - TestCase.assertNull(SUT.lookupRegion(getLastRowKey(9))); + TestCase.assertNull(doLookupRegion(getLastRowKey(9))); } catch (ArrayIndexOutOfBoundsException expected) { } } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TstRegionedColumnSourceReferencing.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TstRegionedColumnSourceReferencing.java index a0fd6e29309..4e10b22e592 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TstRegionedColumnSourceReferencing.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/regioned/TstRegionedColumnSourceReferencing.java @@ -4,37 +4,20 @@ package io.deephaven.engine.table.impl.sources.regioned; import io.deephaven.chunk.attributes.Values; -import org.junit.Before; - -import java.lang.reflect.Array; /** * Base class for testing {@link RegionedColumnSourceArray} implementations. 
*/ -@SuppressWarnings({"AnonymousInnerClassMayBeStatic"}) public abstract class TstRegionedColumnSourceReferencing> - extends TstRegionedColumnSourcePrimitive> { - - NATIVE_REGION_TYPE[] cr_n; - - private final Class nativeRegionTypeClass; + extends + TstRegionedColumnSourcePrimitive> { - TstRegionedColumnSourceReferencing(Class nativeRegionTypeClass) { - super(ColumnRegionReferencing.class); - this.nativeRegionTypeClass = nativeRegionTypeClass; + TstRegionedColumnSourceReferencing(Class regionTypeClass) { + super(regionTypeClass); } - @Before - public void setUp() throws Exception { - super.setUp(); - - // noinspection unchecked - cr_n = (NATIVE_REGION_TYPE[]) Array.newInstance(nativeRegionTypeClass, 10); - for (int cri = 0; cri < cr.length; ++cri) { - // noinspection unchecked - cr_n[cri] = (NATIVE_REGION_TYPE) mock(nativeRegionTypeClass, "CR_N_" + cri); - } - - // Sub-classes are responsible for setting up SUT. + @Override + NATIVE_REGION_TYPE doLookupRegion(long index) { + return SUT.lookupRegion(index).getReferencedRegion(); } } diff --git a/replication/static/src/main/java/io/deephaven/replicators/ReplicateRegionsAndRegionedSources.java b/replication/static/src/main/java/io/deephaven/replicators/ReplicateRegionsAndRegionedSources.java index 3ecccec63bf..cd9586a359b 100644 --- a/replication/static/src/main/java/io/deephaven/replicators/ReplicateRegionsAndRegionedSources.java +++ b/replication/static/src/main/java/io/deephaven/replicators/ReplicateRegionsAndRegionedSources.java @@ -3,15 +3,30 @@ */ package io.deephaven.replicators; -import io.deephaven.replication.ReplicatePrimitiveCode; +import io.deephaven.base.verify.Require; +import org.apache.commons.io.FileUtils; +import java.io.File; import java.io.IOException; +import java.nio.charset.Charset; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.util.Arrays; +import java.util.List; +import 
java.util.function.LongFunction; +import java.util.function.ToLongFunction; + +import static io.deephaven.replication.ReplicatePrimitiveCode.*; +import static io.deephaven.replication.ReplicationUtils.*; /** * Code generation for basic {@link RegionedColumnSource} implementations as well as well as the primary region * interfaces for some primitive types. */ -public class ReplicateRegionsAndRegionedSources extends ReplicatePrimitiveCode { +public class ReplicateRegionsAndRegionedSources { public static void main(String... args) throws IOException { charToAllButBooleanAndByte( @@ -20,7 +35,100 @@ public static void main(String... args) throws IOException { "engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/DeferredColumnRegionChar.java"); charToAllButBooleanAndByte( "extensions/parquet/table/src/main/java/io/deephaven/parquet/table/region/ParquetColumnRegionChar.java"); - charToAllButBoolean( + final List paths = charToAllButBoolean( "engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceChar.java"); + fixupLong(paths.stream().filter(p -> p.contains("Long")).findFirst().get()); + fixupByte(paths.stream().filter(p -> p.contains("Byte")).findFirst().get()); + } + + private static void fixupByte(String path) throws IOException { + final File file = new File(path); + List lines = FileUtils.readLines(file, Charset.defaultCharset()); + lines = addImport(lines, "import io.deephaven.engine.table.ColumnSource;"); + lines = replaceRegion(lines, "reinterpretation", Arrays.asList( + " @Override", + " public boolean allowsReinterpret(@NotNull Class alternateDataType) {", + " return alternateDataType == boolean.class || alternateDataType == Boolean.class || super.allowsReinterpret(alternateDataType);", + " }", + "", + " @Override", + " protected ColumnSource doReinterpret(@NotNull Class alternateDataType) {", + " //noinspection unchecked", + " return (ColumnSource) new 
RegionedColumnSourceBoolean((RegionedColumnSourceByte)this);", + " }")); + + FileUtils.writeLines(new File(path), lines); + } + + private static void fixupLong(String path) throws IOException { + final File file = new File(path); + List lines = FileUtils.readLines(file, Charset.defaultCharset()); + lines = addImport(lines, "import io.deephaven.time.DateTime;", + "import io.deephaven.engine.table.ColumnSource;", + "import io.deephaven.engine.table.impl.sources.LocalDateWrapperSource;", + "import io.deephaven.engine.table.impl.sources.LocalTimeWrapperSource;", + "import io.deephaven.engine.table.impl.sources.ConvertableTimeSource;"); + lines = addImport(lines, Instant.class, ZonedDateTime.class, LocalDate.class, LocalTime.class, ZoneId.class); + lines = globalReplacements(lines, "/\\*\\s+MIXIN_INTERFACES\\s+\\*/", ",ConvertableTimeSource "); + lines = replaceRegion(lines, "reinterpretation", Arrays.asList( + " @Override", + " public boolean allowsReinterpret(@NotNull Class alternateDataType) {", + " if(super.allowsReinterpret(alternateDataType)) {", + " return true;", + " }", + "", + " return alternateDataType == Instant.class ||", + " alternateDataType == DateTime.class;", + " }", + "", + " @SuppressWarnings(\"unchecked\")", + " @Override", + " protected ColumnSource doReinterpret(@NotNull Class alternateDataType) {", + " if(alternateDataType == Instant.class) {", + " return (ColumnSource) toInstant();", + " } else if(alternateDataType == DateTime.class) {", + " return (ColumnSource) toDateTime();", + " }", + "", + " return super.doReinterpret(alternateDataType);", + " }", + "", + " @Override", + " public boolean supportsTimeConversion() {", + " return true;", + " }", + "", + " public ColumnSource toInstant() {", + " //noinspection unchecked", + " return new RegionedColumnSourceInstant((RegionedColumnSourceLong) this);", + " }", + "", + " public ColumnSource toDateTime() {", + " //noinspection unchecked", + " return new 
RegionedColumnSourceDateTime((RegionedColumnSourceLong) this);", + " }", + "", + " @Override", + " public ColumnSource toZonedDateTime(ZoneId zone) {", + " //noinspection unchecked", + " return new RegionedColumnSourceZonedDateTime(zone, (RegionedColumnSourceLong) this);", + " }", + "", + " @Override", + " public ColumnSource toLocalTime(ZoneId zone) {", + " return new LocalTimeWrapperSource(toZonedDateTime(zone), zone);", + " }", + "", + " @Override", + " public ColumnSource toLocalDate(ZoneId zone) {", + " return new LocalDateWrapperSource(toZonedDateTime(zone), zone);", + " }", + "", + " @Override", + " public ColumnSource toEpochNano() {", + " return this;", + " }")); + + FileUtils.writeLines(new File(path), lines); } } From 6eb0175ef6518824d9cf3ecfac02fa3ab70a9b82 Mon Sep 17 00:00:00 2001 From: Nathaniel Bauernfeind Date: Thu, 2 Feb 2023 11:14:47 -0700 Subject: [PATCH 03/14] Charles' first round feedback - general --- .../deephaven/engine/table/ElementSource.java | 48 +++++++----- .../table/impl/AbstractColumnSource.java | 16 ++-- .../by/LongChunkedAddOnlyMinMaxOperator.java | 2 +- .../impl/sources/DateTimeArraySource.java | 2 +- .../sources/DateTimeSparseArraySource.java | 2 +- .../impl/sources/SparseArrayColumnSource.java | 1 - .../engine/table/impl/util/ColumnHolder.java | 2 +- .../table/impl/util/TableTimeConversions.java | 4 +- .../impl/select/TestReinterpretedColumn.java | 75 ++----------------- .../testutil/generator/TestDataGenerator.java | 2 +- .../replicators/ReplicateOperators.java | 2 +- 11 files changed, 55 insertions(+), 101 deletions(-) diff --git a/engine/api/src/main/java/io/deephaven/engine/table/ElementSource.java b/engine/api/src/main/java/io/deephaven/engine/table/ElementSource.java index 60cf5444066..b9b92aa3687 100644 --- a/engine/api/src/main/java/io/deephaven/engine/table/ElementSource.java +++ b/engine/api/src/main/java/io/deephaven/engine/table/ElementSource.java @@ -8,13 +8,14 @@ /** * A source of element data within a table. 
- * + * * @param the type of underlying data. */ public interface ElementSource { /** - * Get the value from the source. This may return boxed values for basic types. + * Get the value from the source. This may return boxed values for basic types. RowKeys that are not present are + * undefined. * * @param rowKey the location in key space to get the value from. * @return the value at the rowKey, potentially null. @@ -23,7 +24,7 @@ public interface ElementSource { T get(long rowKey); /** - * Get the value at the rowKey as a Boolean. + * Get the value at the rowKey as a Boolean. RowKeys that are not present are undefined. * * @param rowKey the location in key space to get the value from. * @return the boolean at the rowKey, potentially null. @@ -32,7 +33,7 @@ public interface ElementSource { Boolean getBoolean(long rowKey); /** - * Get the value at the rowKey as a byte. + * Get the value at the rowKey as a byte. RowKeys that are not present are undefined. * * @param rowKey the location in key space to get the value from. * @return the boolean at the rowKey, null values are represented by {@link QueryConstants#NULL_BYTE} @@ -40,7 +41,7 @@ public interface ElementSource { byte getByte(long rowKey); /** - * Get the value at the rowKey as a char. + * Get the value at the rowKey as a char. RowKeys that are not present are undefined. * * @param rowKey the location in key space to get the value from. * @return the char at the rowKey, null values are represented by {@link QueryConstants#NULL_CHAR} @@ -48,7 +49,7 @@ public interface ElementSource { char getChar(long rowKey); /** - * Get the value at the rowKey as a double. + * Get the value at the rowKey as a double. RowKeys that are not present are undefined. * * @param rowKey the location in key space to get the value from. 
* @return the double at the rowKey, null values are represented by {@link QueryConstants#NULL_DOUBLE} @@ -56,7 +57,7 @@ public interface ElementSource { double getDouble(long rowKey); /** - * Get the value at the rowKey as a float. + * Get the value at the rowKey as a float. RowKeys that are not present are undefined. * * @param rowKey the location in key space to get the value from. * @return the float at the rowKey, null values are represented by {@link QueryConstants#NULL_FLOAT} @@ -64,7 +65,7 @@ public interface ElementSource { float getFloat(long rowKey); /** - * Get the value at the rowKey as an int. + * Get the value at the rowKey as an int. RowKeys that are not present are undefined. * * @param rowKey the location in key space to get the value from. * @return the int at the rowKey, null values are represented by {@link QueryConstants#NULL_INT} @@ -72,7 +73,7 @@ public interface ElementSource { int getInt(long rowKey); /** - * Get the value at the rowKey as a long. + * Get the value at the rowKey as a long. RowKeys that are not present are undefined. * * @param rowKey the location in key space to get the value from. * @return the long at the rowKey, null values are represented by {@link QueryConstants#NULL_LONG} @@ -80,7 +81,7 @@ public interface ElementSource { long getLong(long rowKey); /** - * Get the value at the rowKey as a short. + * Get the value at the rowKey as a short. RowKeys that are not present are undefined. * * @param rowKey the location in key space to get the value from. * @return the short at the rowKey, null values are represented by {@link QueryConstants#NULL_SHORT} @@ -92,25 +93,27 @@ public interface ElementSource { * {@link io.deephaven.engine.updategraph.UpdateGraphProcessor UGP} * {@link io.deephaven.engine.updategraph.LogicalClock.State#Updating update} cycle to process changes in data. 
* During {@link io.deephaven.engine.updategraph.LogicalClock.State#Idle normal} operation previous values will be - * identical to {@link #get(long) current} values. + * identical to {@link #get(long) current} values. RowKeys that were not present are undefined. * * @param rowKey the location in key space to get the value from. - * @return the previous value at the rowKey, or null. + * @return the previous value at the rowKey, potentially null. */ @Nullable T getPrev(long rowKey); /** - * Get the previous value at the rowKey as a Boolean. See {@link #getPrev(long)} for more details. + * Get the previous value at the rowKey as a Boolean. See {@link #getPrev(long)} for more details. RowKeys that were + * not present are undefined. * * @param rowKey the location in key space to get the previous value from. - * @return the previous boolean at the rowKey, or null. + * @return the previous boolean at the rowKey, potentially null. */ @Nullable Boolean getPrevBoolean(long rowKey); /** - * Get the previous value at the rowKey as a byte. See {@link #getPrev(long)} for more details. + * Get the previous value at the rowKey as a byte. See {@link #getPrev(long)} for more details. RowKeys that were + * not present are undefined. * * @param rowKey the location in key space to get the previous value from. * @return the previous boolean at the rowKey, null values are represented by {@link QueryConstants#NULL_BYTE} @@ -118,7 +121,8 @@ public interface ElementSource { byte getPrevByte(long rowKey); /** - * Get the previous value at the rowKey as a char. See {@link #getPrev(long)} for more details. + * Get the previous value at the rowKey as a char. See {@link #getPrev(long)} for more details. RowKeys that were + * not present are undefined. * * @param rowKey ohe location in key space to get the previous value from. 
* @return the previous char at the rowKey, null values are represented by {@link QueryConstants#NULL_CHAR} @@ -126,7 +130,8 @@ public interface ElementSource { char getPrevChar(long rowKey); /** - * Get the previous value at the rowKey as a double. See {@link #getPrev(long)} for more details. + * Get the previous value at the rowKey as a double. See {@link #getPrev(long)} for more details. RowKeys that were + * not present are undefined. * * @param rowKey the location in key space to get the previous value from. * @return the previous double at the rowKey, null values are represented by {@link QueryConstants#NULL_DOUBLE} @@ -134,7 +139,8 @@ public interface ElementSource { double getPrevDouble(long rowKey); /** - * Get the previous value at the rowKey as a float. See {@link #getPrev(long)} for more details. + * Get the previous value at the rowKey as a float. See {@link #getPrev(long)} for more details. RowKeys that were + * not present are undefined. * * @param rowKey the location in key space to get the previous value from. * @return the previous float at the rowKey, null values are represented by {@link QueryConstants#NULL_FLOAT} @@ -142,7 +148,8 @@ public interface ElementSource { float getPrevFloat(long rowKey); /** - * Get the previous value at the rowKey as an int. See {@link #getPrev(long)} for more details. + * Get the previous value at the rowKey as an int. See {@link #getPrev(long)} for more details. RowKeys that were + * not present are undefined. * * @param rowKey the location in key space to get the previous value from. * @return the previous int at the rowKey, null values are represented by {@link QueryConstants#NULL_INT} @@ -158,7 +165,8 @@ public interface ElementSource { long getPrevLong(long rowKey); /** - * Get the previous value at the rowKey as a short. See {@link #getPrev(long)} for more details. + * Get the previous value at the rowKey as a short. See {@link #getPrev(long)} for more details. RowKeys that were + * not present are undefined. 
* * @param rowKey the location in key space to get the previous value from. * @return the previous short at the rowKey, null values are represented by {@link QueryConstants#NULL_SHORT} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/AbstractColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/AbstractColumnSource.java index 048de545d11..969adcfab59 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/AbstractColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/AbstractColumnSource.java @@ -11,10 +11,10 @@ import io.deephaven.engine.rowset.*; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.ColumnSource; -import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.chunkfillers.ChunkFiller; import io.deephaven.engine.table.impl.chunkfilter.ChunkFilter; import io.deephaven.engine.table.impl.chunkfilter.ChunkMatchFilterFactory; +import io.deephaven.engine.table.impl.sources.ReinterpretUtils; import io.deephaven.engine.table.impl.sources.UnboxedLongBackedColumnSource; import io.deephaven.time.DateTime; import io.deephaven.vector.*; @@ -25,6 +25,8 @@ import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; +import java.time.Instant; +import java.time.ZonedDateTime; import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; @@ -288,10 +290,14 @@ public final ColumnSource reinterpret */ protected ColumnSource doReinterpret( @NotNull final Class alternateDataType) { - Assert.eq(getType(), "getType()", DateTime.class); - Assert.eq(alternateDataType, "alternateDataType", long.class); - // noinspection unchecked - return (ColumnSource) new UnboxedLongBackedColumnSource<>(this); + if (getType() == DateTime.class || getType() == Instant.class || getType() == ZonedDateTime.class) { + Assert.eq(alternateDataType, "alternateDataType", long.class); + // noinspection 
unchecked + return (ColumnSource) new UnboxedLongBackedColumnSource<>(this); + } + throw new IllegalArgumentException("Unsupported reinterpret for " + getClass().getSimpleName() + + ": type=" + getType() + + ", alternateDataType=" + alternateDataType); } public static abstract class DefaultedMutable extends AbstractColumnSource diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/LongChunkedAddOnlyMinMaxOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/LongChunkedAddOnlyMinMaxOperator.java index 7cf16168acb..cea43a53707 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/LongChunkedAddOnlyMinMaxOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/LongChunkedAddOnlyMinMaxOperator.java @@ -36,7 +36,7 @@ class LongChunkedAddOnlyMinMaxOperator implements IterativeChunkedAggregationOperator { private final LongArraySource resultColumn; // region actualResult - private final ArrayBackedColumnSource actualResult; + private final ColumnSource actualResult; // endregion actualResult private final boolean minimum; private final String name; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DateTimeArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DateTimeArraySource.java index de4d7ea07b1..33268ac5079 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DateTimeArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DateTimeArraySource.java @@ -10,7 +10,7 @@ import org.jetbrains.annotations.NotNull; /** - * Array-backed {@link ColumnSource} for DBDateTimes. Allows reinterpretation to long and {@link java.time.Instant}. + * Array-backed {@link ColumnSource} for DateTimes. Allows reinterpretation to long and {@link java.time.Instant}. 
*/ public class DateTimeArraySource extends NanosBasedTimeArraySource implements MutableColumnSourceGetDefaults.ForLongAsDateTime, ConvertableTimeSource { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DateTimeSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DateTimeSparseArraySource.java index 8665cf0cf14..dcbc0219cdd 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DateTimeSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DateTimeSparseArraySource.java @@ -11,7 +11,7 @@ import org.jetbrains.annotations.NotNull; /** - * Array-backed ColumnSource for DBDateTimes. Allows reinterpret as long. + * Array-backed ColumnSource for DateTimes. Allows reinterpret as long. */ public class DateTimeSparseArraySource extends NanosBasedTimeSparseArraySource implements MutableColumnSourceGetDefaults.ForLongAsDateTime, DefaultChunkSource, ConvertableTimeSource { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/SparseArrayColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/SparseArrayColumnSource.java index 105731a5ca0..f57ae53c747 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/SparseArrayColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/SparseArrayColumnSource.java @@ -24,7 +24,6 @@ import static io.deephaven.engine.table.impl.sources.sparse.SparseConstants.*; import java.time.Instant; -import java.time.ZonedDateTime; import java.util.Arrays; import java.util.Collection; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ColumnHolder.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ColumnHolder.java index 5d77483a600..6c16a4ada32 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ColumnHolder.java +++ 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ColumnHolder.java @@ -214,7 +214,7 @@ public static ColumnHolder getDateTimeColumnHolder(String name, boolea * @param name column name * @param grouped true if the column is grouped; false otherwise * @param chunkData column data (long integers representing nanos since the epoch) - * @return a DBDateTime column holder implemented with longs for storage + * @return a DateTime column holder implemented with longs for storage */ public static ColumnHolder getDateTimeColumnHolder(String name, boolean grouped, Chunk chunkData) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/TableTimeConversions.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/TableTimeConversions.java index 98b7f233a14..43627a3373c 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/TableTimeConversions.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/TableTimeConversions.java @@ -30,7 +30,7 @@ * *
  * baseTable = db.i("Market", "Trades")
- *               .where("Date > 2021-10-01")
+ *               .where("Date > 2021-10-01")
  *
  * startTime = LocalTime.of(10,30,00)
  * endTime = LocalTime.of(16,30,00)
@@ -268,7 +268,7 @@ public static Table asInstant(final @NotNull Table source, @NotNull final MatchP
     }
     // endregion
 
-    // region to DbDateTime
+    // region to DateTime
     /**
      * Convert the specified column in the table to a {@link DateTime} column. The column may be specified as a single
      * value "Column" or a pair "NewColumn = OriginalColumn"
diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestReinterpretedColumn.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestReinterpretedColumn.java
index 4413b58421e..0050c07844b 100644
--- a/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestReinterpretedColumn.java
+++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestReinterpretedColumn.java
@@ -45,7 +45,7 @@
 public class TestReinterpretedColumn extends RefreshingTableTestCase {
     final int ROW_COUNT = 60;
     private final long baseLongTime = DateTimeUtils.convertDateTime("2021-10-20T09:30:00.000 NY").getNanos();
-    private final DateTime baseDBDateTime = DateTimeUtils.convertDateTime("2021-10-19T10:30:00.000 NY");
+    private final DateTime baseDateTime = DateTimeUtils.convertDateTime("2021-10-19T10:30:00.000 NY");
     private final ZonedDateTime baseZDT = ZonedDateTime.of(2021, 10, 18, 11, 30, 0, 0, ZoneId.of("America/New_York"));
     private final Instant baseInstant = DateTimeUtils.convertDateTime("2021-10-17T12:30:00.000 NY").getInstant();
 
@@ -53,7 +53,6 @@ public class TestReinterpretedColumn extends RefreshingTableTestCase {
     private QueryTable sparseBaseTable;
     private QueryTable objectTable;
     private QueryTable sparseObjectTable;
-    // private QueryTable regionedTable;
 
     @Override
     public void setUp() throws Exception {
@@ -78,8 +77,6 @@ public void setUp() throws Exception {
                 new ObjectSparseArraySource<>(DateTime.class),
                 new ObjectSparseArraySource<>(Instant.class),
                 new ObjectSparseArraySource<>(ZonedDateTime.class));
-
-        // regionedTable = makeRegioned();
     }
 
     private QueryTable makeObjectTable(WritableColumnSource longSource, WritableColumnSource dtSource,
@@ -92,7 +89,7 @@ private QueryTable makeObjectTable(WritableColumnSource longSource, Writab
         for (int ii = 0; ii < ROW_COUNT; ii++) {
             final long tOff = ii * 60 * 1_000_000_000L;
             longSource.set(ii, Long.valueOf(baseLongTime + tOff));
-            dtSource.set(ii, DateTimeUtils.nanosToTime(baseDBDateTime.getNanos() + tOff));
+            dtSource.set(ii, DateTimeUtils.nanosToTime(baseDateTime.getNanos() + tOff));
             iSource.set(ii, DateTimeUtils.makeInstant(DateTimeUtils.toEpochNano(baseInstant) + tOff));
             zdtSource.set(ii, DateTimeUtils.makeZonedDateTime(DateTimeUtils.toEpochNano(baseZDT) + tOff,
                     ZoneId.of("America/New_York")));
@@ -117,7 +114,7 @@ private QueryTable makeTable(WritableColumnSource longSource, WritableColu
         for (int ii = 0; ii < ROW_COUNT; ii++) {
             final long tOff = ii * 60 * 1_000_000_000L;
             longSource.set(ii, baseLongTime + tOff);
-            dtSource.set(ii, baseDBDateTime.getNanos() + tOff);
+            dtSource.set(ii, baseDateTime.getNanos() + tOff);
             iSource.set(ii, DateTimeUtils.toEpochNano(baseInstant) + tOff);
             zdtSource.set(ii, DateTimeUtils.toEpochNano(baseZDT) + tOff);
         }
@@ -131,51 +128,6 @@ private QueryTable makeTable(WritableColumnSource longSource, WritableColu
         return new QueryTable(RowSetFactory.flat(ROW_COUNT).toTracking(), cols);
     }
 
-    // private QueryTable makeRegioned() throws IOException {
-    // final Path rootPath = Files.createTempDirectory(Paths.get(Configuration.getInstance().getWorkspacePath()),
-    // "TestReinterpret");
-    // final File rootFile = rootPath.toFile();
-    // final SchemaService schemaService =
-    // SchemaServiceFactoryForTest.getTransientInstance(Configuration.getInstance());
-    //
-    // final Path namespacePath = rootPath.resolve(Paths.get("Intraday", "TR", "TR"));
-    // namespacePath.toFile().mkdirs();
-    //
-    // Configuration.getInstance().setProperty("IrisDB.permissionFilterProvider", "null");
-    // PermissionFilterProvider.FACTORY.reload();
-    //
-    // final LocalTablePathManager pathManager = new LocalTablePathManager(rootFile);
-    // final OnDiskQueryDatabase db = new OnDiskQueryDatabase(Logger.NULL, rootFile, new
-    // LocalTableDataService(pathManager), schemaService);
-    // db.setUserContext(null, new SimpleUserContext("nobody", "nobody"));
-    //
-    // final TableDefinition forNPT = baseTable.updateView("PC=`1`").getDefinition();
-    // forNPT.setStorageType(DefaultTableDefinition.STORAGETYPE_NESTEDPARTITIONEDONDISK);
-    // forNPT.setNamespace("TR");
-    // forNPT.setName("TR");
-    // forNPT.getColumn("PC").setColumnType(DefaultColumnDefinition.COLUMNTYPE_PARTITIONING);
-    // forNPT.getColumn("ZDT").setObjectCodecClass(ZonedDateTimeCodec.class.getName());
-    //
-    // schemaService.createNamespace(NamespaceSet.SYSTEM, "TR");
-    // schemaService.addSchema(schemaService.fromDefinition(forNPT, "TR", "TR",
-    // TableDefinition.STORAGETYPE_NESTEDPARTITIONEDONDISK, NamespaceSet.SYSTEM));
-    //
-    // final List
slices = new ArrayList<>(); - // for(int ii = 0; ii< ROW_COUNT; ii += 10) { - // slices.add(baseTable.slice(ii, Math.min(baseTable.size(), ii + 10))); - // } - // - // final File[] dests = new File[slices.size()]; - // for(int ii = 0; ii < slices.size(); ii++) { - // dests[ii] = pathManager.getLocation(new FullTableLocationKey("TR", "TR", TableType.SYSTEM_INTRADAY, - // Integer.toString(ii), "1")); - // } - // - // TableManagementTools.writeTables(slices.toArray(Table.ZERO_LENGTH_TABLE_ARRAY), forNPT.getWritable(), dests); - // - // return (QueryTable) db.i("TR", "TR").where(); - // } - private long computeTimeDiff(final int iteration, boolean invert) { return (invert ? ROW_COUNT - iteration - 1 : iteration) * 60 * 1_000_000_000L; } @@ -190,8 +142,6 @@ public void testReinterpretLong() { testReinterpretLong(objectTable, false, true); testReinterpretLong(sparseObjectTable, false, false); testReinterpretLong(sparseObjectTable, false, true); - // testReinterpretLong(regionedTable, false, false); - // testReinterpretLong(regionedTable, false, true); } private void testReinterpretLong(final Table initial, boolean isSorted, boolean withRename) { @@ -225,7 +175,7 @@ private void testReinterpretLong(final Table initial, boolean isSorted, boolean } else { assertEquals(baseLongTime + tOff, table.getColumnSource(lColName).getLong(key)); } - assertEquals(baseDBDateTime.getNanos() + tOff, table.getColumnSource(dtColName).getLong(key)); + assertEquals(baseDateTime.getNanos() + tOff, table.getColumnSource(dtColName).getLong(key)); assertEquals(DateTimeUtils.toEpochNano(baseInstant) + tOff, table.getColumnSource(iColName).getLong(key)); assertEquals(DateTimeUtils.toEpochNano(baseZDT) + tOff, table.getColumnSource(zdtColName).getLong(key)); } @@ -233,7 +183,7 @@ private void testReinterpretLong(final Table initial, boolean isSorted, boolean // Repeat the same comparisons, but actuate fillChunk instead reinterpLongChunkCheck(table.getColumnSource(lColName), table.getRowSet(), 
isSorted, baseLongTime); reinterpLongChunkCheck(table.getColumnSource(dtColName), table.getRowSet(), isSorted, - baseDBDateTime.getNanos()); + baseDateTime.getNanos()); reinterpLongChunkCheck(table.getColumnSource(iColName), table.getRowSet(), isSorted, DateTimeUtils.toEpochNano(baseInstant)); reinterpLongChunkCheck(table.getColumnSource(zdtColName), table.getRowSet(), isSorted, @@ -289,7 +239,7 @@ private void doReinterpretTestBasic(final Table initial, final String iColName = withRename ? "R_I" : "I"; final String zdtColName = withRename ? "R_ZDT" : "ZDT"; - // Make everything a DBDateTime + // Make everything a DateTime Table table = reinterpreter.apply(initial, lColName + "=L"); table = reinterpreter.apply(table, dtColName + "=DT"); table = reinterpreter.apply(table, iColName + "=I"); @@ -312,7 +262,7 @@ private void doReinterpretTestBasic(final Table initial, assertEquals(baseLongTime + tOff, (long) toNanoFunc.apply((T) table.getColumnSource(lColName).get(key))); extraCheck.accept((T) table.getColumnSource(lColName).get(key)); - assertEquals(baseDBDateTime.getNanos() + tOff, + assertEquals(baseDateTime.getNanos() + tOff, (long) toNanoFunc.apply((T) table.getColumnSource(dtColName).get(key))); extraCheck.accept((T) table.getColumnSource(dtColName).get(key)); assertEquals(DateTimeUtils.toEpochNano(baseInstant) + tOff, @@ -327,7 +277,7 @@ private void doReinterpretTestBasic(final Table initial, reinterpBasicChunkCheck(table.getColumnSource(lColName), table.getRowSet(), toNanoFunc, isSorted, baseLongTime, extraCheck); reinterpBasicChunkCheck(table.getColumnSource(dtColName), table.getRowSet(), toNanoFunc, isSorted, - baseDBDateTime.getNanos(), extraCheck); + baseDateTime.getNanos(), extraCheck); reinterpBasicChunkCheck(table.getColumnSource(iColName), table.getRowSet(), toNanoFunc, isSorted, DateTimeUtils.toEpochNano(baseInstant), extraCheck); reinterpBasicChunkCheck(table.getColumnSource(zdtColName), table.getRowSet(), toNanoFunc, isSorted, @@ -364,8 +314,6 @@ 
public void testReinterpretDBDT() { objectTable, DateTime.class, TableTimeConversions::asDateTime, "DT", DateTimeUtils::nanos); doReinterpretTestBasic( sparseObjectTable, DateTime.class, TableTimeConversions::asDateTime, "DT", DateTimeUtils::nanos); - // doReinterpretTestBasic( - // regionedTable, DateTime.class, TableTimeConversions::asDateTime, "DT", DateTimeUtils::nanos); } @Test @@ -378,8 +326,6 @@ public void testReinterpretInstant() { objectTable, Instant.class, TableTimeConversions::asInstant, "I", DateTimeUtils::toEpochNano); doReinterpretTestBasic( sparseObjectTable, Instant.class, TableTimeConversions::asInstant, "I", DateTimeUtils::toEpochNano); - // doReinterpretTestBasic( - // regionedTable, Instant.class, TableTimeConversions::asInstant, "I", DateTimeUtils::toEpochNano); } @Test @@ -399,9 +345,6 @@ public void testReinterpretZdt() { doReinterpretTestBasic(sparseObjectTable, ZonedDateTime.class, (t, c) -> TableTimeConversions.asZonedDateTime(t, c, "America/Chicago"), null, DateTimeUtils::toEpochNano, extraCheck); - // doReinterpretTestBasic(regionedTable, ZonedDateTime.class, - // (t, c) -> TableTimeConversions.asZonedDateTime(t, c, "America/Chicago"), - // null, DateTimeUtils::toEpochNano, extraCheck); } private void reinterpWrappedChunkCheck(final ColumnSource cs, RowSet rowSet, final boolean isSorted, @@ -422,7 +365,6 @@ public void testReinterpretLocalDate() { doTestReinterpretLocalDate(sparseBaseTable, false); doTestReinterpretLocalDate(objectTable, false); doTestReinterpretLocalDate(sparseObjectTable, false); - // doTestReinterpretLocalDate(regionedTable, false); } private void doTestReinterpretLocalDate(final Table initial, boolean sorted) { @@ -465,7 +407,6 @@ public void testReinterpretLocalTime() { doTestReinterpretLocalTime(sparseBaseTable, false); doTestReinterpretLocalTime(objectTable, false); doTestReinterpretLocalTime(sparseObjectTable, false); - // doTestReinterpretLocalTime(regionedTable, false); } private void 
doTestReinterpretLocalTime(final Table initial, boolean sorted) { diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/generator/TestDataGenerator.java b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/generator/TestDataGenerator.java index 93ae80d838a..93a115e9d95 100644 --- a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/generator/TestDataGenerator.java +++ b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/generator/TestDataGenerator.java @@ -14,7 +14,7 @@ * create the column. * * There are two type parameters. T is the type of our column, U is the type of values we generate. Often these are - * identical, but for DateTimes we can generate longs (U) and have present the DBDateTime to the user (T). + * identical, but for DateTimes we can generate longs (U) and have present the DateTime to the user (T). * * @param the outward facing type of the column source * @param the type of values that will be generated diff --git a/replication/static/src/main/java/io/deephaven/replicators/ReplicateOperators.java b/replication/static/src/main/java/io/deephaven/replicators/ReplicateOperators.java index 56f398fb232..0d8a440b0ac 100644 --- a/replication/static/src/main/java/io/deephaven/replicators/ReplicateOperators.java +++ b/replication/static/src/main/java/io/deephaven/replicators/ReplicateOperators.java @@ -82,7 +82,7 @@ private static void fixupLongAddOnlyMinMax() throws IOException { List lines = ReplicationUtils .fixupChunkAttributes(FileUtils.readLines(longAddOnlyMinMaxFile, Charset.defaultCharset())); lines = ReplicationUtils.replaceRegion(lines, "actualResult", Collections.singletonList( - " private final ArrayBackedColumnSource actualResult;")); + " private final ColumnSource actualResult;")); lines = ReplicationUtils.replaceRegion(lines, "extra constructor params", Collections.singletonList(" Class type,")); lines = ReplicationUtils.addImport(lines, From d24e85587495b7419821963684c208be3fbeadcf Mon Sep 17 
00:00:00 2001 From: Nathaniel Bauernfeind Date: Thu, 2 Feb 2023 11:27:47 -0700 Subject: [PATCH 04/14] rebase fix --- .../table/impl/sources/NanosBasedTimeArraySource.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeArraySource.java index 3ad9829dba5..623aa639e5a 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeArraySource.java @@ -10,6 +10,7 @@ import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.rowset.RowSequence; +import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.chunkattributes.RowKeys; import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.ColumnSource; @@ -144,6 +145,11 @@ public long resetWritableChunkToBackingStore(@NotNull ResettableWritableChunk public long resetWritableChunkToBackingStoreSlice(@NotNull ResettableWritableChunk chunk, long position) { throw new UnsupportedOperationException(); } + + @Override + public void prepareForParallelPopulation(RowSet rowSet) { + nanoSource.prepareForParallelPopulation(rowSet); + } // endregion // region Chunking From 41e5effb0e08f557ca19d95488524801d805ac21 Mon Sep 17 00:00:00 2001 From: Nathaniel Bauernfeind Date: Thu, 2 Feb 2023 14:49:21 -0700 Subject: [PATCH 05/14] Use WritableColumnSource as return type of ArrayBackedColumnSource#getMemoryColumnSource --- .../engine/table/impl/GroupingUtils.java | 8 +- ...crementalChunkedCrossJoinStateManager.java | 15 +- .../engine/table/impl/QueryTable.java | 6 +- ...crementalChunkedCrossJoinStateManager.java | 19 ++- .../engine/table/impl/SortHelpers.java | 2 +- .../table/impl/SourcePartitionedTable.java | 4 +- 
.../StaticChunkedCrossJoinStateManager.java | 19 ++- .../engine/table/impl/StreamTableTools.java | 4 +- .../table/impl/SymbolTableCombiner.java | 31 ++-- .../BaseStreamFirstOrLastChunkedOperator.java | 6 +- .../by/ChunkedOperatorAggregationHelper.java | 8 +- ...ratorAggregationStateManagerTypedBase.java | 5 +- .../impl/by/ssmminmax/BooleanSetResult.java | 4 +- .../impl/by/ssmminmax/ByteSetResult.java | 4 +- .../impl/by/ssmminmax/CharSetResult.java | 4 +- .../impl/by/ssmminmax/DateTimeSetResult.java | 4 +- .../impl/by/ssmminmax/DoubleSetResult.java | 4 +- .../impl/by/ssmminmax/FloatSetResult.java | 4 +- .../table/impl/by/ssmminmax/IntSetResult.java | 4 +- .../impl/by/ssmminmax/LongSetResult.java | 4 +- .../impl/by/ssmminmax/ObjectSetResult.java | 4 +- .../impl/by/ssmminmax/ShortSetResult.java | 4 +- .../ssmminmax/SsmChunkedMinMaxOperator.java | 7 +- .../BooleanPercentileTypeHelper.java | 4 +- .../BytePercentileTypeHelper.java | 4 +- .../BytePercentileTypeMedianHelper.java | 4 +- .../CharPercentileTypeHelper.java | 4 +- .../CharPercentileTypeMedianHelper.java | 4 +- .../DateTimePercentileTypeHelper.java | 4 +- .../DoublePercentileTypeHelper.java | 4 +- .../DoublePercentileTypeMedianHelper.java | 4 +- .../FloatPercentileTypeHelper.java | 4 +- .../FloatPercentileTypeMedianHelper.java | 4 +- .../IntPercentileTypeHelper.java | 4 +- .../IntPercentileTypeMedianHelper.java | 4 +- .../LongPercentileTypeHelper.java | 4 +- .../LongPercentileTypeMedianHelper.java | 4 +- .../ObjectPercentileTypeHelper.java | 4 +- .../ShortPercentileTypeHelper.java | 4 +- .../ShortPercentileTypeMedianHelper.java | 4 +- .../SsmChunkedPercentileOperator.java | 7 +- .../snapshot/SnapshotInternalListener.java | 5 +- .../table/impl/sort/LongMegaMergeKernel.java | 4 +- .../ByteLongMegaMergeDescendingKernel.java | 4 +- .../megamerge/ByteLongMegaMergeKernel.java | 4 +- .../CharLongMegaMergeDescendingKernel.java | 4 +- .../megamerge/CharLongMegaMergeKernel.java | 4 +- 
.../DoubleLongMegaMergeDescendingKernel.java | 4 +- .../megamerge/DoubleLongMegaMergeKernel.java | 4 +- .../FloatLongMegaMergeDescendingKernel.java | 4 +- .../megamerge/FloatLongMegaMergeKernel.java | 4 +- .../IntLongMegaMergeDescendingKernel.java | 4 +- .../megamerge/IntLongMegaMergeKernel.java | 4 +- .../LongLongMegaMergeDescendingKernel.java | 4 +- .../megamerge/LongLongMegaMergeKernel.java | 4 +- .../ObjectLongMegaMergeDescendingKernel.java | 4 +- .../megamerge/ObjectLongMegaMergeKernel.java | 4 +- .../ShortLongMegaMergeDescendingKernel.java | 4 +- .../megamerge/ShortLongMegaMergeKernel.java | 4 +- .../impl/sources/ArrayBackedColumnSource.java | 34 ++-- .../sources/NanosBasedTimeArraySource.java | 100 ++--------- .../deltaaware/DeltaAwareColumnSource.java | 2 +- .../util/BaseArrayBackedMutableTable.java | 17 +- .../engine/table/impl/util/ColumnHolder.java | 5 +- .../table/impl/util/DynamicTableWriter.java | 159 +++++++++--------- .../util/FunctionGeneratedTableFactory.java | 2 +- .../engine/table/impl/util/TableBuilder.java | 9 +- .../io/deephaven/engine/util/TableTools.java | 18 +- .../AbstractBooleanColumnSourceTest.java | 4 - .../sources/ArrayBackedColumnSourceTest.java | 7 +- .../main/java/io/deephaven/csv/CsvTools.java | 9 +- replication/static/build.gradle | 3 - .../ReplicatePrimitiveLibTests.java | 23 --- .../replicators/ReplicatePrimitiveLibs.java | 41 ----- 74 files changed, 312 insertions(+), 443 deletions(-) delete mode 100644 replication/static/src/main/java/io/deephaven/replicators/ReplicatePrimitiveLibTests.java delete mode 100644 replication/static/src/main/java/io/deephaven/replicators/ReplicatePrimitiveLibs.java diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/GroupingUtils.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/GroupingUtils.java index b38d0cf2389..f4761ed636b 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/GroupingUtils.java +++ 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/GroupingUtils.java @@ -90,10 +90,10 @@ public static void forEachGroup(@NotNull final Map groupToR * @return A pair of a flat key column source and a flat RowSet column source */ @SuppressWarnings("unused") - public static Pair, ObjectArraySource> groupingToFlatSources( + public static Pair, ObjectArraySource> groupingToFlatSources( @NotNull final ColumnSource originalKeyColumnSource, @NotNull final Map groupToRowSet) { final int numGroups = groupToRowSet.size(); - final ArrayBackedColumnSource resultKeyColumnSource = getMemoryColumnSource( + final WritableColumnSource resultKeyColumnSource = getMemoryColumnSource( numGroups, originalKeyColumnSource.getType(), originalKeyColumnSource.getComponentType()); final ObjectArraySource resultIndexColumnSource = new ObjectArraySource<>(TrackingWritableRowSet.class); @@ -138,13 +138,13 @@ public static void forEachResponsiveGroup(@NotNull final Map Pair, ObjectArraySource> groupingToFlatSources( + public static Pair, ObjectArraySource> groupingToFlatSources( @NotNull final ColumnSource originalKeyColumnSource, @NotNull final Map groupToRowSet, @NotNull final RowSet intersect, @NotNull final MutableInt responsiveGroups) { final int numGroups = groupToRowSet.size(); - final ArrayBackedColumnSource resultKeyColumnSource = getMemoryColumnSource( + final WritableColumnSource resultKeyColumnSource = getMemoryColumnSource( numGroups, originalKeyColumnSource.getType(), originalKeyColumnSource.getComponentType()); final ObjectArraySource resultIndexColumnSource = new ObjectArraySource<>(TrackingWritableRowSet.class); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/LeftOnlyIncrementalChunkedCrossJoinStateManager.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/LeftOnlyIncrementalChunkedCrossJoinStateManager.java index efb5098b68e..b4e38f32c76 100644 --- 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/LeftOnlyIncrementalChunkedCrossJoinStateManager.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/LeftOnlyIncrementalChunkedCrossJoinStateManager.java @@ -137,7 +137,7 @@ interface StateTrackingCallbackWithRightIndex { // endmixin rehash // the keys for our hash entries - private final ArrayBackedColumnSource[] keySources; + private final WritableColumnSource[] keySources; // the location of any overflow entry in this bucket private final IntegerArraySource overflowLocationSource = new IntegerArraySource(); @@ -150,7 +150,7 @@ interface StateTrackingCallbackWithRightIndex { // the keys for overflow private int nextOverflowLocation = 0; - private final ArrayBackedColumnSource [] overflowKeySources; + private final WritableColumnSource [] overflowKeySources; // the location of the next key in an overflow bucket private final IntegerArraySource overflowOverflowLocationSource = new IntegerArraySource(); // the overflow buckets for the state source @@ -1300,10 +1300,10 @@ private int allocateOverflowLocation() { return nextOverflowLocation++; } - private static long updateWriteThroughChunks(ResettableWritableChunk[] writeThroughChunks, long currentHashLocation, ArrayBackedColumnSource[] sources) { - final long firstBackingChunkPosition = sources[0].resetWritableChunkToBackingStore(writeThroughChunks[0], currentHashLocation); + private static long updateWriteThroughChunks(ResettableWritableChunk[] writeThroughChunks, long currentHashLocation, WritableColumnSource[] sources) { + final long firstBackingChunkPosition = ((ChunkedBackingStoreExposedWritableSource)sources[0]).resetWritableChunkToBackingStore(writeThroughChunks[0], currentHashLocation); for (int jj = 1; jj < sources.length; ++jj) { - if (sources[jj].resetWritableChunkToBackingStore(writeThroughChunks[jj], currentHashLocation) != firstBackingChunkPosition) { + if 
(((ChunkedBackingStoreExposedWritableSource)sources[jj]).resetWritableChunkToBackingStore(writeThroughChunks[jj], currentHashLocation) != firstBackingChunkPosition) { throw new IllegalStateException("ArrayBackedColumnSources have different block sizes!"); } if (writeThroughChunks[jj].size() != writeThroughChunks[0].size()) { @@ -1367,9 +1367,10 @@ private void fillOverflowKeys(ColumnSource.FillContext[] fillContexts, WritableC fillKeys(overflowKeySources, fillContexts, keyChunks, overflowLocationsChunk); } - private static void fillKeys(ArrayBackedColumnSource[] keySources, ColumnSource.FillContext[] fillContexts, WritableChunk[] keyChunks, WritableLongChunk keyIndices) { + private static void fillKeys(WritableColumnSource[] keySources, ColumnSource.FillContext[] fillContexts, WritableChunk[] keyChunks, WritableLongChunk keyIndices) { for (int ii = 0; ii < keySources.length; ++ii) { - keySources[ii].fillChunkUnordered(fillContexts[ii], keyChunks[ii], keyIndices); + //noinspection unchecked + ((FillUnordered) keySources[ii]).fillChunkUnordered(fillContexts[ii], keyChunks[ii], keyIndices); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/QueryTable.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/QueryTable.java index a3f6d696988..87693756e11 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/QueryTable.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/QueryTable.java @@ -1873,9 +1873,9 @@ private Table snapshotHistoryInternal(final Table baseTable) { checkInitiateOperation(); // resultColumns initially contains the trigger columns, then we insert the base columns into it - final Map> resultColumns = SnapshotUtils + final Map> resultColumns = SnapshotUtils .createColumnSourceMap(this.getColumnSourceMap(), ArrayBackedColumnSource::getMemoryColumnSource); - final Map> baseColumns = SnapshotUtils.createColumnSourceMap( + final Map> baseColumns = SnapshotUtils.createColumnSourceMap( 
baseTable.getColumnSourceMap(), ArrayBackedColumnSource::getMemoryColumnSource); resultColumns.putAll(baseColumns); @@ -1962,7 +1962,7 @@ private Table snapshotInternal(Table baseTable, boolean doInitialSnapshot, Strin } // Establish the "base" columns using the same names and types as the table being snapshotted - final Map> baseColumns = + final Map> baseColumns = SnapshotUtils.createColumnSourceMap(baseTable.getColumnSourceMap(), ArrayBackedColumnSource::getMemoryColumnSource); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/RightIncrementalChunkedCrossJoinStateManager.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/RightIncrementalChunkedCrossJoinStateManager.java index 531fbeeecfd..c4dcecf3927 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/RightIncrementalChunkedCrossJoinStateManager.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/RightIncrementalChunkedCrossJoinStateManager.java @@ -127,7 +127,7 @@ interface StateTrackingCallback { // endmixin rehash // the keys for our hash entries - private final ArrayBackedColumnSource[] keySources; + private final WritableColumnSource[] keySources; // the location of any overflow entry in this bucket private final IntegerArraySource overflowLocationSource = new IntegerArraySource(); @@ -140,7 +140,7 @@ interface StateTrackingCallback { // the keys for overflow private int nextOverflowLocation = 0; - private final ArrayBackedColumnSource [] overflowKeySources; + private final WritableColumnSource [] overflowKeySources; // the location of the next key in an overflow bucket private final IntegerArraySource overflowOverflowLocationSource = new IntegerArraySource(); // the overflow buckets for the state source @@ -212,8 +212,8 @@ interface StateTrackingCallback { this.tableHashPivot = tableSize; // endmixin rehash - overflowKeySources = new ArrayBackedColumnSource[keyColumnCount]; - keySources = new ArrayBackedColumnSource[keyColumnCount]; + 
overflowKeySources = new WritableColumnSource[keyColumnCount]; + keySources = new WritableColumnSource[keyColumnCount]; keyChunkTypes = new ChunkType[keyColumnCount]; chunkHashers = new ChunkHasher[keyColumnCount]; @@ -1615,10 +1615,10 @@ private int allocateOverflowLocation() { return nextOverflowLocation++; } - private static long updateWriteThroughChunks(ResettableWritableChunk[] writeThroughChunks, long currentHashLocation, ArrayBackedColumnSource[] sources) { - final long firstBackingChunkPosition = sources[0].resetWritableChunkToBackingStore(writeThroughChunks[0], currentHashLocation); + private static long updateWriteThroughChunks(ResettableWritableChunk[] writeThroughChunks, long currentHashLocation, WritableColumnSource[] sources) { + final long firstBackingChunkPosition = ((ChunkedBackingStoreExposedWritableSource)sources[0]).resetWritableChunkToBackingStore(writeThroughChunks[0], currentHashLocation); for (int jj = 1; jj < sources.length; ++jj) { - if (sources[jj].resetWritableChunkToBackingStore(writeThroughChunks[jj], currentHashLocation) != firstBackingChunkPosition) { + if (((ChunkedBackingStoreExposedWritableSource)sources[jj]).resetWritableChunkToBackingStore(writeThroughChunks[jj], currentHashLocation) != firstBackingChunkPosition) { throw new IllegalStateException("ArrayBackedColumnSources have different block sizes!"); } if (writeThroughChunks[jj].size() != writeThroughChunks[0].size()) { @@ -1682,9 +1682,10 @@ private void fillOverflowKeys(ColumnSource.FillContext[] fillContexts, WritableC fillKeys(overflowKeySources, fillContexts, keyChunks, overflowLocationsChunk); } - private static void fillKeys(ArrayBackedColumnSource[] keySources, ColumnSource.FillContext[] fillContexts, WritableChunk[] keyChunks, WritableLongChunk keyIndices) { + private static void fillKeys(WritableColumnSource[] keySources, ColumnSource.FillContext[] fillContexts, WritableChunk[] keyChunks, WritableLongChunk keyIndices) { for (int ii = 0; ii < keySources.length; ++ii) 
{ - keySources[ii].fillChunkUnordered(fillContexts[ii], keyChunks[ii], keyIndices); + //noinspection unchecked + ((FillUnordered) keySources[ii]).fillChunkUnordered(fillContexts[ii], keyChunks[ii], keyIndices); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/SortHelpers.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/SortHelpers.java index e8eef2bb38e..65c19efe8a4 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/SortHelpers.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/SortHelpers.java @@ -457,7 +457,7 @@ private static SortMapping doMegaSortOne(SortingOrder order, ColumnSource valuesToMerge = + final WritableColumnSource valuesToMerge = ArrayBackedColumnSource.getMemoryColumnSource(0, columnSource.getType()); valuesToMerge.ensureCapacity(sortSize, false); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/SourcePartitionedTable.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/SourcePartitionedTable.java index ea8f49ae306..cd2dde47469 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/SourcePartitionedTable.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/SourcePartitionedTable.java @@ -83,8 +83,8 @@ private static final class UnderlyingTableMaintainer { private final Predicate locationKeyMatcher; private final TrackingWritableRowSet resultRows; - private final ArrayBackedColumnSource resultTableLocationKeys; - private final ArrayBackedColumnSource
resultLocationTables; + private final WritableColumnSource resultTableLocationKeys; + private final WritableColumnSource
resultLocationTables; private final QueryTable result; private final UpdateSourceCombiner refreshCombiner; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/StaticChunkedCrossJoinStateManager.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/StaticChunkedCrossJoinStateManager.java index 12573ef4a2a..47ea9f4e991 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/StaticChunkedCrossJoinStateManager.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/StaticChunkedCrossJoinStateManager.java @@ -121,7 +121,7 @@ interface StateTrackingCallback { // endmixin rehash // the keys for our hash entries - private final ArrayBackedColumnSource[] keySources; + private final WritableColumnSource[] keySources; // the location of any overflow entry in this bucket private final IntegerArraySource overflowLocationSource = new IntegerArraySource(); @@ -134,7 +134,7 @@ interface StateTrackingCallback { // the keys for overflow private int nextOverflowLocation = 0; - private final ArrayBackedColumnSource [] overflowKeySources; + private final WritableColumnSource [] overflowKeySources; // the location of the next key in an overflow bucket private final IntegerArraySource overflowOverflowLocationSource = new IntegerArraySource(); // the overflow buckets for the state source @@ -197,8 +197,8 @@ interface StateTrackingCallback { this.tableHashPivot = tableSize; // endmixin rehash - overflowKeySources = new ArrayBackedColumnSource[keyColumnCount]; - keySources = new ArrayBackedColumnSource[keyColumnCount]; + overflowKeySources = new WritableColumnSource[keyColumnCount]; + keySources = new WritableColumnSource[keyColumnCount]; keyChunkTypes = new ChunkType[keyColumnCount]; chunkHashers = new ChunkHasher[keyColumnCount]; @@ -1257,10 +1257,10 @@ private int allocateOverflowLocation() { return nextOverflowLocation++; } - private static long updateWriteThroughChunks(ResettableWritableChunk[] writeThroughChunks, long 
currentHashLocation, ArrayBackedColumnSource[] sources) { - final long firstBackingChunkPosition = sources[0].resetWritableChunkToBackingStore(writeThroughChunks[0], currentHashLocation); + private static long updateWriteThroughChunks(ResettableWritableChunk[] writeThroughChunks, long currentHashLocation, WritableColumnSource[] sources) { + final long firstBackingChunkPosition = ((ChunkedBackingStoreExposedWritableSource)sources[0]).resetWritableChunkToBackingStore(writeThroughChunks[0], currentHashLocation); for (int jj = 1; jj < sources.length; ++jj) { - if (sources[jj].resetWritableChunkToBackingStore(writeThroughChunks[jj], currentHashLocation) != firstBackingChunkPosition) { + if (((ChunkedBackingStoreExposedWritableSource)sources[jj]).resetWritableChunkToBackingStore(writeThroughChunks[jj], currentHashLocation) != firstBackingChunkPosition) { throw new IllegalStateException("ArrayBackedColumnSources have different block sizes!"); } if (writeThroughChunks[jj].size() != writeThroughChunks[0].size()) { @@ -1324,9 +1324,10 @@ private void fillOverflowKeys(ColumnSource.FillContext[] fillContexts, WritableC fillKeys(overflowKeySources, fillContexts, keyChunks, overflowLocationsChunk); } - private static void fillKeys(ArrayBackedColumnSource[] keySources, ColumnSource.FillContext[] fillContexts, WritableChunk[] keyChunks, WritableLongChunk keyIndices) { + private static void fillKeys(WritableColumnSource[] keySources, ColumnSource.FillContext[] fillContexts, WritableChunk[] keyChunks, WritableLongChunk keyIndices) { for (int ii = 0; ii < keySources.length; ++ii) { - keySources[ii].fillChunkUnordered(fillContexts[ii], keyChunks[ii], keyIndices); + //noinspection unchecked + ((FillUnordered)keySources[ii]).fillChunkUnordered(fillContexts[ii], keyChunks[ii], keyIndices); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/StreamTableTools.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/StreamTableTools.java index 
78dba8521ac..a3c5b99c966 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/StreamTableTools.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/StreamTableTools.java @@ -52,7 +52,7 @@ public static Table streamToAppendOnlyTable(final Table streamTable) { ConstructSnapshot.callDataSnapshotFunction("streamToAppendOnlyTable", swapListener.makeSnapshotControl(), (boolean usePrev, long beforeClockValue) -> { - final Map> columns = new LinkedHashMap<>(); + final Map> columns = new LinkedHashMap<>(); final Map> columnSourceMap = baseStreamTable.getColumnSourceMap(); final int columnCount = columnSourceMap.size(); @@ -62,7 +62,7 @@ public static Table streamToAppendOnlyTable(final Table streamTable) { for (Map.Entry> nameColumnSourceEntry : columnSourceMap .entrySet()) { final ColumnSource existingColumn = nameColumnSourceEntry.getValue(); - final ArrayBackedColumnSource newColumn = ArrayBackedColumnSource.getMemoryColumnSource( + final WritableColumnSource newColumn = ArrayBackedColumnSource.getMemoryColumnSource( 0, existingColumn.getType(), existingColumn.getComponentType()); columns.put(nameColumnSourceEntry.getKey(), newColumn); // for the source columns, we would like to read primitives instead of objects in cases diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/SymbolTableCombiner.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/SymbolTableCombiner.java index 5566700ec4b..792ee55a6a9 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/SymbolTableCombiner.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/SymbolTableCombiner.java @@ -104,7 +104,7 @@ class SymbolTableCombiner // endmixin rehash // the keys for our hash entries - private final ArrayBackedColumnSource[] keySources; + private final WritableColumnSource[] keySources; // the location of any overflow entry in this bucket private final IntegerArraySource overflowLocationSource = new 
IntegerArraySource(); @@ -117,7 +117,7 @@ class SymbolTableCombiner // the keys for overflow private int nextOverflowLocation = 0; - private final ArrayBackedColumnSource [] overflowKeySources; + private final WritableColumnSource [] overflowKeySources; // the location of the next key in an overflow bucket private final IntegerArraySource overflowOverflowLocationSource = new IntegerArraySource(); // the overflow buckets for the state source @@ -165,8 +165,8 @@ class SymbolTableCombiner this.tableHashPivot = tableSize; // endmixin rehash - overflowKeySources = new ArrayBackedColumnSource[keyColumnCount]; - keySources = new ArrayBackedColumnSource[keyColumnCount]; + overflowKeySources = new WritableColumnSource[keyColumnCount]; + keySources = new WritableColumnSource[keyColumnCount]; keyChunkTypes = new ChunkType[keyColumnCount]; chunkHashers = new ChunkHasher[keyColumnCount]; @@ -1094,10 +1094,10 @@ private void swapOverflowPointers(LongChunk tableLocationsChunk, LongCh private void updateWriteThroughState(ResettableWritableIntChunk writeThroughState, long firstPosition, long expectedLastPosition) { final long firstBackingChunkPosition = uniqueIdentifierSource.resetWritableChunkToBackingStore(writeThroughState, firstPosition); if (firstBackingChunkPosition != firstPosition) { - throw new IllegalStateException("ArrayBackedColumnSources have different block sizes!"); + throw new IllegalStateException("WritableColumnSources have different block sizes!"); } if (firstBackingChunkPosition + writeThroughState.size() - 1 != expectedLastPosition) { - throw new IllegalStateException("ArrayBackedColumnSources have different block sizes!"); + throw new IllegalStateException("WritableColumnSources have different block sizes!"); } } // endmixin allowUpdateWriteThroughState @@ -1105,10 +1105,10 @@ private void updateWriteThroughState(ResettableWritableIntChunk writeThr private void updateWriteThroughOverflow(ResettableWritableIntChunk writeThroughOverflow, long firstPosition, long 
expectedLastPosition) { final long firstBackingChunkPosition = overflowLocationSource.resetWritableChunkToBackingStore(writeThroughOverflow, firstPosition); if (firstBackingChunkPosition != firstPosition) { - throw new IllegalStateException("ArrayBackedColumnSources have different block sizes!"); + throw new IllegalStateException("WritableColumnSources have different block sizes!"); } if (firstBackingChunkPosition + writeThroughOverflow.size() - 1 != expectedLastPosition) { - throw new IllegalStateException("ArrayBackedColumnSources have different block sizes!"); + throw new IllegalStateException("WritableColumnSources have different block sizes!"); } } @@ -1123,14 +1123,14 @@ private int allocateOverflowLocation() { return nextOverflowLocation++; } - private static long updateWriteThroughChunks(ResettableWritableChunk[] writeThroughChunks, long currentHashLocation, ArrayBackedColumnSource[] sources) { - final long firstBackingChunkPosition = sources[0].resetWritableChunkToBackingStore(writeThroughChunks[0], currentHashLocation); + private static long updateWriteThroughChunks(ResettableWritableChunk[] writeThroughChunks, long currentHashLocation, WritableColumnSource[] sources) { + final long firstBackingChunkPosition = ((ChunkedBackingStoreExposedWritableSource)sources[0]).resetWritableChunkToBackingStore(writeThroughChunks[0], currentHashLocation); for (int jj = 1; jj < sources.length; ++jj) { - if (sources[jj].resetWritableChunkToBackingStore(writeThroughChunks[jj], currentHashLocation) != firstBackingChunkPosition) { - throw new IllegalStateException("ArrayBackedColumnSources have different block sizes!"); + if (((ChunkedBackingStoreExposedWritableSource)sources[jj]).resetWritableChunkToBackingStore(writeThroughChunks[jj], currentHashLocation) != firstBackingChunkPosition) { + throw new IllegalStateException("WritableColumnSources have different block sizes!"); } if (writeThroughChunks[jj].size() != writeThroughChunks[0].size()) { - throw new 
IllegalStateException("ArrayBackedColumnSources have different block sizes!"); + throw new IllegalStateException("WritableColumnSources have different block sizes!"); } } return firstBackingChunkPosition; @@ -1187,9 +1187,10 @@ private void fillOverflowKeys(ColumnSource.FillContext[] fillContexts, WritableC fillKeys(overflowKeySources, fillContexts, keyChunks, overflowLocationsChunk); } - private static void fillKeys(ArrayBackedColumnSource[] keySources, ColumnSource.FillContext[] fillContexts, WritableChunk[] keyChunks, WritableLongChunk keyIndices) { + private static void fillKeys(WritableColumnSource[] keySources, ColumnSource.FillContext[] fillContexts, WritableChunk[] keyChunks, WritableLongChunk keyIndices) { for (int ii = 0; ii < keySources.length; ++ii) { - keySources[ii].fillChunkUnordered(fillContexts[ii], keyChunks[ii], keyIndices); + //noinspection unchecked + ((FillUnordered) keySources[ii]).fillChunkUnordered(fillContexts[ii], keyChunks[ii], keyIndices); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/BaseStreamFirstOrLastChunkedOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/BaseStreamFirstOrLastChunkedOperator.java index 74e23ff35c2..692da25a0da 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/BaseStreamFirstOrLastChunkedOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/BaseStreamFirstOrLastChunkedOperator.java @@ -36,7 +36,7 @@ public abstract class BaseStreamFirstOrLastChunkedOperator /** * Result columns, parallel to {@link #inputColumns} and {@link #outputColumns}. */ - private final Map> resultColumns; + private final Map> resultColumns; /** *

* Input columns, parallel to {@link #outputColumns} and {@link #resultColumns}. @@ -66,11 +66,11 @@ protected BaseStreamFirstOrLastChunkedOperator(@NotNull final MatchPair[] result numResultColumns = resultPairs.length; inputColumns = new ColumnSource[numResultColumns]; outputColumns = new WritableColumnSource[numResultColumns]; - final Map> resultColumnsMutable = new LinkedHashMap<>(numResultColumns); + final Map> resultColumnsMutable = new LinkedHashMap<>(numResultColumns); for (int ci = 0; ci < numResultColumns; ++ci) { final MatchPair resultPair = resultPairs[ci]; final ColumnSource streamSource = streamTable.getColumnSource(resultPair.rightColumn()); - final ArrayBackedColumnSource resultSource = ArrayBackedColumnSource.getMemoryColumnSource(0, + final WritableColumnSource resultSource = ArrayBackedColumnSource.getMemoryColumnSource(0, streamSource.getType(), streamSource.getComponentType()); resultColumnsMutable.put(resultPair.leftColumn(), resultSource); inputColumns[ci] = ReinterpretUtils.maybeConvertToPrimitive(streamSource); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ChunkedOperatorAggregationHelper.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ChunkedOperatorAggregationHelper.java index 7ca7af2f66d..779982f548d 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ChunkedOperatorAggregationHelper.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ChunkedOperatorAggregationHelper.java @@ -192,8 +192,8 @@ private static QueryTable aggregation( // Gather the result key columns final ColumnSource[] keyColumnsRaw = new ColumnSource[keyHashTableSources.length]; - final ArrayBackedColumnSource[] keyColumnsCopied = - input.isRefreshing() ? new ArrayBackedColumnSource[keyHashTableSources.length] : null; + final WritableColumnSource[] keyColumnsCopied = + input.isRefreshing() ? 
new WritableColumnSource[keyHashTableSources.length] : null; for (int kci = 0; kci < keyHashTableSources.length; ++kci) { ColumnSource resultKeyColumnSource = keyHashTableSources[kci]; if (keySources[kci] != reinterpretedKeySources[kci]) { @@ -1533,7 +1533,7 @@ private static void modifySlots(RowSetBuilderRandom modifiedBuilder, IntChunk keySource, AggregationContext ac) { - final Pair> groupKeyIndexTable; + final Pair> groupKeyIndexTable; final Map grouping = RowSetIndexer.of(withView.getRowSet()).getGrouping(keySource); // noinspection unchecked groupKeyIndexTable = GroupingUtils.groupingToFlatSources((ColumnSource) keySource, grouping); @@ -1814,7 +1814,7 @@ private static void initialGroupedKeyAddition(QueryTable input, MutableInt outputPosition, RowSetBuilderRandom initialRowsBuilder, boolean usePrev) { - final Pair> groupKeyIndexTable; + final Pair> groupKeyIndexTable; final RowSetIndexer indexer = RowSetIndexer.of(input.getRowSet()); final Map grouping = usePrev ? indexer.getPrevGrouping(reinterpretedKeySources[0]) : indexer.getGrouping(reinterpretedKeySources[0]); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/OperatorAggregationStateManagerTypedBase.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/OperatorAggregationStateManagerTypedBase.java index 125572d1796..7d77b335832 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/OperatorAggregationStateManagerTypedBase.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/OperatorAggregationStateManagerTypedBase.java @@ -8,6 +8,7 @@ import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.IntegerArraySource; import 
io.deephaven.engine.table.impl.util.TypedHasherUtil.BuildOrProbeContext.BuildContext; @@ -58,7 +59,7 @@ public final int maxTableSize() { private int freeOverflowCount = 0; // the keys for our hash entries - protected final ArrayBackedColumnSource[] mainKeySources; + protected final WritableColumnSource[] mainKeySources; // the location of the first overflow entry in this bucket, parallel to keySources protected final IntegerArraySource mainOverflowLocationSource = new IntegerArraySource(); @@ -66,7 +67,7 @@ public final int maxTableSize() { private int nextOverflowLocation = 0; // the overflow chains, logically a linked list using integer pointers into these three parallel array sources - protected final ArrayBackedColumnSource[] overflowKeySources; + protected final WritableColumnSource[] overflowKeySources; // the location of the next key in an overflow bucket, parallel with overflowKeySources protected final IntegerArraySource overflowOverflowLocationSource = new IntegerArraySource(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/BooleanSetResult.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/BooleanSetResult.java index 82ce5bec62e..02615054fa2 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/BooleanSetResult.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/BooleanSetResult.java @@ -3,8 +3,8 @@ */ package io.deephaven.engine.table.impl.by.ssmminmax; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.BooleanUtils; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.BooleanArraySource; import io.deephaven.engine.table.impl.ssms.ObjectSegmentedSortedMultiset; import io.deephaven.engine.table.impl.ssms.SegmentedSortedMultiSet; @@ -14,7 +14,7 @@ public class BooleanSetResult implements SsmChunkedMinMaxOperator.SetResult { private final 
boolean minimum; private final BooleanArraySource resultColumn; - public BooleanSetResult(boolean minimum, ArrayBackedColumnSource resultColumn) { + public BooleanSetResult(boolean minimum, WritableColumnSource resultColumn) { this.minimum = minimum; this.resultColumn = (BooleanArraySource) resultColumn; } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/ByteSetResult.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/ByteSetResult.java index 74d8fff36e4..0e55b8fb2d3 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/ByteSetResult.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/ByteSetResult.java @@ -8,7 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmminmax; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.sources.ByteArraySource; import io.deephaven.engine.table.impl.ssms.ByteSegmentedSortedMultiset; import io.deephaven.engine.table.impl.ssms.SegmentedSortedMultiSet; @@ -19,7 +19,7 @@ public class ByteSetResult implements SsmChunkedMinMaxOperator.SetResult { private final boolean minimum; private final ByteArraySource resultColumn; - public ByteSetResult(boolean minimum, ArrayBackedColumnSource resultColumn) { + public ByteSetResult(boolean minimum, WritableColumnSource resultColumn) { this.minimum = minimum; this.resultColumn = (ByteArraySource) resultColumn; } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/CharSetResult.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/CharSetResult.java index feaa65d9501..5c0736c980b 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/CharSetResult.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/CharSetResult.java @@ -3,7 +3,7 @@ */ package 
io.deephaven.engine.table.impl.by.ssmminmax; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.sources.CharacterArraySource; import io.deephaven.engine.table.impl.ssms.CharSegmentedSortedMultiset; import io.deephaven.engine.table.impl.ssms.SegmentedSortedMultiSet; @@ -14,7 +14,7 @@ public class CharSetResult implements SsmChunkedMinMaxOperator.SetResult { private final boolean minimum; private final CharacterArraySource resultColumn; - public CharSetResult(boolean minimum, ArrayBackedColumnSource resultColumn) { + public CharSetResult(boolean minimum, WritableColumnSource resultColumn) { this.minimum = minimum; this.resultColumn = (CharacterArraySource) resultColumn; } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/DateTimeSetResult.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/DateTimeSetResult.java index b00f1566bbf..c591001dccc 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/DateTimeSetResult.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/DateTimeSetResult.java @@ -3,8 +3,8 @@ */ package io.deephaven.engine.table.impl.by.ssmminmax; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.QueryConstants; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.DateTimeArraySource; import io.deephaven.engine.table.impl.ssms.LongSegmentedSortedMultiset; import io.deephaven.engine.table.impl.ssms.SegmentedSortedMultiSet; @@ -14,7 +14,7 @@ public class DateTimeSetResult implements SsmChunkedMinMaxOperator.SetResult { private final boolean minimum; private final DateTimeArraySource resultColumn; - public DateTimeSetResult(boolean minimum, ArrayBackedColumnSource resultColumn) { + public DateTimeSetResult(boolean minimum, 
WritableColumnSource resultColumn) { this.minimum = minimum; this.resultColumn = (DateTimeArraySource) resultColumn; } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/DoubleSetResult.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/DoubleSetResult.java index ba00bdcf603..76ad3ad5921 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/DoubleSetResult.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/DoubleSetResult.java @@ -8,7 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmminmax; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.sources.DoubleArraySource; import io.deephaven.engine.table.impl.ssms.DoubleSegmentedSortedMultiset; import io.deephaven.engine.table.impl.ssms.SegmentedSortedMultiSet; @@ -19,7 +19,7 @@ public class DoubleSetResult implements SsmChunkedMinMaxOperator.SetResult { private final boolean minimum; private final DoubleArraySource resultColumn; - public DoubleSetResult(boolean minimum, ArrayBackedColumnSource resultColumn) { + public DoubleSetResult(boolean minimum, WritableColumnSource resultColumn) { this.minimum = minimum; this.resultColumn = (DoubleArraySource) resultColumn; } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/FloatSetResult.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/FloatSetResult.java index 3be1c1f7971..014e4e63368 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/FloatSetResult.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/FloatSetResult.java @@ -8,7 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmminmax; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; +import 
io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.sources.FloatArraySource; import io.deephaven.engine.table.impl.ssms.FloatSegmentedSortedMultiset; import io.deephaven.engine.table.impl.ssms.SegmentedSortedMultiSet; @@ -19,7 +19,7 @@ public class FloatSetResult implements SsmChunkedMinMaxOperator.SetResult { private final boolean minimum; private final FloatArraySource resultColumn; - public FloatSetResult(boolean minimum, ArrayBackedColumnSource resultColumn) { + public FloatSetResult(boolean minimum, WritableColumnSource resultColumn) { this.minimum = minimum; this.resultColumn = (FloatArraySource) resultColumn; } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/IntSetResult.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/IntSetResult.java index 4b2b27067c7..b2f6ba30c66 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/IntSetResult.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/IntSetResult.java @@ -8,7 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmminmax; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.sources.IntegerArraySource; import io.deephaven.engine.table.impl.ssms.IntSegmentedSortedMultiset; import io.deephaven.engine.table.impl.ssms.SegmentedSortedMultiSet; @@ -19,7 +19,7 @@ public class IntSetResult implements SsmChunkedMinMaxOperator.SetResult { private final boolean minimum; private final IntegerArraySource resultColumn; - public IntSetResult(boolean minimum, ArrayBackedColumnSource resultColumn) { + public IntSetResult(boolean minimum, WritableColumnSource resultColumn) { this.minimum = minimum; this.resultColumn = (IntegerArraySource) resultColumn; } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/LongSetResult.java 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/LongSetResult.java index 3f4ae448668..84ad8715c5b 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/LongSetResult.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/LongSetResult.java @@ -8,7 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmminmax; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.sources.LongArraySource; import io.deephaven.engine.table.impl.ssms.LongSegmentedSortedMultiset; import io.deephaven.engine.table.impl.ssms.SegmentedSortedMultiSet; @@ -19,7 +19,7 @@ public class LongSetResult implements SsmChunkedMinMaxOperator.SetResult { private final boolean minimum; private final LongArraySource resultColumn; - public LongSetResult(boolean minimum, ArrayBackedColumnSource resultColumn) { + public LongSetResult(boolean minimum, WritableColumnSource resultColumn) { this.minimum = minimum; this.resultColumn = (LongArraySource) resultColumn; } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/ObjectSetResult.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/ObjectSetResult.java index 12b51d1e783..c803c5dd2be 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/ObjectSetResult.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/ObjectSetResult.java @@ -10,7 +10,7 @@ import java.util.Objects; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.sources.ObjectArraySource; import io.deephaven.engine.table.impl.ssms.ObjectSegmentedSortedMultiset; import io.deephaven.engine.table.impl.ssms.SegmentedSortedMultiSet; @@ -20,7 +20,7 @@ public class ObjectSetResult implements 
SsmChunkedMinMaxOperator.SetResult { private final boolean minimum; private final ObjectArraySource resultColumn; - public ObjectSetResult(boolean minimum, ArrayBackedColumnSource resultColumn) { + public ObjectSetResult(boolean minimum, WritableColumnSource resultColumn) { this.minimum = minimum; this.resultColumn = (ObjectArraySource) resultColumn; } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/ShortSetResult.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/ShortSetResult.java index cdfa065ee22..0b2b5b435ee 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/ShortSetResult.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/ShortSetResult.java @@ -8,7 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmminmax; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.sources.ShortArraySource; import io.deephaven.engine.table.impl.ssms.ShortSegmentedSortedMultiset; import io.deephaven.engine.table.impl.ssms.SegmentedSortedMultiSet; @@ -19,7 +19,7 @@ public class ShortSetResult implements SsmChunkedMinMaxOperator.SetResult { private final boolean minimum; private final ShortArraySource resultColumn; - public ShortSetResult(boolean minimum, ArrayBackedColumnSource resultColumn) { + public ShortSetResult(boolean minimum, WritableColumnSource resultColumn) { this.minimum = minimum; this.resultColumn = (ShortArraySource) resultColumn; } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/SsmChunkedMinMaxOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/SsmChunkedMinMaxOperator.java index b0567ec5b10..8f2778cf44f 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/SsmChunkedMinMaxOperator.java +++ 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/SsmChunkedMinMaxOperator.java @@ -8,6 +8,7 @@ import io.deephaven.chunk.attributes.Values; import io.deephaven.configuration.Configuration; import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.time.DateTime; import io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator; import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; @@ -27,7 +28,7 @@ public class SsmChunkedMinMaxOperator implements IterativeChunkedAggregationOperator { private static final int NODE_SIZE = Configuration.getInstance().getIntegerWithDefault("SsmChunkedMinMaxOperator.nodeSize", 4096); - private final ArrayBackedColumnSource resultColumn; + private final WritableColumnSource resultColumn; private final ObjectArraySource ssms; private final String name; private final CompactKernel compactAndCountKernel; @@ -58,7 +59,7 @@ public SsmChunkedMinMaxOperator( } private static SetResult makeSetResult(ChunkType chunkType, Class type, boolean minimum, - ArrayBackedColumnSource resultColumn) { + WritableColumnSource resultColumn) { if (type == DateTime.class) { return new DateTimeSetResult(minimum, resultColumn); } else if (type == Boolean.class) { @@ -391,7 +392,7 @@ public IterativeChunkedAggregationOperator makeSecondaryOperator(boolean isMinim } private class SecondaryOperator implements IterativeChunkedAggregationOperator { - private final ArrayBackedColumnSource resultColumn; + private final WritableColumnSource resultColumn; private final String resultName; private final SetResult setResult; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/BooleanPercentileTypeHelper.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/BooleanPercentileTypeHelper.java index d50a23fc0b1..a26d4c47777 100644 --- 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/BooleanPercentileTypeHelper.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/BooleanPercentileTypeHelper.java @@ -5,9 +5,9 @@ import io.deephaven.chunk.attributes.ChunkLengths; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.BooleanUtils; import io.deephaven.util.compare.ObjectComparisons; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.BooleanArraySource; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.Chunk; @@ -21,7 +21,7 @@ public class BooleanPercentileTypeHelper implements SsmChunkedPercentileOperator private final double percentile; private final BooleanArraySource resultColumn; - BooleanPercentileTypeHelper(double percentile, ArrayBackedColumnSource resultColumn) { + BooleanPercentileTypeHelper(double percentile, WritableColumnSource resultColumn) { this.percentile = percentile; // region resultColumn this.resultColumn = (BooleanArraySource) resultColumn; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/BytePercentileTypeHelper.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/BytePercentileTypeHelper.java index 41d6c89a4e1..af7ecc715ac 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/BytePercentileTypeHelper.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/BytePercentileTypeHelper.java @@ -10,8 +10,8 @@ import io.deephaven.chunk.attributes.ChunkLengths; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.compare.ByteComparisons; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.ByteArraySource; 
import io.deephaven.chunk.ByteChunk; import io.deephaven.chunk.Chunk; @@ -26,7 +26,7 @@ public class BytePercentileTypeHelper implements SsmChunkedPercentileOperator.Pe private final double percentile; private final ByteArraySource resultColumn; - BytePercentileTypeHelper(double percentile, ArrayBackedColumnSource resultColumn) { + BytePercentileTypeHelper(double percentile, WritableColumnSource resultColumn) { this.percentile = percentile; // region resultColumn this.resultColumn = (ByteArraySource) resultColumn; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/BytePercentileTypeMedianHelper.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/BytePercentileTypeMedianHelper.java index 897fa78ef22..1a53c117b64 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/BytePercentileTypeMedianHelper.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/BytePercentileTypeMedianHelper.java @@ -8,7 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmpercentile; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.sources.DoubleArraySource; import io.deephaven.engine.table.impl.ssms.ByteSegmentedSortedMultiset; import io.deephaven.engine.table.impl.ssms.SegmentedSortedMultiSet; @@ -19,7 +19,7 @@ public class BytePercentileTypeMedianHelper extends BytePercentileTypeHelper { private final double percentile; private final DoubleArraySource resultColumn; - BytePercentileTypeMedianHelper(double percentile, ArrayBackedColumnSource resultColumn) { + BytePercentileTypeMedianHelper(double percentile, WritableColumnSource resultColumn) { super(percentile, null); this.percentile = percentile; // region resultColumn diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/CharPercentileTypeHelper.java 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/CharPercentileTypeHelper.java index ec7a4077928..4cf34c7c58d 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/CharPercentileTypeHelper.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/CharPercentileTypeHelper.java @@ -5,8 +5,8 @@ import io.deephaven.chunk.attributes.ChunkLengths; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.compare.CharComparisons; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.CharacterArraySource; import io.deephaven.chunk.CharChunk; import io.deephaven.chunk.Chunk; @@ -21,7 +21,7 @@ public class CharPercentileTypeHelper implements SsmChunkedPercentileOperator.Pe private final double percentile; private final CharacterArraySource resultColumn; - CharPercentileTypeHelper(double percentile, ArrayBackedColumnSource resultColumn) { + CharPercentileTypeHelper(double percentile, WritableColumnSource resultColumn) { this.percentile = percentile; // region resultColumn this.resultColumn = (CharacterArraySource) resultColumn; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/CharPercentileTypeMedianHelper.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/CharPercentileTypeMedianHelper.java index fd45988646a..42864c4e39d 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/CharPercentileTypeMedianHelper.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/CharPercentileTypeMedianHelper.java @@ -3,7 +3,7 @@ */ package io.deephaven.engine.table.impl.by.ssmpercentile; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; +import io.deephaven.engine.table.WritableColumnSource; import 
io.deephaven.engine.table.impl.sources.DoubleArraySource; import io.deephaven.engine.table.impl.ssms.CharSegmentedSortedMultiset; import io.deephaven.engine.table.impl.ssms.SegmentedSortedMultiSet; @@ -14,7 +14,7 @@ public class CharPercentileTypeMedianHelper extends CharPercentileTypeHelper { private final double percentile; private final DoubleArraySource resultColumn; - CharPercentileTypeMedianHelper(double percentile, ArrayBackedColumnSource resultColumn) { + CharPercentileTypeMedianHelper(double percentile, WritableColumnSource resultColumn) { super(percentile, null); this.percentile = percentile; // region resultColumn diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/DateTimePercentileTypeHelper.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/DateTimePercentileTypeHelper.java index 9dbb488e9f1..5fcf439ceeb 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/DateTimePercentileTypeHelper.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/DateTimePercentileTypeHelper.java @@ -5,8 +5,8 @@ import io.deephaven.chunk.attributes.ChunkLengths; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.compare.LongComparisons; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.DateTimeArraySource; import io.deephaven.chunk.Chunk; import io.deephaven.chunk.IntChunk; @@ -21,7 +21,7 @@ public class DateTimePercentileTypeHelper implements SsmChunkedPercentileOperato private final double percentile; private final DateTimeArraySource resultColumn; - DateTimePercentileTypeHelper(double percentile, ArrayBackedColumnSource resultColumn) { + DateTimePercentileTypeHelper(double percentile, WritableColumnSource resultColumn) { this.percentile = percentile; // region resultColumn this.resultColumn = 
(DateTimeArraySource) resultColumn; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/DoublePercentileTypeHelper.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/DoublePercentileTypeHelper.java index f39d8d4242f..c0def03af67 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/DoublePercentileTypeHelper.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/DoublePercentileTypeHelper.java @@ -10,8 +10,8 @@ import io.deephaven.chunk.attributes.ChunkLengths; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.compare.DoubleComparisons; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.DoubleArraySource; import io.deephaven.chunk.DoubleChunk; import io.deephaven.chunk.Chunk; @@ -26,7 +26,7 @@ public class DoublePercentileTypeHelper implements SsmChunkedPercentileOperator. 
private final double percentile; private final DoubleArraySource resultColumn; - DoublePercentileTypeHelper(double percentile, ArrayBackedColumnSource resultColumn) { + DoublePercentileTypeHelper(double percentile, WritableColumnSource resultColumn) { this.percentile = percentile; // region resultColumn this.resultColumn = (DoubleArraySource) resultColumn; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/DoublePercentileTypeMedianHelper.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/DoublePercentileTypeMedianHelper.java index 243ef56ac1c..3ada481d66b 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/DoublePercentileTypeMedianHelper.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/DoublePercentileTypeMedianHelper.java @@ -8,7 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmpercentile; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.sources.DoubleArraySource; import io.deephaven.engine.table.impl.ssms.DoubleSegmentedSortedMultiset; import io.deephaven.engine.table.impl.ssms.SegmentedSortedMultiSet; @@ -19,7 +19,7 @@ public class DoublePercentileTypeMedianHelper extends DoublePercentileTypeHelper private final double percentile; private final DoubleArraySource resultColumn; - DoublePercentileTypeMedianHelper(double percentile, ArrayBackedColumnSource resultColumn) { + DoublePercentileTypeMedianHelper(double percentile, WritableColumnSource resultColumn) { super(percentile, resultColumn); this.percentile = percentile; this.resultColumn = (DoubleArraySource)resultColumn; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/FloatPercentileTypeHelper.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/FloatPercentileTypeHelper.java index 
43125c9f5af..124c724f991 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/FloatPercentileTypeHelper.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/FloatPercentileTypeHelper.java @@ -10,8 +10,8 @@ import io.deephaven.chunk.attributes.ChunkLengths; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.compare.FloatComparisons; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.FloatArraySource; import io.deephaven.chunk.FloatChunk; import io.deephaven.chunk.Chunk; @@ -26,7 +26,7 @@ public class FloatPercentileTypeHelper implements SsmChunkedPercentileOperator.P private final double percentile; private final FloatArraySource resultColumn; - FloatPercentileTypeHelper(double percentile, ArrayBackedColumnSource resultColumn) { + FloatPercentileTypeHelper(double percentile, WritableColumnSource resultColumn) { this.percentile = percentile; // region resultColumn this.resultColumn = (FloatArraySource) resultColumn; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/FloatPercentileTypeMedianHelper.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/FloatPercentileTypeMedianHelper.java index fc2c9df09da..0a0921bf5b7 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/FloatPercentileTypeMedianHelper.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/FloatPercentileTypeMedianHelper.java @@ -3,7 +3,7 @@ */ package io.deephaven.engine.table.impl.by.ssmpercentile; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.sources.FloatArraySource; import io.deephaven.engine.table.impl.ssms.FloatSegmentedSortedMultiset; import 
io.deephaven.engine.table.impl.ssms.SegmentedSortedMultiSet; @@ -14,7 +14,7 @@ public class FloatPercentileTypeMedianHelper extends FloatPercentileTypeHelper { private final double percentile; private final FloatArraySource resultColumn; - FloatPercentileTypeMedianHelper(double percentile, ArrayBackedColumnSource resultColumn) { + FloatPercentileTypeMedianHelper(double percentile, WritableColumnSource resultColumn) { super(percentile, resultColumn); this.percentile = percentile; this.resultColumn = (FloatArraySource)resultColumn; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/IntPercentileTypeHelper.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/IntPercentileTypeHelper.java index b639cb8aa14..a90e351f97b 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/IntPercentileTypeHelper.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/IntPercentileTypeHelper.java @@ -10,8 +10,8 @@ import io.deephaven.chunk.attributes.ChunkLengths; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.compare.IntComparisons; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.IntegerArraySource; import io.deephaven.chunk.IntChunk; import io.deephaven.chunk.Chunk; @@ -26,7 +26,7 @@ public class IntPercentileTypeHelper implements SsmChunkedPercentileOperator.Per private final double percentile; private final IntegerArraySource resultColumn; - IntPercentileTypeHelper(double percentile, ArrayBackedColumnSource resultColumn) { + IntPercentileTypeHelper(double percentile, WritableColumnSource resultColumn) { this.percentile = percentile; // region resultColumn this.resultColumn = (IntegerArraySource) resultColumn; diff --git 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/IntPercentileTypeMedianHelper.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/IntPercentileTypeMedianHelper.java index 9a36f19cab3..da6c7eb8a51 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/IntPercentileTypeMedianHelper.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/IntPercentileTypeMedianHelper.java @@ -8,7 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmpercentile; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.sources.DoubleArraySource; import io.deephaven.engine.table.impl.ssms.IntSegmentedSortedMultiset; import io.deephaven.engine.table.impl.ssms.SegmentedSortedMultiSet; @@ -19,7 +19,7 @@ public class IntPercentileTypeMedianHelper extends IntPercentileTypeHelper { private final double percentile; private final DoubleArraySource resultColumn; - IntPercentileTypeMedianHelper(double percentile, ArrayBackedColumnSource resultColumn) { + IntPercentileTypeMedianHelper(double percentile, WritableColumnSource resultColumn) { super(percentile, null); this.percentile = percentile; // region resultColumn diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/LongPercentileTypeHelper.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/LongPercentileTypeHelper.java index c44ae0c8f69..721d929a179 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/LongPercentileTypeHelper.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/LongPercentileTypeHelper.java @@ -10,8 +10,8 @@ import io.deephaven.chunk.attributes.ChunkLengths; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.table.WritableColumnSource; import 
io.deephaven.util.compare.LongComparisons; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.LongArraySource; import io.deephaven.chunk.LongChunk; import io.deephaven.chunk.Chunk; @@ -26,7 +26,7 @@ public class LongPercentileTypeHelper implements SsmChunkedPercentileOperator.Pe private final double percentile; private final LongArraySource resultColumn; - LongPercentileTypeHelper(double percentile, ArrayBackedColumnSource resultColumn) { + LongPercentileTypeHelper(double percentile, WritableColumnSource resultColumn) { this.percentile = percentile; // region resultColumn this.resultColumn = (LongArraySource) resultColumn; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/LongPercentileTypeMedianHelper.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/LongPercentileTypeMedianHelper.java index 55464b6a443..412be9e4ce2 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/LongPercentileTypeMedianHelper.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/LongPercentileTypeMedianHelper.java @@ -8,7 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmpercentile; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.sources.DoubleArraySource; import io.deephaven.engine.table.impl.ssms.LongSegmentedSortedMultiset; import io.deephaven.engine.table.impl.ssms.SegmentedSortedMultiSet; @@ -19,7 +19,7 @@ public class LongPercentileTypeMedianHelper extends LongPercentileTypeHelper { private final double percentile; private final DoubleArraySource resultColumn; - LongPercentileTypeMedianHelper(double percentile, ArrayBackedColumnSource resultColumn) { + LongPercentileTypeMedianHelper(double percentile, WritableColumnSource resultColumn) { super(percentile, null); 
this.percentile = percentile; // region resultColumn diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/ObjectPercentileTypeHelper.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/ObjectPercentileTypeHelper.java index 17cc56a2cf5..ed17fe05e91 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/ObjectPercentileTypeHelper.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/ObjectPercentileTypeHelper.java @@ -12,8 +12,8 @@ import io.deephaven.chunk.attributes.ChunkLengths; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.compare.ObjectComparisons; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.ObjectArraySource; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.Chunk; @@ -27,7 +27,7 @@ public class ObjectPercentileTypeHelper implements SsmChunkedPercentileOperator. 
private final double percentile; private final ObjectArraySource resultColumn; - ObjectPercentileTypeHelper(double percentile, ArrayBackedColumnSource resultColumn) { + ObjectPercentileTypeHelper(double percentile, WritableColumnSource resultColumn) { this.percentile = percentile; // region resultColumn this.resultColumn = (ObjectArraySource) resultColumn; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/ShortPercentileTypeHelper.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/ShortPercentileTypeHelper.java index 888b92cfc11..636afef0114 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/ShortPercentileTypeHelper.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/ShortPercentileTypeHelper.java @@ -10,8 +10,8 @@ import io.deephaven.chunk.attributes.ChunkLengths; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.compare.ShortComparisons; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.ShortArraySource; import io.deephaven.chunk.ShortChunk; import io.deephaven.chunk.Chunk; @@ -26,7 +26,7 @@ public class ShortPercentileTypeHelper implements SsmChunkedPercentileOperator.P private final double percentile; private final ShortArraySource resultColumn; - ShortPercentileTypeHelper(double percentile, ArrayBackedColumnSource resultColumn) { + ShortPercentileTypeHelper(double percentile, WritableColumnSource resultColumn) { this.percentile = percentile; // region resultColumn this.resultColumn = (ShortArraySource) resultColumn; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/ShortPercentileTypeMedianHelper.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/ShortPercentileTypeMedianHelper.java index 1f7db381607..874c529c14a 
100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/ShortPercentileTypeMedianHelper.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/ShortPercentileTypeMedianHelper.java @@ -8,7 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmpercentile; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.sources.DoubleArraySource; import io.deephaven.engine.table.impl.ssms.ShortSegmentedSortedMultiset; import io.deephaven.engine.table.impl.ssms.SegmentedSortedMultiSet; @@ -19,7 +19,7 @@ public class ShortPercentileTypeMedianHelper extends ShortPercentileTypeHelper { private final double percentile; private final DoubleArraySource resultColumn; - ShortPercentileTypeMedianHelper(double percentile, ArrayBackedColumnSource resultColumn) { + ShortPercentileTypeMedianHelper(double percentile, WritableColumnSource resultColumn) { super(percentile, null); this.percentile = percentile; // region resultColumn diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/SsmChunkedPercentileOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/SsmChunkedPercentileOperator.java index cb43a26378b..1af2443f822 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/SsmChunkedPercentileOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmpercentile/SsmChunkedPercentileOperator.java @@ -10,6 +10,7 @@ import io.deephaven.configuration.Configuration; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.time.DateTime; import io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator; import io.deephaven.engine.table.impl.sources.*; @@ -29,7 +30,7 
@@ public class SsmChunkedPercentileOperator implements IterativeChunkedAggregationOperator { private static final int NODE_SIZE = Configuration.getInstance().getIntegerWithDefault("SsmChunkedMinMaxOperator.nodeSize", 4096); - private final ArrayBackedColumnSource internalResult; + private final WritableColumnSource internalResult; private final ColumnSource externalResult; /** * Even slots hold the low values, odd slots hold the high values. @@ -84,7 +85,7 @@ public SsmChunkedPercentileOperator(Class type, double percentile, boolean av } private static PercentileTypeHelper makeTypeHelper(ChunkType chunkType, Class type, double percentile, - boolean averageEvenlyDivided, ArrayBackedColumnSource resultColumn) { + boolean averageEvenlyDivided, WritableColumnSource resultColumn) { if (averageEvenlyDivided) { switch (chunkType) { // for things that are not int, long, double, or float we do not actually average the median; @@ -138,7 +139,7 @@ private static PercentileTypeHelper makeTypeHelper(ChunkType chunkType, Class @NotNull private static PercentileTypeHelper makeObjectHelper(Class type, double percentile, - ArrayBackedColumnSource resultColumn) { + WritableColumnSource resultColumn) { if (type == Boolean.class) { return new BooleanPercentileTypeHelper(percentile, resultColumn); } else if (type == DateTime.class) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/snapshot/SnapshotInternalListener.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/snapshot/SnapshotInternalListener.java index 2775ae34c0f..614aa53d42c 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/snapshot/SnapshotInternalListener.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/snapshot/SnapshotInternalListener.java @@ -10,7 +10,6 @@ import io.deephaven.engine.table.impl.LazySnapshotTable; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.impl.TableUpdateImpl; -import 
io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.SingleValueColumnSource; import java.util.Map; @@ -22,7 +21,7 @@ public class SnapshotInternalListener extends BaseTable.ListenerImpl { private long snapshotPrevLength; private final QueryTable result; private final Map> resultTriggerColumns; - private final Map> resultBaseColumns; + private final Map> resultBaseColumns; private final Map> triggerStampColumns; private final Map> snapshotDataColumns; private final TrackingWritableRowSet resultRowSet; @@ -32,7 +31,7 @@ public SnapshotInternalListener(QueryTable triggerTable, Table snapshotTable, QueryTable result, Map> resultTriggerColumns, - Map> resultBaseColumns, + Map> resultBaseColumns, TrackingWritableRowSet resultRowSet) { super("snapshot " + result.getColumnSourceMap().keySet(), triggerTable, result); this.triggerTable = triggerTable; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/LongMegaMergeKernel.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/LongMegaMergeKernel.java index c27d418351f..82034f0178d 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/LongMegaMergeKernel.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/LongMegaMergeKernel.java @@ -6,9 +6,9 @@ import io.deephaven.chunk.attributes.Any; import io.deephaven.chunk.attributes.Indices; import io.deephaven.engine.table.Context; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.SortingOrder; import io.deephaven.engine.table.impl.sort.megamerge.*; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.LongArraySource; import io.deephaven.chunk.*; @@ -82,7 +82,7 @@ static LongMegaMergeKernel valuesDestinationSource, + void merge(LongArraySource indexDestinationSource, WritableColumnSource valuesDestinationSource, long destinationOffset, 
long destinationSize, LongChunk indexKeys, Chunk valuesToMerge); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/ByteLongMegaMergeDescendingKernel.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/ByteLongMegaMergeDescendingKernel.java index 4ad04923f62..5ba94a7fe7e 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/ByteLongMegaMergeDescendingKernel.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/ByteLongMegaMergeDescendingKernel.java @@ -8,10 +8,10 @@ */ package io.deephaven.engine.table.impl.sort.megamerge; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.compare.ByteComparisons; import io.deephaven.engine.table.impl.sort.LongMegaMergeKernel; import io.deephaven.engine.table.impl.sort.timsort.TimsortUtils; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.ByteArraySource; import io.deephaven.engine.table.impl.sources.LongArraySource; import io.deephaven.chunk.attributes.Any; @@ -32,7 +32,7 @@ public static class ByteLongMegaMergeDescendingKernelContext valuesDestinationSource, + public void merge(LongArraySource indexDestinationSource, WritableColumnSource valuesDestinationSource, long destinationOffset, long destinationSize, LongChunk indexKeys, Chunk valuesToMerge) { ByteLongMegaMergeDescendingKernel.merge(indexDestinationSource, (ByteArraySource) valuesDestinationSource, diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/ByteLongMegaMergeKernel.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/ByteLongMegaMergeKernel.java index e1a9ef02f99..eabc2ac4e44 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/ByteLongMegaMergeKernel.java +++ 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/ByteLongMegaMergeKernel.java @@ -8,10 +8,10 @@ */ package io.deephaven.engine.table.impl.sort.megamerge; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.compare.ByteComparisons; import io.deephaven.engine.table.impl.sort.LongMegaMergeKernel; import io.deephaven.engine.table.impl.sort.timsort.TimsortUtils; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.ByteArraySource; import io.deephaven.engine.table.impl.sources.LongArraySource; import io.deephaven.chunk.attributes.Any; @@ -32,7 +32,7 @@ public static class ByteLongMegaMergeKernelContext valuesDestinationSource, + public void merge(LongArraySource indexDestinationSource, WritableColumnSource valuesDestinationSource, long destinationOffset, long destinationSize, LongChunk indexKeys, Chunk valuesToMerge) { ByteLongMegaMergeKernel.merge(indexDestinationSource, (ByteArraySource) valuesDestinationSource, diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/CharLongMegaMergeDescendingKernel.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/CharLongMegaMergeDescendingKernel.java index 337e0af3e9d..9a3b1d1ec4f 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/CharLongMegaMergeDescendingKernel.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/CharLongMegaMergeDescendingKernel.java @@ -3,10 +3,10 @@ */ package io.deephaven.engine.table.impl.sort.megamerge; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.compare.CharComparisons; import io.deephaven.engine.table.impl.sort.LongMegaMergeKernel; import io.deephaven.engine.table.impl.sort.timsort.TimsortUtils; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import 
io.deephaven.engine.table.impl.sources.CharacterArraySource; import io.deephaven.engine.table.impl.sources.LongArraySource; import io.deephaven.chunk.attributes.Any; @@ -27,7 +27,7 @@ public static class CharLongMegaMergeDescendingKernelContext valuesDestinationSource, + public void merge(LongArraySource indexDestinationSource, WritableColumnSource valuesDestinationSource, long destinationOffset, long destinationSize, LongChunk indexKeys, Chunk valuesToMerge) { CharLongMegaMergeDescendingKernel.merge(indexDestinationSource, (CharacterArraySource) valuesDestinationSource, diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/CharLongMegaMergeKernel.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/CharLongMegaMergeKernel.java index 872c9cd8011..24a3b80fdcd 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/CharLongMegaMergeKernel.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/CharLongMegaMergeKernel.java @@ -3,10 +3,10 @@ */ package io.deephaven.engine.table.impl.sort.megamerge; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.compare.CharComparisons; import io.deephaven.engine.table.impl.sort.LongMegaMergeKernel; import io.deephaven.engine.table.impl.sort.timsort.TimsortUtils; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.CharacterArraySource; import io.deephaven.engine.table.impl.sources.LongArraySource; import io.deephaven.chunk.attributes.Any; @@ -27,7 +27,7 @@ public static class CharLongMegaMergeKernelContext valuesDestinationSource, + public void merge(LongArraySource indexDestinationSource, WritableColumnSource valuesDestinationSource, long destinationOffset, long destinationSize, LongChunk indexKeys, Chunk valuesToMerge) { CharLongMegaMergeKernel.merge(indexDestinationSource, (CharacterArraySource) valuesDestinationSource, 
diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/DoubleLongMegaMergeDescendingKernel.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/DoubleLongMegaMergeDescendingKernel.java index db14a207e96..17bda359247 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/DoubleLongMegaMergeDescendingKernel.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/DoubleLongMegaMergeDescendingKernel.java @@ -8,10 +8,10 @@ */ package io.deephaven.engine.table.impl.sort.megamerge; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.compare.DoubleComparisons; import io.deephaven.engine.table.impl.sort.LongMegaMergeKernel; import io.deephaven.engine.table.impl.sort.timsort.TimsortUtils; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.DoubleArraySource; import io.deephaven.engine.table.impl.sources.LongArraySource; import io.deephaven.chunk.attributes.Any; @@ -32,7 +32,7 @@ public static class DoubleLongMegaMergeDescendingKernelContext valuesDestinationSource, + public void merge(LongArraySource indexDestinationSource, WritableColumnSource valuesDestinationSource, long destinationOffset, long destinationSize, LongChunk indexKeys, Chunk valuesToMerge) { DoubleLongMegaMergeDescendingKernel.merge(indexDestinationSource, (DoubleArraySource) valuesDestinationSource, diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/DoubleLongMegaMergeKernel.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/DoubleLongMegaMergeKernel.java index e2a4884a1a9..b5072610b93 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/DoubleLongMegaMergeKernel.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/DoubleLongMegaMergeKernel.java @@ -8,10 +8,10 @@ */ package 
io.deephaven.engine.table.impl.sort.megamerge; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.compare.DoubleComparisons; import io.deephaven.engine.table.impl.sort.LongMegaMergeKernel; import io.deephaven.engine.table.impl.sort.timsort.TimsortUtils; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.DoubleArraySource; import io.deephaven.engine.table.impl.sources.LongArraySource; import io.deephaven.chunk.attributes.Any; @@ -32,7 +32,7 @@ public static class DoubleLongMegaMergeKernelContext valuesDestinationSource, + public void merge(LongArraySource indexDestinationSource, WritableColumnSource valuesDestinationSource, long destinationOffset, long destinationSize, LongChunk indexKeys, Chunk valuesToMerge) { DoubleLongMegaMergeKernel.merge(indexDestinationSource, (DoubleArraySource) valuesDestinationSource, diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/FloatLongMegaMergeDescendingKernel.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/FloatLongMegaMergeDescendingKernel.java index 67fdaf18917..98cb3884632 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/FloatLongMegaMergeDescendingKernel.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/FloatLongMegaMergeDescendingKernel.java @@ -8,10 +8,10 @@ */ package io.deephaven.engine.table.impl.sort.megamerge; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.compare.FloatComparisons; import io.deephaven.engine.table.impl.sort.LongMegaMergeKernel; import io.deephaven.engine.table.impl.sort.timsort.TimsortUtils; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.FloatArraySource; import io.deephaven.engine.table.impl.sources.LongArraySource; import io.deephaven.chunk.attributes.Any; @@ -32,7 
+32,7 @@ public static class FloatLongMegaMergeDescendingKernelContext valuesDestinationSource, + public void merge(LongArraySource indexDestinationSource, WritableColumnSource valuesDestinationSource, long destinationOffset, long destinationSize, LongChunk indexKeys, Chunk valuesToMerge) { FloatLongMegaMergeDescendingKernel.merge(indexDestinationSource, (FloatArraySource) valuesDestinationSource, diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/FloatLongMegaMergeKernel.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/FloatLongMegaMergeKernel.java index 4f692063e41..f38d35fc7bf 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/FloatLongMegaMergeKernel.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/FloatLongMegaMergeKernel.java @@ -8,10 +8,10 @@ */ package io.deephaven.engine.table.impl.sort.megamerge; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.compare.FloatComparisons; import io.deephaven.engine.table.impl.sort.LongMegaMergeKernel; import io.deephaven.engine.table.impl.sort.timsort.TimsortUtils; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.FloatArraySource; import io.deephaven.engine.table.impl.sources.LongArraySource; import io.deephaven.chunk.attributes.Any; @@ -32,7 +32,7 @@ public static class FloatLongMegaMergeKernelContext valuesDestinationSource, + public void merge(LongArraySource indexDestinationSource, WritableColumnSource valuesDestinationSource, long destinationOffset, long destinationSize, LongChunk indexKeys, Chunk valuesToMerge) { FloatLongMegaMergeKernel.merge(indexDestinationSource, (FloatArraySource) valuesDestinationSource, diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/IntLongMegaMergeDescendingKernel.java 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/IntLongMegaMergeDescendingKernel.java index 7d43d4316f0..5051e7b3184 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/IntLongMegaMergeDescendingKernel.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/IntLongMegaMergeDescendingKernel.java @@ -8,10 +8,10 @@ */ package io.deephaven.engine.table.impl.sort.megamerge; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.compare.IntComparisons; import io.deephaven.engine.table.impl.sort.LongMegaMergeKernel; import io.deephaven.engine.table.impl.sort.timsort.TimsortUtils; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.IntegerArraySource; import io.deephaven.engine.table.impl.sources.LongArraySource; import io.deephaven.chunk.attributes.Any; @@ -32,7 +32,7 @@ public static class IntLongMegaMergeDescendingKernelContext valuesDestinationSource, + public void merge(LongArraySource indexDestinationSource, WritableColumnSource valuesDestinationSource, long destinationOffset, long destinationSize, LongChunk indexKeys, Chunk valuesToMerge) { IntLongMegaMergeDescendingKernel.merge(indexDestinationSource, (IntegerArraySource) valuesDestinationSource, diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/IntLongMegaMergeKernel.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/IntLongMegaMergeKernel.java index 037e81a6c3d..2045660a913 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/IntLongMegaMergeKernel.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/IntLongMegaMergeKernel.java @@ -8,10 +8,10 @@ */ package io.deephaven.engine.table.impl.sort.megamerge; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.compare.IntComparisons; 
import io.deephaven.engine.table.impl.sort.LongMegaMergeKernel; import io.deephaven.engine.table.impl.sort.timsort.TimsortUtils; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.IntegerArraySource; import io.deephaven.engine.table.impl.sources.LongArraySource; import io.deephaven.chunk.attributes.Any; @@ -32,7 +32,7 @@ public static class IntLongMegaMergeKernelContext valuesDestinationSource, + public void merge(LongArraySource indexDestinationSource, WritableColumnSource valuesDestinationSource, long destinationOffset, long destinationSize, LongChunk indexKeys, Chunk valuesToMerge) { IntLongMegaMergeKernel.merge(indexDestinationSource, (IntegerArraySource) valuesDestinationSource, diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/LongLongMegaMergeDescendingKernel.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/LongLongMegaMergeDescendingKernel.java index ec33be5c0b0..b268d6f7590 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/LongLongMegaMergeDescendingKernel.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/LongLongMegaMergeDescendingKernel.java @@ -8,10 +8,10 @@ */ package io.deephaven.engine.table.impl.sort.megamerge; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.compare.LongComparisons; import io.deephaven.engine.table.impl.sort.LongMegaMergeKernel; import io.deephaven.engine.table.impl.sort.timsort.TimsortUtils; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.LongArraySource; import io.deephaven.engine.table.impl.sources.LongArraySource; import io.deephaven.chunk.attributes.Any; @@ -32,7 +32,7 @@ public static class LongLongMegaMergeDescendingKernelContext valuesDestinationSource, + public void merge(LongArraySource indexDestinationSource, 
WritableColumnSource valuesDestinationSource, long destinationOffset, long destinationSize, LongChunk indexKeys, Chunk valuesToMerge) { LongLongMegaMergeDescendingKernel.merge(indexDestinationSource, (LongArraySource) valuesDestinationSource, diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/LongLongMegaMergeKernel.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/LongLongMegaMergeKernel.java index 12dfa2618b9..6b92a826819 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/LongLongMegaMergeKernel.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/LongLongMegaMergeKernel.java @@ -8,10 +8,10 @@ */ package io.deephaven.engine.table.impl.sort.megamerge; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.compare.LongComparisons; import io.deephaven.engine.table.impl.sort.LongMegaMergeKernel; import io.deephaven.engine.table.impl.sort.timsort.TimsortUtils; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.LongArraySource; import io.deephaven.engine.table.impl.sources.LongArraySource; import io.deephaven.chunk.attributes.Any; @@ -32,7 +32,7 @@ public static class LongLongMegaMergeKernelContext valuesDestinationSource, + public void merge(LongArraySource indexDestinationSource, WritableColumnSource valuesDestinationSource, long destinationOffset, long destinationSize, LongChunk indexKeys, Chunk valuesToMerge) { LongLongMegaMergeKernel.merge(indexDestinationSource, (LongArraySource) valuesDestinationSource, diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/ObjectLongMegaMergeDescendingKernel.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/ObjectLongMegaMergeDescendingKernel.java index 8457db2a8e0..aff52ab548a 100644 --- 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/ObjectLongMegaMergeDescendingKernel.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/ObjectLongMegaMergeDescendingKernel.java @@ -8,10 +8,10 @@ */ package io.deephaven.engine.table.impl.sort.megamerge; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.compare.ObjectComparisons; import io.deephaven.engine.table.impl.sort.LongMegaMergeKernel; import io.deephaven.engine.table.impl.sort.timsort.TimsortUtils; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.ObjectArraySource; import io.deephaven.engine.table.impl.sources.LongArraySource; import io.deephaven.chunk.attributes.Any; @@ -32,7 +32,7 @@ public static class ObjectLongMegaMergeDescendingKernelContext valuesDestinationSource, + public void merge(LongArraySource indexDestinationSource, WritableColumnSource valuesDestinationSource, long destinationOffset, long destinationSize, LongChunk indexKeys, Chunk valuesToMerge) { ObjectLongMegaMergeDescendingKernel.merge(indexDestinationSource, (ObjectArraySource) valuesDestinationSource, diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/ObjectLongMegaMergeKernel.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/ObjectLongMegaMergeKernel.java index 9076fe8ed5e..4e67a39a622 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/ObjectLongMegaMergeKernel.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/ObjectLongMegaMergeKernel.java @@ -8,10 +8,10 @@ */ package io.deephaven.engine.table.impl.sort.megamerge; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.compare.ObjectComparisons; import io.deephaven.engine.table.impl.sort.LongMegaMergeKernel; import io.deephaven.engine.table.impl.sort.timsort.TimsortUtils; 
-import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.ObjectArraySource; import io.deephaven.engine.table.impl.sources.LongArraySource; import io.deephaven.chunk.attributes.Any; @@ -32,7 +32,7 @@ public static class ObjectLongMegaMergeKernelContext valuesDestinationSource, + public void merge(LongArraySource indexDestinationSource, WritableColumnSource valuesDestinationSource, long destinationOffset, long destinationSize, LongChunk indexKeys, Chunk valuesToMerge) { ObjectLongMegaMergeKernel.merge(indexDestinationSource, (ObjectArraySource) valuesDestinationSource, diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/ShortLongMegaMergeDescendingKernel.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/ShortLongMegaMergeDescendingKernel.java index ba7aa91cf3e..179548e1b97 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/ShortLongMegaMergeDescendingKernel.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/ShortLongMegaMergeDescendingKernel.java @@ -8,10 +8,10 @@ */ package io.deephaven.engine.table.impl.sort.megamerge; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.compare.ShortComparisons; import io.deephaven.engine.table.impl.sort.LongMegaMergeKernel; import io.deephaven.engine.table.impl.sort.timsort.TimsortUtils; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.ShortArraySource; import io.deephaven.engine.table.impl.sources.LongArraySource; import io.deephaven.chunk.attributes.Any; @@ -32,7 +32,7 @@ public static class ShortLongMegaMergeDescendingKernelContext valuesDestinationSource, + public void merge(LongArraySource indexDestinationSource, WritableColumnSource valuesDestinationSource, long destinationOffset, long destinationSize, LongChunk indexKeys, Chunk 
valuesToMerge) { ShortLongMegaMergeDescendingKernel.merge(indexDestinationSource, (ShortArraySource) valuesDestinationSource, diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/ShortLongMegaMergeKernel.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/ShortLongMegaMergeKernel.java index d75f8cdf7c8..00511c176d3 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/ShortLongMegaMergeKernel.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sort/megamerge/ShortLongMegaMergeKernel.java @@ -8,10 +8,10 @@ */ package io.deephaven.engine.table.impl.sort.megamerge; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.util.compare.ShortComparisons; import io.deephaven.engine.table.impl.sort.LongMegaMergeKernel; import io.deephaven.engine.table.impl.sort.timsort.TimsortUtils; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.ShortArraySource; import io.deephaven.engine.table.impl.sources.LongArraySource; import io.deephaven.chunk.attributes.Any; @@ -32,7 +32,7 @@ public static class ShortLongMegaMergeKernelContext valuesDestinationSource, + public void merge(LongArraySource indexDestinationSource, WritableColumnSource valuesDestinationSource, long destinationOffset, long destinationSize, LongChunk indexKeys, Chunk valuesToMerge) { ShortLongMegaMergeKernel.merge(indexDestinationSource, (ShortArraySource) valuesDestinationSource, diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ArrayBackedColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ArrayBackedColumnSource.java index 1c3c8343ae7..f7e68e43fec 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ArrayBackedColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ArrayBackedColumnSource.java @@ -8,7 
+8,6 @@ import io.deephaven.engine.rowset.chunkattributes.RowKeys; import io.deephaven.util.type.ArrayTypeUtils; import io.deephaven.time.DateTime; -import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.*; import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.rowset.RowSequence; @@ -31,7 +30,6 @@ import io.deephaven.qst.type.InstantType; import io.deephaven.qst.type.StringType; import io.deephaven.util.SoftRecycler; -import org.apache.commons.lang3.mutable.MutableInt; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; @@ -99,7 +97,7 @@ public abstract class ArrayBackedColumnSource () -> new long[IN_USE_BLOCK_SIZE], block -> Arrays.fill(block, 0)); - public static ArrayBackedColumnSource from(Array array) { + public static WritableColumnSource from(Array array) { return array.walk(new ArrayAdapter<>()).getOut(); } @@ -174,10 +172,10 @@ public void set(long key, short value) { * @param componentType the component type of the resulting column source * @return an in-memory column source with the requested data */ - public static ArrayBackedColumnSource getMemoryColumnSource(@NotNull final Collection data, + public static WritableColumnSource getMemoryColumnSource(@NotNull final Collection data, @NotNull final Class dataType, @Nullable final Class componentType) { - final ArrayBackedColumnSource result = getMemoryColumnSource(data.size(), dataType, componentType); + final WritableColumnSource result = getMemoryColumnSource(data.size(), dataType, componentType); long i = 0; for (T o : data) { result.set(i++, o); @@ -193,10 +191,10 @@ public static ArrayBackedColumnSource getMemoryColumnSource(@NotNull fina * @param componentType the component type of the resulting column source * @return an in-memory column source with the requested data */ - public static ArrayBackedColumnSource getMemoryColumnSource(@NotNull final T[] data, + public static WritableColumnSource 
getMemoryColumnSource(@NotNull final T[] data, @NotNull final Class dataType, @Nullable final Class componentType) { - final ArrayBackedColumnSource result = getMemoryColumnSource(data.length, dataType, componentType); + final WritableColumnSource result = getMemoryColumnSource(data.length, dataType, componentType); try (final FillFromContext context = result.makeFillFromContext(data.length); final RowSequence range = RowSequenceFactory.forRange(0, data.length - 1)) { result.fillFromChunk(context, ObjectChunk.chunkWrap(data), range); @@ -324,8 +322,8 @@ public static ArrayBackedColumnSource getMemoryColumnSource(@NotNull final * the epoch * @return an in-memory column source with the requested data */ - public static ArrayBackedColumnSource getDateTimeMemoryColumnSource(LongChunk data) { - final ArrayBackedColumnSource result = new DateTimeArraySource(); + public static WritableColumnSource getDateTimeMemoryColumnSource(LongChunk data) { + final WritableColumnSource result = new DateTimeArraySource(); result.ensureCapacity(data.size()); for (int ii = 0; ii < data.size(); ++ii) { result.set(ii, data.get(ii)); @@ -340,8 +338,8 @@ public static ArrayBackedColumnSource getDateTimeMemoryColumnSource(Lo * the epoch * @return an in-memory column source with the requested data */ - public static ArrayBackedColumnSource getDateTimeMemoryColumnSource(@NotNull final long[] data) { - final ArrayBackedColumnSource result = new DateTimeArraySource(); + public static WritableColumnSource getDateTimeMemoryColumnSource(@NotNull final long[] data) { + final WritableColumnSource result = new DateTimeArraySource(); result.ensureCapacity(data.length); final WritableColumnSource asLong = (WritableColumnSource) result.reinterpret(long.class); try (final FillFromContext context = asLong.makeFillFromContext(data.length); @@ -375,12 +373,12 @@ public static ArrayBackedColumnSource getMemoryColumnSource(@NotNull fina * @param the type parameter for the ColumnSource's type * @return an 
in-memory column source of the requested type */ - public static ArrayBackedColumnSource getMemoryColumnSource(final long size, + public static WritableColumnSource getMemoryColumnSource(final long size, @NotNull final Class dataType) { return getMemoryColumnSource(size, dataType, null); } - public static ArrayBackedColumnSource getMemoryColumnSource(@NotNull final Class dataType, + public static WritableColumnSource getMemoryColumnSource(@NotNull final Class dataType, @Nullable final Class componentType) { return getMemoryColumnSource(0, dataType, componentType); } @@ -394,9 +392,9 @@ public static ArrayBackedColumnSource getMemoryColumnSource(@NotNull fina * @param the type parameter for the ColumnSource's type * @return an in-memory column source of the requested type */ - public static ArrayBackedColumnSource getMemoryColumnSource(final long size, + public static WritableColumnSource getMemoryColumnSource(final long size, @NotNull final Class dataType, @Nullable final Class componentType) { - final ArrayBackedColumnSource result; + final WritableColumnSource result; if (dataType == byte.class || dataType == Byte.class) { result = new ByteArraySource(); } else if (dataType == char.class || dataType == Character.class) { @@ -428,7 +426,7 @@ public static ArrayBackedColumnSource getMemoryColumnSource(final long si result.ensureCapacity(size); } // noinspection unchecked - return (ArrayBackedColumnSource) result; + return (WritableColumnSource) result; } @Override @@ -570,9 +568,9 @@ public Chunk getChunk(@NotNull final GetContext context, @NotNull final } private static class ArrayAdapter implements Array.Visitor, PrimitiveArray.Visitor { - private ArrayBackedColumnSource out; + private WritableColumnSource out; - public ArrayBackedColumnSource getOut() { + public WritableColumnSource getOut() { return Objects.requireNonNull(out); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeArraySource.java 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeArraySource.java index 623aa639e5a..1f79dbd4d39 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeArraySource.java @@ -6,7 +6,6 @@ import io.deephaven.base.verify.Require; import io.deephaven.chunk.Chunk; import io.deephaven.chunk.LongChunk; -import io.deephaven.chunk.ResettableWritableChunk; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.rowset.RowSequence; @@ -15,14 +14,16 @@ import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.WritableColumnSource; +import io.deephaven.engine.table.WritableSourceWithPrepareForParallelPopulation; +import io.deephaven.engine.table.impl.util.ShiftData; import io.deephaven.time.DateTime; -import io.deephaven.util.SoftRecycler; import org.jetbrains.annotations.NotNull; import java.time.*; -public abstract class NanosBasedTimeArraySource extends ArraySourceHelper - implements WritableColumnSource, ConvertableTimeSource { +public abstract class NanosBasedTimeArraySource extends AbstractDeferredGroupingColumnSource + implements FillUnordered, ShiftData.ShiftCallback, WritableColumnSource, + InMemoryColumnSource, WritableSourceWithPrepareForParallelPopulation, ConvertableTimeSource { protected final LongArraySource nanoSource; @@ -91,61 +92,11 @@ public void startTrackingPrevValues() { nanoSource.startTrackingPrevValues(); } - @Override - long[] allocateNullFilledBlock(int size) { - return nanoSource.allocateNullFilledBlock(size); - } - - @Override - long[] allocateBlock(int size) { - return nanoSource.allocateBlock(size); - } - - @Override - void resetBlocks(long[][] newBlocks, long[][] newPrev) { - nanoSource.resetBlocks(newBlocks, newPrev); - } - - @Override - long[][] 
getPrevBlocks() { - return nanoSource.getPrevBlocks(); - } - - @Override - SoftRecycler getRecycler() { - return nanoSource.getRecycler(); - } - @Override public void ensureCapacity(long size, boolean nullFill) { nanoSource.ensureCapacity(size, nullFill); } - @Override - Object getBlock(int blockIndex) { - return nanoSource.getBlock(blockIndex); - } - - @Override - Object getPrevBlock(int blockIndex) { - return nanoSource.getPrevBlock(blockIndex); - } - - @Override - public boolean exposesChunkedBackingStore() { - return false; - } - - @Override - public long resetWritableChunkToBackingStore(@NotNull ResettableWritableChunk chunk, long position) { - throw new UnsupportedOperationException(); - } - - @Override - public long resetWritableChunkToBackingStoreSlice(@NotNull ResettableWritableChunk chunk, long position) { - throw new UnsupportedOperationException(); - } - @Override public void prepareForParallelPopulation(RowSet rowSet) { nanoSource.prepareForParallelPopulation(rowSet); @@ -182,41 +133,24 @@ public Chunk getPrevChunk(@NotNull GetContext context, @NotNull RowSeque } @Override - protected void fillSparseChunk( - @NotNull final WritableChunk destGeneric, - @NotNull final RowSequence indices) { - nanoSource.fillSparseChunk(destGeneric, indices, this::makeValue); - } - - @Override - protected void fillSparsePrevChunk( - @NotNull final WritableChunk destGeneric, - @NotNull final RowSequence indices) { - nanoSource.fillSparsePrevChunk(destGeneric, indices, this::makeValue); - } - - @Override - protected void fillSparseChunkUnordered( - @NotNull final WritableChunk destGeneric, - @NotNull final LongChunk indices) { - nanoSource.fillSparseChunkUnordered(destGeneric, indices, this::makeValue); - } - - @Override - protected void fillSparsePrevChunkUnordered( - @NotNull final WritableChunk destGeneric, - @NotNull final LongChunk indices) { - nanoSource.fillSparsePrevChunkUnordered(destGeneric, indices, this::makeValue); + public boolean providesFillUnordered() { + 
return true; } @Override - public void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk src) { - nanoSource.fillFromChunkByRanges(rowSequence, src, this::toNanos); + public void fillChunkUnordered( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys) { + nanoSource.fillSparseChunkUnordered(dest, keys, this::makeValue); } @Override - public void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src) { - nanoSource.fillFromChunkByKeys(rowSequence, src, this::toNanos); + public void fillPrevChunkUnordered( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull final LongChunk keys) { + nanoSource.fillSparsePrevChunkUnordered(dest, keys, this::makeValue); } @Override diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/deltaaware/DeltaAwareColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/deltaaware/DeltaAwareColumnSource.java index 73ee549104a..d830d01446a 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/deltaaware/DeltaAwareColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/deltaaware/DeltaAwareColumnSource.java @@ -585,7 +585,7 @@ public void startTrackingPrevValues() { throw new UnsupportedOperationException("Can't call startTrackingPrevValues() twice"); } deltaCapacity = INITIAL_DELTA_CAPACITY; - final ArrayBackedColumnSource delta = + final WritableColumnSource delta = ArrayBackedColumnSource.getMemoryColumnSource(deltaCapacity, getType(), null); this.delta = delta; deltaCapacityEnsurer = delta::ensureCapacity; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/BaseArrayBackedMutableTable.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/BaseArrayBackedMutableTable.java index f381458d2c3..8fdf20af2c5 100644 --- 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/BaseArrayBackedMutableTable.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/BaseArrayBackedMutableTable.java @@ -13,6 +13,7 @@ import io.deephaven.engine.table.ColumnDefinition; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.TableDefinition; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.util.config.InputTableStatusListener; @@ -76,8 +77,8 @@ private static boolean applicableForReadOnly(String attributeName) { return !Table.INPUT_TABLE_ATTRIBUTE.equals(attributeName); } - protected static Map> makeColumnSourceMap(TableDefinition definition) { - final Map> resultMap = new LinkedHashMap<>(); + protected static Map> makeColumnSourceMap(TableDefinition definition) { + final Map> resultMap = new LinkedHashMap<>(); for (final ColumnDefinition columnDefinition : definition.getColumns()) { resultMap.put(columnDefinition.getName(), ArrayBackedColumnSource.getMemoryColumnSource(0, columnDefinition.getDataType())); @@ -313,7 +314,7 @@ public void setRows(@NotNull Table defaultValues, int[] rowArray, Map> columnDefinitions = getTableDefinition().getColumns(); - final Map> sources = + final Map> sources = buildSourcesMap(valueArray.length, columnDefinitions); final String[] kabmtColumns = getTableDefinition().getColumnNames().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); @@ -330,7 +331,7 @@ public void setRows(@NotNull Table defaultValues, int[] rowArray, Map cs = Require.neqNull(entry.getValue(), "defaultValue column source: " + colName); - final ArrayBackedColumnSource dest = + final WritableColumnSource dest = Require.neqNull(sources.get(colName), "destination column source: " + colName); final RowSet defaultValuesRowSet = defaultValues.getRowSet(); @@ -363,7 +364,7 @@ public void setRows(@NotNull Table defaultValues, 
int[] rowArray, Map[] valueArray, boolean allowEdits, InputTableStatusListener listener) { final List> columnDefinitions = getTableDefinition().getColumns(); - final Map> sources = + final Map> sources = buildSourcesMap(valueArray.length, columnDefinitions); for (int rowNumber = 0; rowNumber < valueArray.length; rowNumber++) { @@ -381,11 +382,11 @@ public void addRows(Map[] valueArray, boolean allowEdits, InputT } @NotNull - private Map> buildSourcesMap(int capacity, + private Map> buildSourcesMap(int capacity, List> columnDefinitions) { - final Map> sources = new LinkedHashMap<>(); + final Map> sources = new LinkedHashMap<>(); for (final ColumnDefinition columnDefinition : columnDefinitions) { - ArrayBackedColumnSource cs = ArrayBackedColumnSource.getMemoryColumnSource( + WritableColumnSource cs = ArrayBackedColumnSource.getMemoryColumnSource( capacity, columnDefinition.getDataType()); // noinspection unchecked final ArrayBackedColumnSource memoryColumnSource = (ArrayBackedColumnSource) cs; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ColumnHolder.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ColumnHolder.java index 6c16a4ada32..9530a33de45 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ColumnHolder.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ColumnHolder.java @@ -17,6 +17,7 @@ import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.rowset.RowSequenceFactory; import io.deephaven.engine.table.ChunkSink; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.vector.ObjectVector; import io.deephaven.time.DateTime; import io.deephaven.api.util.NameValidator; @@ -171,7 +172,7 @@ private ColumnHolder(String name, boolean grouped, Class dataType, Class c * ColumnHolder where the official data type type does not match the data. 
* * @param name column name - * @param type abstract data type for the column + * @param dataType abstract data type for the column * @param grouped true if the column is grouped; false otherwise * @param chunkData column data */ @@ -285,7 +286,7 @@ public ColumnSource getColumnSource() { return ArrayBackedColumnSource.getDateTimeMemoryColumnSource(chunkData.asLongChunk()); } - final ArrayBackedColumnSource cs = ArrayBackedColumnSource.getMemoryColumnSource( + final WritableColumnSource cs = ArrayBackedColumnSource.getMemoryColumnSource( chunkData.size(), dataType, componentType); try (final ChunkSink.FillFromContext ffc = cs.makeFillFromContext(chunkData.size())) { cs.fillFromChunk(ffc, chunkData, RowSequenceFactory.forRange(0, chunkData.size() - 1)); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/DynamicTableWriter.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/DynamicTableWriter.java index ada940b16c3..7930b5e0cf9 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/DynamicTableWriter.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/DynamicTableWriter.java @@ -5,6 +5,7 @@ import io.deephaven.base.verify.Assert; import io.deephaven.engine.rowset.RowSetFactory; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.qst.column.header.ColumnHeader; import io.deephaven.qst.table.TableHeader; import io.deephaven.qst.type.Type; @@ -36,7 +37,7 @@ */ public class DynamicTableWriter implements TableWriter { private final UpdateSourceQueryTable table; - private final ArrayBackedColumnSource[] arrayColumnSources; + private final WritableColumnSource[] arrayColumnSources; private final String[] columnNames; private int allocatedSize; @@ -210,7 +211,7 @@ private void ensureCapacity(int row) { newSize = 2 * newSize; } - for (final ArrayBackedColumnSource arrayColumnSource : arrayColumnSources) { + for (final WritableColumnSource arrayColumnSource : 
arrayColumnSources) { if (arrayColumnSource != null) { arrayColumnSource.ensureCapacity(newSize); } @@ -366,7 +367,7 @@ private static Map> getSources( singleValueColumnSource.set(constantValues.get(columnNames[i])); sources.put(columnNames[i], singleValueColumnSource); } else { - ArrayBackedColumnSource source = + WritableColumnSource source = ArrayBackedColumnSource.getMemoryColumnSource(allocatedSize, columnTypes.apply(i)); sources.put(columnNames[i], source); @@ -389,7 +390,7 @@ private DynamicTableWriter(final Map> sources, final Map this.table = new UpdateSourceQueryTable(RowSetFactory.fromKeys().toTracking(), sources); final int nCols = sources.size();; this.columnNames = new String[nCols]; - this.arrayColumnSources = new ArrayBackedColumnSource[nCols]; + this.arrayColumnSources = new WritableColumnSource[nCols]; int ii = 0; for (Map.Entry> entry : sources.entrySet()) { final String columnName = columnNames[ii] = entry.getKey(); @@ -397,52 +398,57 @@ private DynamicTableWriter(final Map> sources, final Map if (constantValues.containsKey(columnName)) { continue; } - if (source instanceof ArrayBackedColumnSource) { - arrayColumnSources[ii] = (ArrayBackedColumnSource) source; + if (source instanceof WritableColumnSource) { + arrayColumnSources[ii] = (WritableColumnSource) source; } else { throw new IllegalStateException( "Expected ArrayBackedColumnSource, instead found " + source.getClass()); } factoryMap.put(columnName, - (currentRow) -> createRowSetter(source.getType(), (ArrayBackedColumnSource) source)); + (currentRow) -> createRowSetter(source.getType(), (WritableColumnSource) source)); ++ii; } UpdateGraphProcessor.DEFAULT.addSource(table); } - private RowSetterImpl createRowSetter(Class type, ArrayBackedColumnSource buffer) { + @SuppressWarnings("unchecked") + private RowSetterImpl createRowSetter(Class type, WritableColumnSource buffer) { + final RowSetterImpl result; if (type == boolean.class || type == Boolean.class) { - return new 
BooleanRowSetterImpl(buffer); + result = new BooleanRowSetterImpl((WritableColumnSource) buffer); } else if (type == byte.class || type == Byte.class) { - return new ByteRowSetterImpl(buffer); + result = new ByteRowSetterImpl((WritableColumnSource) buffer); } else if (type == char.class || type == Character.class) { - return new CharRowSetterImpl(buffer); + result = new CharRowSetterImpl((WritableColumnSource) buffer); } else if (type == double.class || type == Double.class) { - return new DoubleRowSetterImpl(buffer); + result = new DoubleRowSetterImpl((WritableColumnSource) buffer); } else if (type == float.class || type == Float.class) { - return new FloatRowSetterImpl(buffer); + result = new FloatRowSetterImpl((WritableColumnSource) buffer); } else if (type == int.class || type == Integer.class) { - return new IntRowSetterImpl(buffer); + result = new IntRowSetterImpl((WritableColumnSource) buffer); } else if (type == long.class || type == Long.class) { - return new LongRowSetterImpl(buffer); + result = new LongRowSetterImpl((WritableColumnSource) buffer); } else if (type == short.class || type == Short.class) { - return new ShortRowSetterImpl(buffer); + result = new ShortRowSetterImpl((WritableColumnSource) buffer); } else if (CharSequence.class.isAssignableFrom(type)) { - return new StringRowSetterImpl(buffer); + result = new StringRowSetterImpl((WritableColumnSource) buffer); + } else { + result = new ObjectRowSetterImpl<>(buffer, type); } - return new ObjectRowSetterImpl(buffer, type); + + return (RowSetterImpl) result; } public interface PermissiveRowSetter extends RowSetter { void setPermissive(T value); } - private static abstract class RowSetterImpl implements PermissiveRowSetter { - protected final ArrayBackedColumnSource columnSource; + private static abstract class RowSetterImpl implements PermissiveRowSetter { + protected final WritableColumnSource columnSource; protected int row; - private final Class type; + private final Class type; - 
RowSetterImpl(ArrayBackedColumnSource columnSource, Class type) { + RowSetterImpl(WritableColumnSource columnSource, Class type) { this.columnSource = columnSource; this.type = type; } @@ -455,17 +461,17 @@ void setRow(int row) { abstract void writeToColumnSource(); @Override - public Class getType() { + public Class getType() { return type; } @Override - public void set(Object value) { + public void set(T value) { throw new UnsupportedOperationException(); } @Override - public void setPermissive(Object value) { + public void setPermissive(T value) { set(value); } @@ -510,16 +516,16 @@ public void setShort(short value) { } } - private static class BooleanRowSetterImpl extends RowSetterImpl { - BooleanRowSetterImpl(ArrayBackedColumnSource array) { + private static class BooleanRowSetterImpl extends RowSetterImpl { + BooleanRowSetterImpl(WritableColumnSource array) { super(array, Boolean.class); } Boolean pendingBoolean; @Override - public void set(Object value) { - setBoolean(value == null ? QueryConstants.NULL_BOOLEAN : (Boolean) value); + public void set(Boolean value) { + setBoolean(value == null ? QueryConstants.NULL_BOOLEAN : value); } @Override @@ -529,25 +535,24 @@ public void setBoolean(Boolean value) { @Override void writeToColumnSource() { - // noinspection unchecked columnSource.set(row, pendingBoolean); } } - private static class ByteRowSetterImpl extends RowSetterImpl { - ByteRowSetterImpl(ArrayBackedColumnSource array) { + private static class ByteRowSetterImpl extends RowSetterImpl { + ByteRowSetterImpl(WritableColumnSource array) { super(array, byte.class); } byte pendingByte = QueryConstants.NULL_BYTE; @Override - public void set(Object value) { - setByte(value == null ? QueryConstants.NULL_BYTE : (Byte) value); + public void set(Byte value) { + setByte(value == null ? QueryConstants.NULL_BYTE : value); } @Override - public void setPermissive(Object value) { + public void setPermissive(Byte value) { setByte(value == null ? 
QueryConstants.NULL_BYTE : ((Number) value).byteValue()); } @@ -562,16 +567,16 @@ void writeToColumnSource() { } } - private static class CharRowSetterImpl extends RowSetterImpl { - CharRowSetterImpl(ArrayBackedColumnSource array) { + private static class CharRowSetterImpl extends RowSetterImpl { + CharRowSetterImpl(WritableColumnSource array) { super(array, char.class); } char pendingChar = QueryConstants.NULL_CHAR; @Override - public void set(Object value) { - setChar(value == null ? QueryConstants.NULL_CHAR : (Character) value); + public void set(Character value) { + setChar(value == null ? QueryConstants.NULL_CHAR : value); } @Override @@ -585,20 +590,20 @@ void writeToColumnSource() { } } - private static class IntRowSetterImpl extends RowSetterImpl { - IntRowSetterImpl(ArrayBackedColumnSource array) { + private static class IntRowSetterImpl extends RowSetterImpl { + IntRowSetterImpl(WritableColumnSource array) { super(array, int.class); } int pendingInt = QueryConstants.NULL_INT; @Override - public void set(Object value) { - setInt(value == null ? QueryConstants.NULL_INT : (Integer) value); + public void set(Integer value) { + setInt(value == null ? QueryConstants.NULL_INT : value); } @Override - public void setPermissive(Object value) { + public void setPermissive(Integer value) { setInt(value == null ? QueryConstants.NULL_INT : ((Number) value).intValue()); } @@ -613,20 +618,20 @@ void writeToColumnSource() { } } - private static class DoubleRowSetterImpl extends RowSetterImpl { - DoubleRowSetterImpl(ArrayBackedColumnSource array) { + private static class DoubleRowSetterImpl extends RowSetterImpl { + DoubleRowSetterImpl(WritableColumnSource array) { super(array, double.class); } double pendingDouble = QueryConstants.NULL_DOUBLE; @Override - public void set(Object value) { - setDouble(value == null ? QueryConstants.NULL_DOUBLE : (Double) value); + public void set(Double value) { + setDouble(value == null ? 
QueryConstants.NULL_DOUBLE : value); } @Override - public void setPermissive(Object value) { + public void setPermissive(Double value) { setDouble(value == null ? QueryConstants.NULL_DOUBLE : ((Number) value).doubleValue()); } @@ -641,20 +646,20 @@ void writeToColumnSource() { } } - private static class FloatRowSetterImpl extends RowSetterImpl { - FloatRowSetterImpl(ArrayBackedColumnSource array) { + private static class FloatRowSetterImpl extends RowSetterImpl { + FloatRowSetterImpl(WritableColumnSource array) { super(array, float.class); } float pendingFloat = QueryConstants.NULL_FLOAT; @Override - public void set(Object value) { - setFloat(value == null ? QueryConstants.NULL_FLOAT : (Float) value); + public void set(Float value) { + setFloat(value == null ? QueryConstants.NULL_FLOAT : value); } @Override - public void setPermissive(Object value) { + public void setPermissive(Float value) { setFloat(value == null ? QueryConstants.NULL_FLOAT : ((Number) value).floatValue()); } @@ -669,20 +674,20 @@ void writeToColumnSource() { } } - private static class LongRowSetterImpl extends RowSetterImpl { - LongRowSetterImpl(ArrayBackedColumnSource array) { + private static class LongRowSetterImpl extends RowSetterImpl { + LongRowSetterImpl(WritableColumnSource array) { super(array, long.class); } long pendingLong = QueryConstants.NULL_LONG; @Override - public void set(Object value) { - setLong(value == null ? QueryConstants.NULL_LONG : (Long) value); + public void set(Long value) { + setLong(value == null ? QueryConstants.NULL_LONG : value); } @Override - public void setPermissive(Object value) { + public void setPermissive(Long value) { setLong(value == null ? 
QueryConstants.NULL_LONG : ((Number) value).longValue()); } @@ -697,20 +702,20 @@ void writeToColumnSource() { } } - private static class ShortRowSetterImpl extends RowSetterImpl { - ShortRowSetterImpl(ArrayBackedColumnSource array) { + private static class ShortRowSetterImpl extends RowSetterImpl { + ShortRowSetterImpl(WritableColumnSource array) { super(array, short.class); } short pendingShort = QueryConstants.NULL_SHORT; @Override - public void set(Object value) { - setShort(value == null ? QueryConstants.NULL_SHORT : (Short) value); + public void set(Short value) { + setShort(value == null ? QueryConstants.NULL_SHORT : value); } @Override - public void setPermissive(Object value) { + public void setPermissive(Short value) { setShort(value == null ? QueryConstants.NULL_SHORT : ((Number) value).shortValue()); } @@ -725,39 +730,33 @@ void writeToColumnSource() { } } - private static class ObjectRowSetterImpl extends RowSetterImpl { - ObjectRowSetterImpl(ArrayBackedColumnSource array, Class type) { + private static class ObjectRowSetterImpl extends RowSetterImpl { + ObjectRowSetterImpl(WritableColumnSource array, Class type) { super(array, type); } - Object pendingObject; + T pendingObject; @Override - public void set(Object value) { + public void set(T value) { pendingObject = value; } @Override void writeToColumnSource() { - // noinspection unchecked columnSource.set(row, pendingObject); } } - private static class StringRowSetterImpl extends ObjectRowSetterImpl { - StringRowSetterImpl(@NotNull final ArrayBackedColumnSource array) { + private static class StringRowSetterImpl extends ObjectRowSetterImpl { + StringRowSetterImpl(@NotNull final WritableColumnSource array) { super(array, String.class); } - - @Override - public void set(final Object value) { - super.set(value == null ? 
null : value.toString()); - } } private class DynamicTableRow implements Row { - private final RowSetterImpl[] setters; - private final Map columnToSetter; + private final RowSetterImpl[] setters; + private final Map> columnToSetter; private int row = lastSetterRow; private Row.Flags flags = Flags.SingleRow; @@ -769,8 +768,8 @@ private DynamicTableRow() { } @Override - public PermissiveRowSetter getSetter(final String name) { - final PermissiveRowSetter rowSetter = columnToSetter.get(name); + public PermissiveRowSetter getSetter(final String name) { + final PermissiveRowSetter rowSetter = columnToSetter.get(name); if (rowSetter == null) { if (table.getColumnSourceMap().containsKey(name)) { throw new RuntimeException("Column has a constant value, can not get setter " + name); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/FunctionGeneratedTableFactory.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/FunctionGeneratedTableFactory.java index 0941bc68a71..a4109422f9a 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/FunctionGeneratedTableFactory.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/FunctionGeneratedTableFactory.java @@ -83,7 +83,7 @@ private FunctionGeneratedTableFactory(final Function.Nullary

tableGenerat Table initialTable = tableGenerator.call(); for (Map.Entry> entry : initialTable.getColumnSourceMap().entrySet()) { ColumnSource columnSource = entry.getValue(); - final ArrayBackedColumnSource memoryColumnSource = ArrayBackedColumnSource.getMemoryColumnSource( + final WritableColumnSource memoryColumnSource = ArrayBackedColumnSource.getMemoryColumnSource( 0, columnSource.getType(), columnSource.getComponentType()); columns.put(entry.getKey(), memoryColumnSource); writableSources.put(entry.getKey(), memoryColumnSource); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/TableBuilder.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/TableBuilder.java index 11381cbca6f..b43660c00b1 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/TableBuilder.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/TableBuilder.java @@ -7,6 +7,7 @@ import io.deephaven.engine.table.ColumnDefinition; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.TableDefinition; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.util.type.TypeUtils; @@ -86,17 +87,17 @@ private void checkRow(Object[] items) { * @return the table */ public Table build() { - Map> map = new LinkedHashMap<>(); + Map> map = new LinkedHashMap<>(); for (ColumnDefinition columnDefinition : def.getColumns()) { - ArrayBackedColumnSource cs = ArrayBackedColumnSource.getMemoryColumnSource( + WritableColumnSource cs = ArrayBackedColumnSource.getMemoryColumnSource( rows.size(), columnDefinition.getDataType()); // noinspection unchecked - map.put(columnDefinition.getName(), (ArrayBackedColumnSource) cs); + map.put(columnDefinition.getName(), (WritableColumnSource) cs); } // Re-write column oriented int col = 0; - for (ArrayBackedColumnSource source : map.values()) 
{ + for (WritableColumnSource source : map.values()) { for (int row = 0; row < rowCount(); row++) { source.set(row, rows.get(row)[col]); } diff --git a/engine/table/src/main/java/io/deephaven/engine/util/TableTools.java b/engine/table/src/main/java/io/deephaven/engine/util/TableTools.java index f46a20afb28..3536f158415 100644 --- a/engine/table/src/main/java/io/deephaven/engine/util/TableTools.java +++ b/engine/table/src/main/java/io/deephaven/engine/util/TableTools.java @@ -373,7 +373,7 @@ public static String nullToNullString(Object obj) { * @return a Deephaven ColumnSource object */ public static ColumnSource colSource(Class clazz, Collection values) { - ArrayBackedColumnSource result = ArrayBackedColumnSource.getMemoryColumnSource(values.size(), clazz); + WritableColumnSource result = ArrayBackedColumnSource.getMemoryColumnSource(values.size(), clazz); int resultIndex = 0; for (T value : values) { result.set(resultIndex++, value); @@ -390,7 +390,7 @@ public static ColumnSource colSource(Class clazz, Collection values */ @SuppressWarnings("unchecked") public static ColumnSource objColSource(T... values) { - ArrayBackedColumnSource result = (ArrayBackedColumnSource) ArrayBackedColumnSource + WritableColumnSource result = (WritableColumnSource) ArrayBackedColumnSource .getMemoryColumnSource(values.length, values.getClass().getComponentType()); for (int i = 0; i < values.length; i++) { result.set(i, values[i]); @@ -405,7 +405,7 @@ public static ColumnSource objColSource(T... values) { * @return a Deephaven ColumnSource object */ public static ColumnSource colSource(long... values) { - ArrayBackedColumnSource result = + WritableColumnSource result = ArrayBackedColumnSource.getMemoryColumnSource(values.length, long.class); for (int i = 0; i < values.length; i++) { result.set(i, values[i]); @@ -420,7 +420,7 @@ public static ColumnSource colSource(long... values) { * @return a Deephaven ColumnSource object */ public static ColumnSource colSource(int... 
values) { - ArrayBackedColumnSource result = + WritableColumnSource result = ArrayBackedColumnSource.getMemoryColumnSource(values.length, int.class); for (int i = 0; i < values.length; i++) { result.set(i, values[i]); @@ -435,7 +435,7 @@ public static ColumnSource colSource(int... values) { * @return a Deephaven ColumnSource object */ public static ColumnSource colSource(short... values) { - ArrayBackedColumnSource result = + WritableColumnSource result = ArrayBackedColumnSource.getMemoryColumnSource(values.length, short.class); for (int i = 0; i < values.length; i++) { result.set(i, values[i]); @@ -450,7 +450,7 @@ public static ColumnSource colSource(short... values) { * @return a Deephaven ColumnSource object */ public static ColumnSource colSource(byte... values) { - ArrayBackedColumnSource result = + WritableColumnSource result = ArrayBackedColumnSource.getMemoryColumnSource(values.length, byte.class); for (int i = 0; i < values.length; i++) { result.set(i, values[i]); @@ -465,7 +465,7 @@ public static ColumnSource colSource(byte... values) { * @return a Deephaven ColumnSource object */ public static ColumnSource colSource(char... values) { - ArrayBackedColumnSource result = + WritableColumnSource result = ArrayBackedColumnSource.getMemoryColumnSource(values.length, char.class); for (int i = 0; i < values.length; i++) { result.set(i, values[i]); @@ -480,7 +480,7 @@ public static ColumnSource colSource(char... values) { * @return a Deephaven ColumnSource object */ public static ColumnSource colSource(double... values) { - ArrayBackedColumnSource result = + WritableColumnSource result = ArrayBackedColumnSource.getMemoryColumnSource(values.length, double.class); for (int i = 0; i < values.length; i++) { result.set(i, values[i]); @@ -495,7 +495,7 @@ public static ColumnSource colSource(double... values) { * @return a Deephaven ColumnSource object */ public static ColumnSource colSource(float... 
values) { - ArrayBackedColumnSource result = + WritableColumnSource result = ArrayBackedColumnSource.getMemoryColumnSource(values.length, float.class); for (int i = 0; i < values.length; i++) { result.set(i, values[i]); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractBooleanColumnSourceTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractBooleanColumnSourceTest.java index 634a3823801..0bde34b3dbe 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractBooleanColumnSourceTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractBooleanColumnSourceTest.java @@ -21,11 +21,8 @@ import io.deephaven.engine.rowset.RowSetBuilderSequential; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.chunkattributes.RowKeys; -import io.deephaven.engine.table.ChunkSink; -import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.WritableColumnSource; -import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.engine.table.impl.TestSourceSink; import io.deephaven.engine.updategraph.UpdateGraphProcessor; import org.jetbrains.annotations.NotNull; @@ -37,7 +34,6 @@ import java.util.Random; import static io.deephaven.util.QueryConstants.NULL_BOOLEAN; -import static junit.framework.TestCase.*; import static junit.framework.TestCase.assertEquals; public abstract class AbstractBooleanColumnSourceTest { diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/ArrayBackedColumnSourceTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/ArrayBackedColumnSourceTest.java index 992f06d04a9..51f5ba9119c 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/ArrayBackedColumnSourceTest.java +++ 
b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/ArrayBackedColumnSourceTest.java @@ -3,6 +3,7 @@ */ package io.deephaven.engine.table.impl.sources; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.time.DateTime; import io.deephaven.qst.array.Array; import io.deephaven.qst.array.GenericArray; @@ -84,7 +85,7 @@ enum MyCustomType { private static void check(PrimitiveType type, T... values) { PrimitiveArray array = PrimitiveArray.of(type, values); - ArrayBackedColumnSource columnSource = ArrayBackedColumnSource.from(array); + WritableColumnSource columnSource = ArrayBackedColumnSource.from(array); int ix = 0; for (T left : values) { assertThat(columnSource.get(ix++)).isEqualTo(left); @@ -98,7 +99,7 @@ private static void check(GenericType type, T... values) { private static void check(BiPredicate comparison, GenericType type, T... values) { GenericArray array = GenericArray.of(type, values); - ArrayBackedColumnSource columnSource = ArrayBackedColumnSource.from(array); + WritableColumnSource columnSource = ArrayBackedColumnSource.from(array); int ix = 0; for (T left : values) { assertThat(columnSource.get(ix++)).matches((Predicate) right -> comparison.test(left, right)); @@ -108,7 +109,7 @@ private static void check(BiPredicate comparison, GenericType private static void check(BiPredicate comparison, Type type, T... 
values) { Array array = Array.of(type, values); - ArrayBackedColumnSource columnSource = ArrayBackedColumnSource.from(array); + WritableColumnSource columnSource = ArrayBackedColumnSource.from(array); int ix = 0; for (T left : values) { assertThat(columnSource.get(ix++)).matches((Predicate) right -> comparison.test(left, right)); diff --git a/extensions/csv/src/main/java/io/deephaven/csv/CsvTools.java b/extensions/csv/src/main/java/io/deephaven/csv/CsvTools.java index 2027bb5ac2a..3a53a694058 100644 --- a/extensions/csv/src/main/java/io/deephaven/csv/CsvTools.java +++ b/extensions/csv/src/main/java/io/deephaven/csv/CsvTools.java @@ -43,7 +43,6 @@ import io.deephaven.engine.table.impl.InMemoryTable; import io.deephaven.engine.table.impl.perf.QueryPerformanceNugget; import io.deephaven.engine.table.impl.perf.QueryPerformanceRecorder; -import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.BooleanArraySource; import io.deephaven.engine.table.impl.sources.ByteArraySource; import io.deephaven.engine.table.impl.sources.CharacterArraySource; @@ -970,12 +969,12 @@ public static CsvSpecs fromLegacyFormat(String format) { } private static abstract class MySinkBase implements Sink { - protected final ArrayBackedColumnSource result; + protected final WritableColumnSource result; protected long resultSize; protected final WritableColumnSource reinterpreted; protected final ChunkWrapInvoker> chunkWrapInvoker; - public MySinkBase(ArrayBackedColumnSource result, Class interpClass, + public MySinkBase(WritableColumnSource result, Class interpClass, ChunkWrapInvoker> chunkWrapInvoker) { this.result = result; this.resultSize = 0; @@ -1006,7 +1005,7 @@ public final void write(final TARRAY src, final boolean[] isNull, final long des protected abstract void nullFlagsToValues(final TARRAY values, final boolean[] isNull, final int size); - public ArrayBackedColumnSource result() { + public WritableColumnSource result() { return 
result; } @@ -1028,7 +1027,7 @@ private static abstract class MySourceAndSinkBase extends MySinkBa implements Source, Sink { private final ChunkWrapInvoker> writableChunkWrapInvoker; - public MySourceAndSinkBase(ArrayBackedColumnSource result, Class interpClass, + public MySourceAndSinkBase(WritableColumnSource result, Class interpClass, ChunkWrapInvoker> chunkWrapInvoker, ChunkWrapInvoker> writeableChunkWrapInvoker) { super(result, interpClass, chunkWrapInvoker); diff --git a/replication/static/build.gradle b/replication/static/build.gradle index 272936ad7e6..5a67528232b 100644 --- a/replication/static/build.gradle +++ b/replication/static/build.gradle @@ -84,8 +84,5 @@ task replicateAllSafe { // These replicators need manual fix-up post replication and should not be run without supervision task replicateAllUnsafe { - dependsOn Tasks.registerMainExecTask(project, 'replicatePrimitiveLibs', 'io.deephaven.replicators.ReplicatePrimitiveLibs') - dependsOn Tasks.registerMainExecTask(project, 'replicatePrimitiveLibTests', 'io.deephaven.replicators.ReplicatePrimitiveLibTests') - dependsOn Tasks.registerMainExecTask(project, 'replicateParquetChunkedWriters', 'io.deephaven.replicators.ReplicateParquetChunkedWriters') } diff --git a/replication/static/src/main/java/io/deephaven/replicators/ReplicatePrimitiveLibTests.java b/replication/static/src/main/java/io/deephaven/replicators/ReplicatePrimitiveLibTests.java deleted file mode 100644 index c3231b10a2e..00000000000 --- a/replication/static/src/main/java/io/deephaven/replicators/ReplicatePrimitiveLibTests.java +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Copyright (c) 2016-2022 Deephaven Data Labs and Patent Pending - */ -package io.deephaven.replicators; - -import io.deephaven.replication.ReplicatePrimitiveCode; - -import java.io.IOException; - -import static io.deephaven.replication.ReplicatePrimitiveCode.*; -import static io.deephaven.replication.ReplicatePrimitiveCode.floatToAllFloatingPoints; - -public class 
ReplicatePrimitiveLibTests { - public static void main(String[] args) throws IOException { - charToAllButBoolean("engine/function/src/test/java/io/deephaven/function/TestCharPrimitives.java"); - shortToAllIntegralTypes( - "engine/function/src/test/java/io/deephaven/function/TestShortNumericPrimitives.java"); - floatToAllFloatingPoints( - "engine/function/src/test/java/io/deephaven/function/TestFloatNumericPrimitives.java"); - floatToAllFloatingPoints( - "engine/function/src/test/java/io/deephaven/function/TestFloatFpPrimitives.java"); - } -} diff --git a/replication/static/src/main/java/io/deephaven/replicators/ReplicatePrimitiveLibs.java b/replication/static/src/main/java/io/deephaven/replicators/ReplicatePrimitiveLibs.java deleted file mode 100644 index ca9f8228e26..00000000000 --- a/replication/static/src/main/java/io/deephaven/replicators/ReplicatePrimitiveLibs.java +++ /dev/null @@ -1,41 +0,0 @@ -/** - * Copyright (c) 2016-2022 Deephaven Data Labs and Patent Pending - */ -package io.deephaven.replicators; - -import io.deephaven.replication.ReplicationUtils; -import org.apache.commons.io.FileUtils; - -import java.io.File; -import java.io.IOException; -import java.nio.charset.Charset; -import java.util.List; - -import static io.deephaven.replication.ReplicatePrimitiveCode.*; - -/** - * Autogenerates primitives from template java files (e.g. CharacterPrimitives, ShortNumericPrimitives, - * FloatNumericPrimitives, and FlotFpPrimitives). 
- */ -public class ReplicatePrimitiveLibs { - public static void main(String[] args) throws IOException { - List files = charToAllButBoolean( - "engine/function/src/main/java/io/deephaven/function/CharacterPrimitives.java"); - fixup(files); - - shortToAllIntegralTypes( - "engine/function/src/main/java/io/deephaven/function/ShortNumericPrimitives.java"); - floatToAllFloatingPoints( - "engine/function/src/main/java/io/deephaven/function/FloatNumericPrimitives.java"); - floatToAllFloatingPoints("engine/function/src/main/java/io/deephaven/function/FloatFpPrimitives.java"); - } - - private static void fixup(List files) throws IOException { - for (String file : files) { - final File fileyfile = new File(file); - List lines = FileUtils.readLines(fileyfile, Charset.defaultCharset()); - lines = ReplicationUtils.removeRegion(lines, "SortFixup"); - FileUtils.writeLines(fileyfile, lines); - } - } -} From f2a3e0ea69e992ec00638c7c1f34da87880ce92c Mon Sep 17 00:00:00 2001 From: Nathaniel Bauernfeind Date: Thu, 2 Feb 2023 17:39:27 -0700 Subject: [PATCH 06/14] Use WritableColumnSource as return type of SparseArrayColumnSource#getSparseMemoryColumnSource --- .../engine/table/impl/QueryTable.java | 8 +-- .../engine/table/impl/SparseSelect.java | 16 ++--- .../table/impl/TableUpdateValidator.java | 7 +- .../snapshot/SnapshotIncrementalListener.java | 8 +-- .../NanosBasedTimeSparseArraySource.java | 68 +++++++++++-------- .../impl/sources/SparseArrayColumnSource.java | 33 ++++----- .../deltaaware/DeltaAwareColumnSource.java | 14 +++- .../engine/table/impl/util/ShiftData.java | 3 + 8 files changed, 90 insertions(+), 67 deletions(-) diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/QueryTable.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/QueryTable.java index 87693756e11..adae61c263b 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/QueryTable.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/QueryTable.java 
@@ -2043,21 +2043,21 @@ private Table snapshotIncrementalInternal(final Table base, final boolean doInit SnapshotUtils.maybeTransformToDirectVectorColumnSource(getColumnSource(stampColumn))); } - final Map> resultTriggerColumns = new LinkedHashMap<>(); + final Map> resultTriggerColumns = new LinkedHashMap<>(); for (Map.Entry> entry : triggerColumns.entrySet()) { final String name = entry.getKey(); final ColumnSource cs = entry.getValue(); final Class type = cs.getType(); - final SparseArrayColumnSource stampDest = Vector.class.isAssignableFrom(type) + final WritableColumnSource stampDest = Vector.class.isAssignableFrom(type) ? SparseArrayColumnSource.getSparseMemoryColumnSource(type, cs.getComponentType()) : SparseArrayColumnSource.getSparseMemoryColumnSource(type); resultTriggerColumns.put(name, stampDest); } - final Map> resultBaseColumns = SnapshotUtils.createColumnSourceMap( + final Map> resultBaseColumns = SnapshotUtils.createColumnSourceMap( baseTable.getColumnSourceMap(), SparseArrayColumnSource::getSparseMemoryColumnSource); - final Map> resultColumns = new LinkedHashMap<>(resultBaseColumns); + final Map> resultColumns = new LinkedHashMap<>(resultBaseColumns); resultColumns.putAll(resultTriggerColumns); if (resultColumns.size() != resultTriggerColumns.size() + resultBaseColumns.size()) { throwColumnConflictMessage(resultTriggerColumns.keySet(), resultBaseColumns.keySet()); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/SparseSelect.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/SparseSelect.java index 3cfc08258db..91fbae9470e 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/SparseSelect.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/SparseSelect.java @@ -147,7 +147,7 @@ private static Table sparseSelect(QueryTable source, String[] preserveColumns, S } final List> inputSourcesList = new ArrayList<>(columnNames.length); - final List> outputSourcesList = new 
ArrayList<>(columnNames.length); + final List> outputSourcesList = new ArrayList<>(columnNames.length); final List modifiedColumnSets = new ArrayList<>(columnNames.length); for (final String columnName : columnNames) { @@ -157,7 +157,7 @@ private static Table sparseSelect(QueryTable source, String[] preserveColumns, S resultColumns.put(columnName, inputSource); } else { inputSourcesList.add(inputSource); - final SparseArrayColumnSource outputSource = SparseArrayColumnSource + final WritableColumnSource outputSource = SparseArrayColumnSource .getSparseMemoryColumnSource(inputSource.getType(), inputSource.getComponentType()); outputSourcesList.add(outputSource); resultColumns.put(columnName, outputSource); @@ -167,8 +167,8 @@ private static Table sparseSelect(QueryTable source, String[] preserveColumns, S final ColumnSource[] inputSources = inputSourcesList.toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); - final SparseArrayColumnSource[] outputSources = outputSourcesList - .toArray(SparseArrayColumnSource.ZERO_LENGTH_SPARSE_ARRAY_COLUMN_SOURCE_ARRAY); + final WritableColumnSource[] outputSources = outputSourcesList + .toArray(WritableColumnSource[]::new); doCopy(source.getRowSet(), inputSources, outputSources, null); @@ -262,7 +262,7 @@ private void invert(boolean[] modifiedColumns) { }); } - private static void doShift(SafeCloseablePair shifts, SparseArrayColumnSource[] outputSources, + private static void doShift(SafeCloseablePair shifts, WritableColumnSource[] outputSources, boolean[] toShift) { if (executor == null) { doShiftSingle(shifts, outputSources, toShift); @@ -346,7 +346,7 @@ private static void doCopySource(RowSet addedAndModified, WritableColumnSource shifts, - SparseArrayColumnSource[] outputSources, + WritableColumnSource[] outputSources, boolean[] toShift) { // noinspection unchecked final WritableChunk[] values = new WritableChunk[outputSources.length]; @@ -382,7 +382,7 @@ private static void doShiftSingle(SafeCloseablePair shifts, } private 
static void doShiftThreads(SafeCloseablePair shifts, - SparseArrayColumnSource[] outputSources, + WritableColumnSource[] outputSources, boolean[] toShift) { final Future[] futures = new Future[outputSources.length]; for (int columnIndex = 0; columnIndex < outputSources.length; columnIndex++) { @@ -405,7 +405,7 @@ private static void doShiftThreads(SafeCloseablePair shifts, } private static void doShiftSource(SafeCloseablePair shifts, - SparseArrayColumnSource outputSource) { + WritableColumnSource outputSource) { try (final RowSequence.Iterator preIt = shifts.first.getRowSequenceIterator(); final RowSequence.Iterator postIt = shifts.second.getRowSequenceIterator(); final ChunkSink.FillFromContext ffc = diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/TableUpdateValidator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/TableUpdateValidator.java index 45d8b2a8ed9..e54d467eb31 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/TableUpdateValidator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/TableUpdateValidator.java @@ -316,7 +316,7 @@ private class ColumnInfo implements RowSetShiftData.Callback, SafeCloseable { final ModifiedColumnSet modifiedColumnSet; final ColumnSource source; - final SparseArrayColumnSource expectedSource; + final WritableColumnSource expectedSource; final ChunkEquals chunkEquals; @@ -336,6 +336,8 @@ private ColumnInfo(QueryTable tableToValidate, String columnName) { this.isPrimitive = source.getType().isPrimitive(); this.expectedSource = SparseArrayColumnSource.getSparseMemoryColumnSource(source.getType(), source.getComponentType()); + Assert.eqTrue(this.expectedSource instanceof ShiftData.RowSetShiftCallback, + "expectedSource instanceof ShiftData.RowSetShiftCallback"); this.chunkEquals = ChunkEquals.makeEqual(source.getChunkType()); } @@ -384,7 +386,8 @@ private WritableBooleanChunk equalValuesDest() { @Override public void shift(final long beginRange, final 
long endRange, final long shiftDelta) { - expectedSource.shift(rowSet.subSetByKeyRange(beginRange, endRange), shiftDelta); + ((ShiftData.RowSetShiftCallback)expectedSource).shift( + rowSet.subSetByKeyRange(beginRange, endRange), shiftDelta); } public void remove(final RowSet toRemove) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/snapshot/SnapshotIncrementalListener.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/snapshot/SnapshotIncrementalListener.java index 5103426d716..c874afca3ba 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/snapshot/SnapshotIncrementalListener.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/snapshot/SnapshotIncrementalListener.java @@ -8,11 +8,11 @@ import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.TrackingWritableRowSet; import io.deephaven.engine.table.ChunkSource; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.ListenerRecorder; import io.deephaven.engine.table.impl.MergedListener; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.ColumnSource; -import io.deephaven.engine.table.impl.sources.SparseArrayColumnSource; import io.deephaven.engine.table.impl.util.*; import java.util.Arrays; @@ -22,7 +22,7 @@ public class SnapshotIncrementalListener extends MergedListener { private final QueryTable triggerTable; private final QueryTable resultTable; - private final Map> resultColumns; + private final Map> resultColumns; private final ListenerRecorder baseListener; private final ListenerRecorder triggerListener; private final QueryTable baseTable; @@ -36,7 +36,7 @@ public class SnapshotIncrementalListener extends MergedListener { public SnapshotIncrementalListener( QueryTable triggerTable, QueryTable resultTable, - Map> resultColumns, + Map> resultColumns, ListenerRecorder baseListener, ListenerRecorder triggerListener, QueryTable baseTable, @@ 
-108,7 +108,7 @@ private void doRowCopy(RowSet rowSet) { public static void copyRowsToResult(RowSet rowsToCopy, QueryTable triggerTable, Map> snapshotDataColumns, Map> triggerColumns, - Map> resultColumns) { + Map> resultColumns) { final RowSet qtRowSet = triggerTable.getRowSet(); if (!qtRowSet.isEmpty()) { SnapshotUtils.copyStampColumns(triggerColumns, qtRowSet.lastRowKey(), resultColumns, rowsToCopy); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeSparseArraySource.java index f231335870d..683213611f1 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeSparseArraySource.java @@ -12,8 +12,9 @@ import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.chunkattributes.RowKeys; import io.deephaven.engine.table.ColumnSource; -import io.deephaven.engine.table.impl.DefaultChunkSource; -import io.deephaven.engine.table.impl.chunkfillers.ChunkFiller; +import io.deephaven.engine.table.WritableColumnSource; +import io.deephaven.engine.table.WritableSourceWithPrepareForParallelPopulation; +import io.deephaven.engine.table.impl.util.ShiftData; import io.deephaven.time.DateTime; import org.jetbrains.annotations.NotNull; @@ -26,8 +27,10 @@ /** * Array-backed ColumnSource for TIME_TYPEs. Allows reinterpret as long. 
*/ -public abstract class NanosBasedTimeSparseArraySource extends SparseArrayColumnSource - implements DefaultChunkSource, ConvertableTimeSource { +public abstract class NanosBasedTimeSparseArraySource extends AbstractDeferredGroupingColumnSource + implements FillUnordered, WritableColumnSource, InMemoryColumnSource, + PossiblyImmutableColumnSource, WritableSourceWithPrepareForParallelPopulation, ShiftData.RowSetShiftCallback, + ConvertableTimeSource { protected final LongSparseArraySource nanoSource; @@ -102,6 +105,12 @@ public void shift(final RowSet keysToShift, long shiftDelta) { public void startTrackingPrevValues() { nanoSource.startTrackingPrevValues(); } + + @Override + public void setImmutable() { + nanoSource.setImmutable(); + } + // endregion // region Chunking @@ -116,25 +125,33 @@ public Chunk getPrevChunk(@NotNull GetContext context, @NotNull RowSeque } @Override - void fillByUnRowSequence(@NotNull WritableChunk dest, - @NotNull LongChunk keys) { - nanoSource.fillByUnRowSequence(dest, keys, this::makeValue); + public boolean providesFillUnordered() { + return true; } @Override - void fillPrevByUnRowSequence(@NotNull WritableChunk dest, + public void fillChunkUnordered( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, @NotNull LongChunk keys) { - nanoSource.fillPrevByUnRowSequence(dest, keys, this::makeValue); + nanoSource.fillByUnRowSequence(dest, keys, this::makeValue); } @Override - public void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk src) { - nanoSource.fillFromChunkByRanges(rowSequence, src, this::toNanos); + public void fillPrevChunkUnordered( + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull LongChunk keys) { + nanoSource.fillPrevByUnRowSequence(dest, keys, this::makeValue); } - @Override - public void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src) { - nanoSource.fillFromChunkByKeys(rowSequence, src, this::toNanos); + public void 
fillFromChunk(@NotNull FillFromContext context, @NotNull Chunk src, + @NotNull RowSequence rowSequence) { + if (rowSequence.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { + nanoSource.fillFromChunkByKeys(rowSequence, src, this::toNanos); + } else { + nanoSource.fillFromChunkByRanges(rowSequence, src, this::toNanos); + } } @Override @@ -144,23 +161,18 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch } @Override - void fillByRanges(@NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { - nanoSource.fillByRanges(dest, rowSequence, this::makeValue); - } - - @Override - void fillByKeys(@NotNull WritableChunk dest, @NotNull RowSequence rowSequence) { - nanoSource.fillByKeys(dest, rowSequence, this::makeValue); - } - - @Override - void nullByRanges(@NotNull RowSequence rowSequence) { - nanoSource.nullByRanges(rowSequence); + public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk dest, + @NotNull RowSequence rowSequence) { + if (rowSequence.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { + nanoSource.fillByKeys(dest, rowSequence, this::makeValue); + } else { + nanoSource.fillByRanges(dest, rowSequence, this::makeValue); + } } @Override - void nullByKeys(@NotNull RowSequence rowSequence) { - nanoSource.nullByKeys(rowSequence); + public void setNull(RowSequence rowSequence) { + nanoSource.setNull(rowSequence); } // endregion diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/SparseArrayColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/SparseArrayColumnSource.java index f57ae53c747..1bb133af8f5 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/SparseArrayColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/SparseArrayColumnSource.java @@ -7,6 +7,7 @@ import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.WritableColumnSource; 
import io.deephaven.engine.table.WritableSourceWithPrepareForParallelPopulation; +import io.deephaven.engine.table.impl.util.ShiftData; import io.deephaven.util.type.ArrayTypeUtils; import io.deephaven.time.DateTime; import io.deephaven.engine.rowset.chunkattributes.RowKeys; @@ -73,9 +74,7 @@ public abstract class SparseArrayColumnSource extends AbstractDeferredGroupingColumnSource implements FillUnordered, WritableColumnSource, InMemoryColumnSource, PossiblyImmutableColumnSource, - WritableSourceWithPrepareForParallelPopulation { - public static final SparseArrayColumnSource[] ZERO_LENGTH_SPARSE_ARRAY_COLUMN_SOURCE_ARRAY = - new SparseArrayColumnSource[0]; + WritableSourceWithPrepareForParallelPopulation, ShiftData.RowSetShiftCallback { static final int DEFAULT_RECYCLER_CAPACITY = 1024; @@ -191,16 +190,12 @@ public void set(long key, short value) { throw new UnsupportedOperationException(); } - public void shift(RowSet keysToShift, long shiftDelta) { - throw new UnsupportedOperationException(); - } - public void remove(RowSet toRemove) { setNull(toRemove); } - public static SparseArrayColumnSource getSparseMemoryColumnSource(Collection data, Class type) { - final SparseArrayColumnSource result = getSparseMemoryColumnSource(data.size(), type); + public static WritableColumnSource getSparseMemoryColumnSource(Collection data, Class type) { + final WritableColumnSource result = getSparseMemoryColumnSource(data.size(), type); long i = 0; for (T o : data) { result.set(i++, o); @@ -208,8 +203,8 @@ public static SparseArrayColumnSource getSparseMemoryColumnSource(Collect return result; } - private static SparseArrayColumnSource getSparseMemoryColumnSource(T[] data, Class type) { - final SparseArrayColumnSource result = getSparseMemoryColumnSource(data.length, type); + private static WritableColumnSource getSparseMemoryColumnSource(T[] data, Class type) { + final WritableColumnSource result = getSparseMemoryColumnSource(data.length, type); long i = 0; for (T o : data) { 
result.set(i++, o); @@ -277,8 +272,8 @@ public static SparseArrayColumnSource getSparseMemoryColumnSource(long[] d return result; } - public static SparseArrayColumnSource getDateTimeMemoryColumnSource(long[] data) { - final SparseArrayColumnSource result = new DateTimeSparseArraySource(); + public static WritableColumnSource getDateTimeMemoryColumnSource(long[] data) { + final WritableColumnSource result = new DateTimeSparseArraySource(); result.ensureCapacity(data.length); long i = 0; for (long o : data) { @@ -297,21 +292,21 @@ public static SparseArrayColumnSource getSparseMemoryColumnSource(short[] return result; } - public static SparseArrayColumnSource getSparseMemoryColumnSource(Class type) { + public static WritableColumnSource getSparseMemoryColumnSource(Class type) { return getSparseMemoryColumnSource(0, type, null); } - public static SparseArrayColumnSource getSparseMemoryColumnSource(Class type, Class componentType) { + public static WritableColumnSource getSparseMemoryColumnSource(Class type, Class componentType) { return getSparseMemoryColumnSource(0, type, componentType); } - public static SparseArrayColumnSource getSparseMemoryColumnSource(long size, Class type) { + public static WritableColumnSource getSparseMemoryColumnSource(long size, Class type) { return getSparseMemoryColumnSource(size, type, null); } - public static SparseArrayColumnSource getSparseMemoryColumnSource(long size, Class type, + public static WritableColumnSource getSparseMemoryColumnSource(long size, Class type, @Nullable Class componentType) { - final SparseArrayColumnSource result; + final WritableColumnSource result; if (type == byte.class || type == Byte.class) { result = new ByteSparseArraySource(); } else if (type == char.class || type == Character.class) { @@ -343,7 +338,7 @@ public static SparseArrayColumnSource getSparseMemoryColumnSource(long si result.ensureCapacity(size); } // noinspection unchecked - return (SparseArrayColumnSource) result; + return 
(WritableColumnSource) result; } public static ColumnSource getSparseMemoryColumnSource(Object dataArray) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/deltaaware/DeltaAwareColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/deltaaware/DeltaAwareColumnSource.java index d830d01446a..c62fe340645 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/deltaaware/DeltaAwareColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/deltaaware/DeltaAwareColumnSource.java @@ -89,6 +89,12 @@ public final class DeltaAwareColumnSource extends AbstractColumnSource * The initial size of the delta column source. */ private static final int INITIAL_DELTA_CAPACITY = 256; + + /** + * The preferred chunk size when the column source is not chunked. + */ + private static final int DEFAULT_PREFERRED_CHUNK_SIZE = 4096; + /** * In its own coordinate space */ @@ -144,7 +150,7 @@ private interface CapacityEnsurer { public DeltaAwareColumnSource(Class type) { super(type); - final SparseArrayColumnSource sparseBaseline = + final WritableColumnSource sparseBaseline = SparseArrayColumnSource.getSparseMemoryColumnSource(getType(), null); baseline = sparseBaseline; delta = baseline; @@ -152,7 +158,11 @@ public DeltaAwareColumnSource(Class type) { baselineCapacityEnsurer = sparseBaseline::ensureCapacity; deltaCapacityEnsurer = baselineCapacityEnsurer; - preferredChunkSize = sparseBaseline.getPreferredChunkSize(); + if (sparseBaseline instanceof SparseArrayColumnSource) { + preferredChunkSize = ((SparseArrayColumnSource)sparseBaseline).getPreferredChunkSize(); + } else { + preferredChunkSize = DEFAULT_PREFERRED_CHUNK_SIZE; + } deltaCapacity = 0; deltaRows = null; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ShiftData.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ShiftData.java index 478776d7730..5b09393b89f 100644 --- 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ShiftData.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ShiftData.java @@ -108,6 +108,9 @@ public interface ShiftCallback { void shift(long start, long end, long offset); } + public interface RowSetShiftCallback { + void shift(RowSet rowSet, long offset); + } public void applyDataShift(ShiftCallback shiftCallback) { int startPos = 0; From 87ed7ff0f583ee44aed24bed27e55eddc0d14f63 Mon Sep 17 00:00:00 2001 From: Nathaniel Bauernfeind Date: Thu, 2 Feb 2023 17:41:30 -0700 Subject: [PATCH 07/14] spotless --- .../io/deephaven/engine/table/impl/TableUpdateValidator.java | 2 +- .../table/impl/sources/NanosBasedTimeSparseArraySource.java | 4 ++-- .../table/impl/sources/deltaaware/DeltaAwareColumnSource.java | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/TableUpdateValidator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/TableUpdateValidator.java index e54d467eb31..f9dfbacc9a5 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/TableUpdateValidator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/TableUpdateValidator.java @@ -386,7 +386,7 @@ private WritableBooleanChunk equalValuesDest() { @Override public void shift(final long beginRange, final long endRange, final long shiftDelta) { - ((ShiftData.RowSetShiftCallback)expectedSource).shift( + ((ShiftData.RowSetShiftCallback) expectedSource).shift( rowSet.subSetByKeyRange(beginRange, endRange), shiftDelta); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeSparseArraySource.java index 683213611f1..2eecefec3bc 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeSparseArraySource.java +++ 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeSparseArraySource.java @@ -146,7 +146,7 @@ public void fillPrevChunkUnordered( } public void fillFromChunk(@NotNull FillFromContext context, @NotNull Chunk src, - @NotNull RowSequence rowSequence) { + @NotNull RowSequence rowSequence) { if (rowSequence.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { nanoSource.fillFromChunkByKeys(rowSequence, src, this::toNanos); } else { @@ -162,7 +162,7 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Ch @Override public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk dest, - @NotNull RowSequence rowSequence) { + @NotNull RowSequence rowSequence) { if (rowSequence.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { nanoSource.fillByKeys(dest, rowSequence, this::makeValue); } else { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/deltaaware/DeltaAwareColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/deltaaware/DeltaAwareColumnSource.java index c62fe340645..46c88351884 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/deltaaware/DeltaAwareColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/deltaaware/DeltaAwareColumnSource.java @@ -159,7 +159,7 @@ public DeltaAwareColumnSource(Class type) { deltaCapacityEnsurer = baselineCapacityEnsurer; if (sparseBaseline instanceof SparseArrayColumnSource) { - preferredChunkSize = ((SparseArrayColumnSource)sparseBaseline).getPreferredChunkSize(); + preferredChunkSize = ((SparseArrayColumnSource) sparseBaseline).getPreferredChunkSize(); } else { preferredChunkSize = DEFAULT_PREFERRED_CHUNK_SIZE; } From 7d9f7a8c51d0f456d65605e9a4bed5ae81cac0d5 Mon Sep 17 00:00:00 2001 From: Nathaniel Bauernfeind Date: Thu, 2 Feb 2023 20:03:41 -0700 Subject: [PATCH 08/14] Fix DynamicTableWriter#setPermissive --- 
.../table/impl/util/DynamicTableWriter.java | 21 +++++++++---------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/DynamicTableWriter.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/DynamicTableWriter.java index 7930b5e0cf9..5c19b1d4e56 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/DynamicTableWriter.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/DynamicTableWriter.java @@ -273,7 +273,6 @@ public void logRowPermissive(Map values) { "Incompatible logRowPermissive call: " + values.keySet() + " != " + factoryMap.keySet()); } for (final Map.Entry value : values.entrySet()) { - // noinspection unchecked getSetter(value.getKey()).setPermissive(value.getValue()); } writeRow(); @@ -295,7 +294,6 @@ public void logRowPermissive(Object... values) { + factoryMap.size()); } for (int ii = 0; ii < values.length; ++ii) { - // noinspection unchecked getSetter(ii).setPermissive(values[ii]); } writeRow(); @@ -440,7 +438,7 @@ private RowSetterImpl createRowSetter(Class type, WritableColumnSource } public interface PermissiveRowSetter extends RowSetter { - void setPermissive(T value); + void setPermissive(Object value); } private static abstract class RowSetterImpl implements PermissiveRowSetter { @@ -471,8 +469,9 @@ public void set(T value) { } @Override - public void setPermissive(T value) { - set(value); + public void setPermissive(Object value) { + // noinspection unchecked + set((T) value); } @Override @@ -552,7 +551,7 @@ public void set(Byte value) { } @Override - public void setPermissive(Byte value) { + public void setPermissive(Object value) { setByte(value == null ? QueryConstants.NULL_BYTE : ((Number) value).byteValue()); } @@ -603,7 +602,7 @@ public void set(Integer value) { } @Override - public void setPermissive(Integer value) { + public void setPermissive(Object value) { setInt(value == null ? 
QueryConstants.NULL_INT : ((Number) value).intValue()); } @@ -631,7 +630,7 @@ public void set(Double value) { } @Override - public void setPermissive(Double value) { + public void setPermissive(Object value) { setDouble(value == null ? QueryConstants.NULL_DOUBLE : ((Number) value).doubleValue()); } @@ -659,7 +658,7 @@ public void set(Float value) { } @Override - public void setPermissive(Float value) { + public void setPermissive(Object value) { setFloat(value == null ? QueryConstants.NULL_FLOAT : ((Number) value).floatValue()); } @@ -687,7 +686,7 @@ public void set(Long value) { } @Override - public void setPermissive(Long value) { + public void setPermissive(Object value) { setLong(value == null ? QueryConstants.NULL_LONG : ((Number) value).longValue()); } @@ -715,7 +714,7 @@ public void set(Short value) { } @Override - public void setPermissive(Short value) { + public void setPermissive(Object value) { setShort(value == null ? QueryConstants.NULL_SHORT : ((Number) value).shortValue()); } From 0bbac9ba72f0b1e850f3f5cb5e7cd2e4d237a998 Mon Sep 17 00:00:00 2001 From: Nathaniel Bauernfeind Date: Tue, 21 Feb 2023 19:06:46 -0700 Subject: [PATCH 09/14] Ryan's feedback --- .../util/codec/ZonedDateTimeCodec.java | 9 +-- .../deephaven/engine/table/ColumnSource.java | 2 - .../table/impl/ColumnSourceGetDefaults.java | 4 +- .../table/impl/CrossJoinShiftState.java | 16 ++--- .../table/impl/CrossJoinStateManager.java | 8 +-- .../impl/MutableColumnSourceGetDefaults.java | 4 +- .../table/impl/by/AggregationProcessor.java | 8 ++- .../table/impl/sources/ArraySourceHelper.java | 6 +- .../impl/sources/BitMaskingColumnSource.java | 4 +- .../impl/sources/BitShiftingColumnSource.java | 4 +- .../impl/sources/BooleanArraySource.java | 22 +++---- .../sources/BooleanSparseArraySource.java | 6 +- .../table/impl/sources/ByteArraySource.java | 14 ++--- .../impl/sources/ByteSparseArraySource.java | 6 +- .../impl/sources/CharacterArraySource.java | 14 ++--- 
.../sources/CharacterSparseArraySource.java | 6 +- .../table/impl/sources/DoubleArraySource.java | 14 ++--- .../impl/sources/DoubleSparseArraySource.java | 6 +- .../table/impl/sources/FloatArraySource.java | 14 ++--- .../impl/sources/FloatSparseArraySource.java | 6 +- .../impl/sources/IntegerArraySource.java | 14 ++--- .../sources/IntegerSparseArraySource.java | 6 +- .../impl/sources/LocalDateWrapperSource.java | 8 +-- .../impl/sources/LocalTimeWrapperSource.java | 8 +-- .../table/impl/sources/LongArraySource.java | 14 ++--- .../impl/sources/LongSparseArraySource.java | 6 +- .../sources/NanosBasedTimeArraySource.java | 4 +- .../impl/sources/ObjectSparseArraySource.java | 6 +- .../table/impl/sources/ShortArraySource.java | 14 ++--- .../impl/sources/ShortSparseArraySource.java | 6 +- .../UnboxedLongBackedColumnSource.java | 8 +-- .../UnboxedTimeBackedColumnSource.java | 8 +-- .../immutable/Immutable2DByteArraySource.java | 8 +-- .../immutable/Immutable2DCharArraySource.java | 8 +-- .../Immutable2DDoubleArraySource.java | 8 +-- .../Immutable2DFloatArraySource.java | 8 +-- .../immutable/Immutable2DIntArraySource.java | 8 +-- .../immutable/Immutable2DLongArraySource.java | 8 +-- .../Immutable2DObjectArraySource.java | 8 +-- .../Immutable2DShortArraySource.java | 8 +-- .../immutable/ImmutableByteArraySource.java | 10 +-- .../immutable/ImmutableCharArraySource.java | 10 +-- .../immutable/ImmutableDoubleArraySource.java | 10 +-- .../immutable/ImmutableFloatArraySource.java | 10 +-- .../immutable/ImmutableIntArraySource.java | 10 +-- .../immutable/ImmutableLongArraySource.java | 10 +-- .../immutable/ImmutableObjectArraySource.java | 10 +-- .../immutable/ImmutableShortArraySource.java | 10 +-- .../RegionedColumnSourceDateTime.java | 11 ++-- .../regioned/RegionedColumnSourceInner.java | 62 ------------------- .../regioned/RegionedColumnSourceInstant.java | 20 +++--- .../RegionedColumnSourceZonedDateTime.java | 22 ++++--- .../AbstractBooleanColumnSourceTest.java | 4 ++ 53 
files changed, 244 insertions(+), 294 deletions(-) delete mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceInner.java diff --git a/Util/src/main/java/io/deephaven/util/codec/ZonedDateTimeCodec.java b/Util/src/main/java/io/deephaven/util/codec/ZonedDateTimeCodec.java index 3354616e1dd..e17599f4f9c 100644 --- a/Util/src/main/java/io/deephaven/util/codec/ZonedDateTimeCodec.java +++ b/Util/src/main/java/io/deephaven/util/codec/ZonedDateTimeCodec.java @@ -6,6 +6,7 @@ import org.jetbrains.annotations.Nullable; import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; import java.time.Instant; import java.time.ZoneId; import java.time.ZonedDateTime; @@ -29,7 +30,7 @@ public byte[] encode(@Nullable ZonedDateTime input) { final String zone = input.getZone().getId(); bb.putInt(zone.length()); - bb.put(zone.getBytes()); + bb.put(zone.getBytes(StandardCharsets.UTF_8)); return buf; } @@ -47,7 +48,7 @@ public ZonedDateTime decode(@NotNull byte[] input, int offset, int length) { final byte[] zidBytes = new byte[zidLen]; buf.get(zidBytes, 0, zidLen); - final String zid = new String(zidBytes); + final String zid = new String(zidBytes, StandardCharsets.UTF_8); return ZonedDateTime.ofInstant(Instant.ofEpochSecond(0, nanos), ZoneId.of(zid)); } @@ -76,7 +77,7 @@ private static int computeSize(@NotNull ZonedDateTime val) { return Long.BYTES + Integer.BYTES + val.getZone().getId().length(); } - // Sadly, this is copied from DBTimeUtils since that lives in the DB package and this cannot. + // Sadly, this is copied from DateTimeUtils, since we cannot depend on the engine-time package. 
private static long toEpochNano(@Nullable final ZonedDateTime value) { if (value == null) { return QueryConstants.NULL_LONG; @@ -91,6 +92,6 @@ private static long safeComputeNanos(long epochSecond, long nanoOfSecond) { "Numeric overflow detected during conversion of " + epochSecond + " to nanoseconds"); } - return epochSecond * 1_000_000_000L + nanoOfSecond; + return Math.addExact(epochSecond * 1_000_000_000L, nanoOfSecond); } } diff --git a/engine/api/src/main/java/io/deephaven/engine/table/ColumnSource.java b/engine/api/src/main/java/io/deephaven/engine/table/ColumnSource.java index c1624ee3725..2aa57fc21a2 100644 --- a/engine/api/src/main/java/io/deephaven/engine/table/ColumnSource.java +++ b/engine/api/src/main/java/io/deephaven/engine/table/ColumnSource.java @@ -12,8 +12,6 @@ import org.jetbrains.annotations.NotNull; import javax.annotation.OverridingMethodsMustInvokeSuper; -import java.util.Collections; -import java.util.List; import java.util.Map; /** diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/ColumnSourceGetDefaults.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/ColumnSourceGetDefaults.java index 870e7e7859b..ea9c45377e6 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/ColumnSourceGetDefaults.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/ColumnSourceGetDefaults.java @@ -409,8 +409,8 @@ default DateTime get(final long rowKey) { public interface ForLongAsInstant extends LongBacked { @Nullable @Override - default Instant get(long index) { - return DateTimeUtils.makeInstant(getLong(index)); + default Instant get(long rowKey) { + return DateTimeUtils.makeInstant(getLong(rowKey)); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/CrossJoinShiftState.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/CrossJoinShiftState.java index c322c66d285..fe54d7da7a6 100644 --- 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/CrossJoinShiftState.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/CrossJoinShiftState.java @@ -61,20 +61,20 @@ public boolean leftOuterJoin() { return leftOuterJoin; } - public long getShifted(long index) { - return index >> getNumShiftBits(); + public long getShifted(long rowKey) { + return rowKey >> getNumShiftBits(); } - public long getPrevShifted(long index) { - return index >> getPrevNumShiftBits(); + public long getPrevShifted(long rowKey) { + return rowKey >> getPrevNumShiftBits(); } - public long getMasked(long index) { - return index & getMask(); + public long getMasked(long rowKey) { + return rowKey & getMask(); } - public long getPrevMasked(long index) { - return index & getPrevMask(); + public long getPrevMasked(long rowKey) { + return rowKey & getPrevMask(); } private long getMask() { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/CrossJoinStateManager.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/CrossJoinStateManager.java index e9daddc0be5..c8de1fdc43d 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/CrossJoinStateManager.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/CrossJoinStateManager.java @@ -10,10 +10,10 @@ public interface CrossJoinStateManager { TrackingRowSet getRightRowSetFromPrevLeftRow(long leftIndex); - long getShifted(long index); - long getPrevShifted(long index); - long getMasked(long index); - long getPrevMasked(long index); + long getShifted(long rowKey); + long getPrevShifted(long rowKey); + long getMasked(long rowKey); + long getPrevMasked(long rowKey); /** * If our result is a leftOuterJoin, which means that for each unmatched left row we produce one row of RHS output, diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/MutableColumnSourceGetDefaults.java 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/MutableColumnSourceGetDefaults.java index 88e158a00a5..27bd3b471aa 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/MutableColumnSourceGetDefaults.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/MutableColumnSourceGetDefaults.java @@ -411,8 +411,8 @@ default DateTime getPrev(final long rowKey) { public interface ForLongAsInstant extends ColumnSourceGetDefaults.ForLongAsInstant, LongBacked { @Nullable @Override - default Instant getPrev(long index) { - return DateTimeUtils.makeInstant(getPrevLong(index)); + default Instant getPrev(long rowKey) { + return DateTimeUtils.makeInstant(getPrevLong(rowKey)); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/AggregationProcessor.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/AggregationProcessor.java index 1c7115aacbf..7ded1563260 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/AggregationProcessor.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/AggregationProcessor.java @@ -95,6 +95,7 @@ import io.deephaven.engine.table.impl.by.ssmcountdistinct.unique.ShortRollupUniqueOperator; import io.deephaven.engine.table.impl.by.ssmminmax.SsmChunkedMinMaxOperator; import io.deephaven.engine.table.impl.by.ssmpercentile.SsmChunkedPercentileOperator; +import io.deephaven.engine.table.impl.sources.ConvertableTimeSource; import io.deephaven.engine.table.impl.sources.ReinterpretUtils; import io.deephaven.engine.table.impl.ssms.SegmentedSortedMultiSet; import io.deephaven.engine.table.impl.util.freezeby.FreezeByCountOperator; @@ -108,6 +109,8 @@ import java.math.BigDecimal; import java.math.BigInteger; +import java.time.ZoneId; +import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -235,7 +238,7 @@ public static AggregationContextFactory forTreeSourceRowLookup() { /** * Create a 
trivial {@link AggregationContextFactory} to implement {@link Table#selectDistinct select distinct}. - * + * * @return The {@link AggregationContextFactory} */ public static AggregationContextFactory forSelectDistinct() { @@ -507,6 +510,9 @@ final void addMinOrMaxOperator(final boolean isMin, @NotNull final String inputN return; } } + if (rawInputSource instanceof ConvertableTimeSource.Zoned) { + ZoneId id = ((ConvertableTimeSource.Zoned) rawInputSource).getZone(); + } addOperator(makeMinOrMaxOperator(type, resultName, isMin, isAddOnly || isStream), inputSource, inputName); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ArraySourceHelper.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ArraySourceHelper.java index 1e0a065fcbd..8f294d3d49d 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ArraySourceHelper.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ArraySourceHelper.java @@ -195,12 +195,12 @@ final void startTrackingPrev(int numBlocks) { * @return true if the inheritor should return a value from its "prev" data structure; false if it should return a * value from its "current" data structure. 
*/ - final boolean shouldUsePrevious(final long index) { + final boolean shouldUsePrevious(final long rowKey) { if (prevFlusher == null) { return false; } - final int blockIndex = (int) (index >> LOG_BLOCK_SIZE); - final int indexWithinBlock = (int) (index & INDEX_MASK); + final int blockIndex = (int) (rowKey >> LOG_BLOCK_SIZE); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); final long[] inUse = prevInUse[blockIndex]; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BitMaskingColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BitMaskingColumnSource.java index 1f967af1d33..4fc01a2604d 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BitMaskingColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BitMaskingColumnSource.java @@ -398,9 +398,9 @@ private void ensureMaskedKeysInitialized(@NotNull final CrossJoinShiftState shif } maskedKeys.setSize(0); - rowSequence.forAllRowKeys((final long indexKey) -> { + rowSequence.forAllRowKeys((final long rowKey) -> { final long innerIndexKey = - usePrev ? shiftState.getPrevMasked(indexKey) : shiftState.getMasked(indexKey); + usePrev ? 
shiftState.getPrevMasked(rowKey) : shiftState.getMasked(rowKey); maskedKeys.add(innerIndexKey); }); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BitShiftingColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BitShiftingColumnSource.java index 4de026a49f9..f4237e1f631 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BitShiftingColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BitShiftingColumnSource.java @@ -404,10 +404,10 @@ private void ensureKeysAndLengthsInitialized(@NotNull final CrossJoinShiftState final MutableInt currentRunPosition = new MutableInt(0); final MutableLong currentRunInnerIndexKey = new MutableLong(RowSequence.NULL_ROW_KEY); - rowSequence.forAllRowKeys((final long indexKey) -> { + rowSequence.forAllRowKeys((final long rowKey) -> { final long lastInnerIndexKey = currentRunInnerIndexKey.longValue(); final long innerIndexKey = - usePrev ? shiftState.getPrevShifted(indexKey) : shiftState.getShifted(indexKey); + usePrev ? 
shiftState.getPrevShifted(rowKey) : shiftState.getShifted(rowKey); if (innerIndexKey != lastInnerIndexKey) { if (lastInnerIndexKey != RowSequence.NULL_ROW_KEY) { uniqueIndices.set(currentRunPosition.intValue(), lastInnerIndexKey); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BooleanArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BooleanArraySource.java index 4e8e23fffa8..0505f9caef5 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BooleanArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BooleanArraySource.java @@ -140,23 +140,23 @@ public Boolean get(long rowKey) { return BooleanUtils.byteAsBoolean(getByte(rowKey)); } - public Boolean getUnsafe(long index) { - return BooleanUtils.byteAsBoolean(getByteUnsafe(index)); + public Boolean getUnsafe(long rowKey) { + return BooleanUtils.byteAsBoolean(getByteUnsafe(rowKey)); } - public final Boolean getAndSetUnsafe(long index, Boolean newValue) { - return BooleanUtils.byteAsBoolean(getAndSetUnsafe(index, BooleanUtils.booleanAsByte(newValue))); + public final Boolean getAndSetUnsafe(long rowKey, Boolean newValue) { + return BooleanUtils.byteAsBoolean(getAndSetUnsafe(rowKey, BooleanUtils.booleanAsByte(newValue))); } - public final byte getAndSetUnsafe(long index, byte newValue) { - final int blockIndex = (int) (index >> LOG_BLOCK_SIZE); - final int indexWithinBlock = (int) (index & INDEX_MASK); + public final byte getAndSetUnsafe(long rowKey, byte newValue) { + final int blockIndex = (int) (rowKey >> LOG_BLOCK_SIZE); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); final byte oldValue = blocks[blockIndex][indexWithinBlock]; // not a perfect comparison, but very cheap if (oldValue == newValue) { return oldValue; } - if (shouldRecordPrevious(index, prevBlocks, recycler)) { + if (shouldRecordPrevious(rowKey, prevBlocks, recycler)) { prevBlocks[blockIndex][indexWithinBlock] = 
oldValue; } blocks[blockIndex][indexWithinBlock] = newValue; @@ -171,9 +171,9 @@ public byte getByte(long rowKey) { return getByteUnsafe(rowKey); } - private byte getByteUnsafe(long index) { - final int blockIndex = (int) (index >> LOG_BLOCK_SIZE); - final int indexWithinBlock = (int) (index & INDEX_MASK); + private byte getByteUnsafe(long rowKey) { + final int blockIndex = (int) (rowKey >> LOG_BLOCK_SIZE); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); return blocks[blockIndex][indexWithinBlock]; } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BooleanSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BooleanSparseArraySource.java index 38c931caf4d..448b79d1c6a 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BooleanSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BooleanSparseArraySource.java @@ -468,7 +468,7 @@ public void prepareForParallelPopulation(final RowSet changedRows) { * @return true if the inheritor should return a value from its "prev" data structure; false if it should return a * value from its "current" data structure. 
*/ - private boolean shouldUsePrevious(final long index) { + private boolean shouldUsePrevious(final long rowKey) { if (prevFlusher == null) { return false; } @@ -477,12 +477,12 @@ private boolean shouldUsePrevious(final long index) { return false; } - final long [] inUse = prevInUse.getInnermostBlockByKeyOrNull(index); + final long [] inUse = prevInUse.getInnermostBlockByKeyOrNull(rowKey); if (inUse == null) { return false; } - final int indexWithinBlock = (int) (index & INDEX_MASK); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteArraySource.java index b66b3563264..6087287c4c0 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteArraySource.java @@ -154,18 +154,18 @@ public final byte getByte(long rowKey) { return getUnsafe(rowKey); } - public final byte getUnsafe(long index) { - final int blockIndex = (int) (index >> LOG_BLOCK_SIZE); - final int indexWithinBlock = (int) (index & INDEX_MASK); + public final byte getUnsafe(long rowKey) { + final int blockIndex = (int) (rowKey >> LOG_BLOCK_SIZE); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); return blocks[blockIndex][indexWithinBlock]; } - public final byte getAndSetUnsafe(long index, byte newValue) { - final int blockIndex = (int) (index >> LOG_BLOCK_SIZE); - final int indexWithinBlock = (int) (index & INDEX_MASK); + public final byte getAndSetUnsafe(long rowKey, byte newValue) { + final int blockIndex = (int) (rowKey >> LOG_BLOCK_SIZE); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); final byte oldValue = blocks[blockIndex][indexWithinBlock]; if 
(!ByteComparisons.eq(oldValue, newValue)) { - if (shouldRecordPrevious(index, prevBlocks, recycler)) { + if (shouldRecordPrevious(rowKey, prevBlocks, recycler)) { prevBlocks[blockIndex][indexWithinBlock] = oldValue; } blocks[blockIndex][indexWithinBlock] = newValue; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteSparseArraySource.java index cb509bdb0aa..655cb80052e 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteSparseArraySource.java @@ -462,7 +462,7 @@ public void prepareForParallelPopulation(final RowSet changedRows) { * @return true if the inheritor should return a value from its "prev" data structure; false if it should return a * value from its "current" data structure. */ - private boolean shouldUsePrevious(final long index) { + private boolean shouldUsePrevious(final long rowKey) { if (prevFlusher == null) { return false; } @@ -471,12 +471,12 @@ private boolean shouldUsePrevious(final long index) { return false; } - final long [] inUse = prevInUse.getInnermostBlockByKeyOrNull(index); + final long [] inUse = prevInUse.getInnermostBlockByKeyOrNull(rowKey); if (inUse == null) { return false; } - final int indexWithinBlock = (int) (index & INDEX_MASK); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterArraySource.java index ffd1c32e29c..35bf9abdaee 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterArraySource.java +++ 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterArraySource.java @@ -149,18 +149,18 @@ public final char getChar(long rowKey) { return getUnsafe(rowKey); } - public final char getUnsafe(long index) { - final int blockIndex = (int) (index >> LOG_BLOCK_SIZE); - final int indexWithinBlock = (int) (index & INDEX_MASK); + public final char getUnsafe(long rowKey) { + final int blockIndex = (int) (rowKey >> LOG_BLOCK_SIZE); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); return blocks[blockIndex][indexWithinBlock]; } - public final char getAndSetUnsafe(long index, char newValue) { - final int blockIndex = (int) (index >> LOG_BLOCK_SIZE); - final int indexWithinBlock = (int) (index & INDEX_MASK); + public final char getAndSetUnsafe(long rowKey, char newValue) { + final int blockIndex = (int) (rowKey >> LOG_BLOCK_SIZE); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); final char oldValue = blocks[blockIndex][indexWithinBlock]; if (!CharComparisons.eq(oldValue, newValue)) { - if (shouldRecordPrevious(index, prevBlocks, recycler)) { + if (shouldRecordPrevious(rowKey, prevBlocks, recycler)) { prevBlocks[blockIndex][indexWithinBlock] = oldValue; } blocks[blockIndex][indexWithinBlock] = newValue; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterSparseArraySource.java index 56e51f39de5..01a13579cec 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterSparseArraySource.java @@ -457,7 +457,7 @@ public void prepareForParallelPopulation(final RowSet changedRows) { * @return true if the inheritor should return a value from its "prev" data structure; false if it should return a * value from its "current" data structure. 
*/ - private boolean shouldUsePrevious(final long index) { + private boolean shouldUsePrevious(final long rowKey) { if (prevFlusher == null) { return false; } @@ -466,12 +466,12 @@ private boolean shouldUsePrevious(final long index) { return false; } - final long [] inUse = prevInUse.getInnermostBlockByKeyOrNull(index); + final long [] inUse = prevInUse.getInnermostBlockByKeyOrNull(rowKey); if (inUse == null) { return false; } - final int indexWithinBlock = (int) (index & INDEX_MASK); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleArraySource.java index 1b74775f1e9..546ae4a9d8f 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleArraySource.java @@ -154,18 +154,18 @@ public final double getDouble(long rowKey) { return getUnsafe(rowKey); } - public final double getUnsafe(long index) { - final int blockIndex = (int) (index >> LOG_BLOCK_SIZE); - final int indexWithinBlock = (int) (index & INDEX_MASK); + public final double getUnsafe(long rowKey) { + final int blockIndex = (int) (rowKey >> LOG_BLOCK_SIZE); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); return blocks[blockIndex][indexWithinBlock]; } - public final double getAndSetUnsafe(long index, double newValue) { - final int blockIndex = (int) (index >> LOG_BLOCK_SIZE); - final int indexWithinBlock = (int) (index & INDEX_MASK); + public final double getAndSetUnsafe(long rowKey, double newValue) { + final int blockIndex = (int) (rowKey >> LOG_BLOCK_SIZE); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); final double oldValue = 
blocks[blockIndex][indexWithinBlock]; if (!DoubleComparisons.eq(oldValue, newValue)) { - if (shouldRecordPrevious(index, prevBlocks, recycler)) { + if (shouldRecordPrevious(rowKey, prevBlocks, recycler)) { prevBlocks[blockIndex][indexWithinBlock] = oldValue; } blocks[blockIndex][indexWithinBlock] = newValue; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleSparseArraySource.java index 29a8d4f0656..98e06263519 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleSparseArraySource.java @@ -462,7 +462,7 @@ public void prepareForParallelPopulation(final RowSet changedRows) { * @return true if the inheritor should return a value from its "prev" data structure; false if it should return a * value from its "current" data structure. */ - private boolean shouldUsePrevious(final long index) { + private boolean shouldUsePrevious(final long rowKey) { if (prevFlusher == null) { return false; } @@ -471,12 +471,12 @@ private boolean shouldUsePrevious(final long index) { return false; } - final long [] inUse = prevInUse.getInnermostBlockByKeyOrNull(index); + final long [] inUse = prevInUse.getInnermostBlockByKeyOrNull(rowKey); if (inUse == null) { return false; } - final int indexWithinBlock = (int) (index & INDEX_MASK); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatArraySource.java index a7b4684819e..3ba75be9ea3 100644 --- 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatArraySource.java @@ -154,18 +154,18 @@ public final float getFloat(long rowKey) { return getUnsafe(rowKey); } - public final float getUnsafe(long index) { - final int blockIndex = (int) (index >> LOG_BLOCK_SIZE); - final int indexWithinBlock = (int) (index & INDEX_MASK); + public final float getUnsafe(long rowKey) { + final int blockIndex = (int) (rowKey >> LOG_BLOCK_SIZE); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); return blocks[blockIndex][indexWithinBlock]; } - public final float getAndSetUnsafe(long index, float newValue) { - final int blockIndex = (int) (index >> LOG_BLOCK_SIZE); - final int indexWithinBlock = (int) (index & INDEX_MASK); + public final float getAndSetUnsafe(long rowKey, float newValue) { + final int blockIndex = (int) (rowKey >> LOG_BLOCK_SIZE); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); final float oldValue = blocks[blockIndex][indexWithinBlock]; if (!FloatComparisons.eq(oldValue, newValue)) { - if (shouldRecordPrevious(index, prevBlocks, recycler)) { + if (shouldRecordPrevious(rowKey, prevBlocks, recycler)) { prevBlocks[blockIndex][indexWithinBlock] = oldValue; } blocks[blockIndex][indexWithinBlock] = newValue; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatSparseArraySource.java index b351de18e7b..4c8b5c27d3d 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatSparseArraySource.java @@ -462,7 +462,7 @@ public void prepareForParallelPopulation(final RowSet changedRows) { * @return true if the inheritor should return a value from its "prev" data structure; false if it should return a * 
value from its "current" data structure. */ - private boolean shouldUsePrevious(final long index) { + private boolean shouldUsePrevious(final long rowKey) { if (prevFlusher == null) { return false; } @@ -471,12 +471,12 @@ private boolean shouldUsePrevious(final long index) { return false; } - final long [] inUse = prevInUse.getInnermostBlockByKeyOrNull(index); + final long [] inUse = prevInUse.getInnermostBlockByKeyOrNull(rowKey); if (inUse == null) { return false; } - final int indexWithinBlock = (int) (index & INDEX_MASK); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerArraySource.java index 3754c9f88e4..b5ce753508f 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerArraySource.java @@ -154,18 +154,18 @@ public final int getInt(long rowKey) { return getUnsafe(rowKey); } - public final int getUnsafe(long index) { - final int blockIndex = (int) (index >> LOG_BLOCK_SIZE); - final int indexWithinBlock = (int) (index & INDEX_MASK); + public final int getUnsafe(long rowKey) { + final int blockIndex = (int) (rowKey >> LOG_BLOCK_SIZE); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); return blocks[blockIndex][indexWithinBlock]; } - public final int getAndSetUnsafe(long index, int newValue) { - final int blockIndex = (int) (index >> LOG_BLOCK_SIZE); - final int indexWithinBlock = (int) (index & INDEX_MASK); + public final int getAndSetUnsafe(long rowKey, int newValue) { + final int blockIndex = (int) (rowKey >> LOG_BLOCK_SIZE); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); final int oldValue = 
blocks[blockIndex][indexWithinBlock]; if (!IntComparisons.eq(oldValue, newValue)) { - if (shouldRecordPrevious(index, prevBlocks, recycler)) { + if (shouldRecordPrevious(rowKey, prevBlocks, recycler)) { prevBlocks[blockIndex][indexWithinBlock] = oldValue; } blocks[blockIndex][indexWithinBlock] = newValue; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerSparseArraySource.java index 1e4b894d7e0..b4b110a92ee 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerSparseArraySource.java @@ -462,7 +462,7 @@ public void prepareForParallelPopulation(final RowSet changedRows) { * @return true if the inheritor should return a value from its "prev" data structure; false if it should return a * value from its "current" data structure. 
*/ - private boolean shouldUsePrevious(final long index) { + private boolean shouldUsePrevious(final long rowKey) { if (prevFlusher == null) { return false; } @@ -471,12 +471,12 @@ private boolean shouldUsePrevious(final long index) { return false; } - final long [] inUse = prevInUse.getInnermostBlockByKeyOrNull(index); + final long [] inUse = prevInUse.getInnermostBlockByKeyOrNull(rowKey); if (inUse == null) { return false; } - final int indexWithinBlock = (int) (index & INDEX_MASK); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LocalDateWrapperSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LocalDateWrapperSource.java index dba746f5c6c..112b3ba09b8 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LocalDateWrapperSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LocalDateWrapperSource.java @@ -70,15 +70,15 @@ protected ColumnSource doReinterpret( @Nullable @Override - public LocalDate get(long index) { - final ZonedDateTime innerVal = adjustZone(inner.get(index)); + public LocalDate get(long rowKey) { + final ZonedDateTime innerVal = adjustZone(inner.get(rowKey)); return innerVal == null ? null : innerVal.toLocalDate(); } @Nullable @Override - public LocalDate getPrev(long index) { - final ZonedDateTime innerVal = adjustZone(inner.getPrev(index)); + public LocalDate getPrev(long rowKey) { + final ZonedDateTime innerVal = adjustZone(inner.getPrev(rowKey)); return innerVal == null ? 
null : innerVal.toLocalDate(); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LocalTimeWrapperSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LocalTimeWrapperSource.java index 40b54968026..fa88887ebb8 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LocalTimeWrapperSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LocalTimeWrapperSource.java @@ -70,15 +70,15 @@ protected ColumnSource doReinterpret( @Nullable @Override - public LocalTime get(long index) { - final ZonedDateTime innerVal = adjustZone(inner.get(index)); + public LocalTime get(long rowKey) { + final ZonedDateTime innerVal = adjustZone(inner.get(rowKey)); return innerVal == null ? null : innerVal.toLocalTime(); } @Nullable @Override - public LocalTime getPrev(long index) { - final ZonedDateTime innerVal = adjustZone(inner.getPrev(index)); + public LocalTime getPrev(long rowKey) { + final ZonedDateTime innerVal = adjustZone(inner.getPrev(rowKey)); return innerVal == null ? 
null : innerVal.toLocalTime(); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongArraySource.java index cabb712d290..3b11b871b06 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongArraySource.java @@ -166,18 +166,18 @@ public final long getLong(long rowKey) { return getUnsafe(rowKey); } - public final long getUnsafe(long index) { - final int blockIndex = (int) (index >> LOG_BLOCK_SIZE); - final int indexWithinBlock = (int) (index & INDEX_MASK); + public final long getUnsafe(long rowKey) { + final int blockIndex = (int) (rowKey >> LOG_BLOCK_SIZE); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); return blocks[blockIndex][indexWithinBlock]; } - public final long getAndSetUnsafe(long index, long newValue) { - final int blockIndex = (int) (index >> LOG_BLOCK_SIZE); - final int indexWithinBlock = (int) (index & INDEX_MASK); + public final long getAndSetUnsafe(long rowKey, long newValue) { + final int blockIndex = (int) (rowKey >> LOG_BLOCK_SIZE); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); final long oldValue = blocks[blockIndex][indexWithinBlock]; if (!LongComparisons.eq(oldValue, newValue)) { - if (shouldRecordPrevious(index, prevBlocks, recycler)) { + if (shouldRecordPrevious(rowKey, prevBlocks, recycler)) { prevBlocks[blockIndex][indexWithinBlock] = oldValue; } blocks[blockIndex][indexWithinBlock] = newValue; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongSparseArraySource.java index a4bd4836a83..4455bfed040 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongSparseArraySource.java +++ 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongSparseArraySource.java @@ -473,7 +473,7 @@ public void prepareForParallelPopulation(final RowSet changedRows) { * @return true if the inheritor should return a value from its "prev" data structure; false if it should return a * value from its "current" data structure. */ - private boolean shouldUsePrevious(final long index) { + private boolean shouldUsePrevious(final long rowKey) { if (prevFlusher == null) { return false; } @@ -482,12 +482,12 @@ private boolean shouldUsePrevious(final long index) { return false; } - final long [] inUse = prevInUse.getInnermostBlockByKeyOrNull(index); + final long [] inUse = prevInUse.getInnermostBlockByKeyOrNull(rowKey); if (inUse == null) { return false; } - final int indexWithinBlock = (int) (index & INDEX_MASK); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeArraySource.java index 1f79dbd4d39..835b1f02a31 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeArraySource.java @@ -76,8 +76,8 @@ public long getPrevLong(long rowKey) { return nanoSource.getPrevLong(rowKey); } - public final long getAndSetUnsafe(long index, long newValue) { - return nanoSource.getAndSetUnsafe(index, newValue); + public final long getAndSetUnsafe(long rowKey, long newValue) { + return nanoSource.getAndSetUnsafe(rowKey, newValue); } @Override diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ObjectSparseArraySource.java 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ObjectSparseArraySource.java index 39d763ad72c..4c5f3b81eb6 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ObjectSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ObjectSparseArraySource.java @@ -449,7 +449,7 @@ public void prepareForParallelPopulation(final RowSet changedRows) { * @return true if the inheritor should return a value from its "prev" data structure; false if it should return a * value from its "current" data structure. */ - private boolean shouldUsePrevious(final long index) { + private boolean shouldUsePrevious(final long rowKey) { if (prevFlusher == null) { return false; } @@ -458,12 +458,12 @@ private boolean shouldUsePrevious(final long index) { return false; } - final long [] inUse = prevInUse.getInnermostBlockByKeyOrNull(index); + final long [] inUse = prevInUse.getInnermostBlockByKeyOrNull(rowKey); if (inUse == null) { return false; } - final int indexWithinBlock = (int) (index & INDEX_MASK); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortArraySource.java index 195a2dd71d6..240eba87ebd 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortArraySource.java @@ -154,18 +154,18 @@ public final short getShort(long rowKey) { return getUnsafe(rowKey); } - public final short getUnsafe(long index) { - final int blockIndex = (int) (index >> LOG_BLOCK_SIZE); - final int indexWithinBlock = (int) (index & INDEX_MASK); + public final short getUnsafe(long rowKey) { + final 
int blockIndex = (int) (rowKey >> LOG_BLOCK_SIZE); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); return blocks[blockIndex][indexWithinBlock]; } - public final short getAndSetUnsafe(long index, short newValue) { - final int blockIndex = (int) (index >> LOG_BLOCK_SIZE); - final int indexWithinBlock = (int) (index & INDEX_MASK); + public final short getAndSetUnsafe(long rowKey, short newValue) { + final int blockIndex = (int) (rowKey >> LOG_BLOCK_SIZE); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); final short oldValue = blocks[blockIndex][indexWithinBlock]; if (!ShortComparisons.eq(oldValue, newValue)) { - if (shouldRecordPrevious(index, prevBlocks, recycler)) { + if (shouldRecordPrevious(rowKey, prevBlocks, recycler)) { prevBlocks[blockIndex][indexWithinBlock] = oldValue; } blocks[blockIndex][indexWithinBlock] = newValue; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortSparseArraySource.java index a4c62a276a3..a72bb0cc576 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortSparseArraySource.java @@ -462,7 +462,7 @@ public void prepareForParallelPopulation(final RowSet changedRows) { * @return true if the inheritor should return a value from its "prev" data structure; false if it should return a * value from its "current" data structure. 
*/ - private boolean shouldUsePrevious(final long index) { + private boolean shouldUsePrevious(final long rowKey) { if (prevFlusher == null) { return false; } @@ -471,12 +471,12 @@ private boolean shouldUsePrevious(final long index) { return false; } - final long [] inUse = prevInUse.getInnermostBlockByKeyOrNull(index); + final long [] inUse = prevInUse.getInnermostBlockByKeyOrNull(rowKey); if (inUse == null) { return false; } - final int indexWithinBlock = (int) (index & INDEX_MASK); + final int indexWithinBlock = (int) (rowKey & INDEX_MASK); final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/UnboxedLongBackedColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/UnboxedLongBackedColumnSource.java index 6ba7ca4ec53..83e8f9a8eaf 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/UnboxedLongBackedColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/UnboxedLongBackedColumnSource.java @@ -19,13 +19,13 @@ public UnboxedLongBackedColumnSource(ColumnSource alternateColumnSource) { } @Override - public long getLong(long index) { - return alternateColumnSource.getLong(index); + public long getLong(long rowKey) { + return alternateColumnSource.getLong(rowKey); } @Override - public long getPrevLong(long index) { - return alternateColumnSource.getPrevLong(index); + public long getPrevLong(long rowKey) { + return alternateColumnSource.getPrevLong(rowKey); } @Override diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/UnboxedTimeBackedColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/UnboxedTimeBackedColumnSource.java index de9f295679e..0525341c460 100644 --- 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/UnboxedTimeBackedColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/UnboxedTimeBackedColumnSource.java @@ -46,13 +46,13 @@ public UnboxedTimeBackedColumnSource(ColumnSource alternateColumnSour protected abstract long toEpochNano(TIME_TYPE val); @Override - public long getLong(long index) { - return toEpochNano(alternateColumnSource.get(index)); + public long getLong(long rowKey) { + return toEpochNano(alternateColumnSource.get(rowKey)); } @Override - public long getPrevLong(long index) { - return toEpochNano(alternateColumnSource.getPrev(index)); + public long getPrevLong(long rowKey) { + return toEpochNano(alternateColumnSource.getPrev(rowKey)); } @Override diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DByteArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DByteArraySource.java index bc449e38d15..e92b40a14ca 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DByteArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DByteArraySource.java @@ -92,12 +92,12 @@ public final byte getByte(long rowKey) { return getUnsafe(rowKey); } - private int keyToSegment(long index) { - return (int)(index >> segmentShift); + private int keyToSegment(long rowKey) { + return (int)(rowKey >> segmentShift); } - private int keyToOffset(long index) { - return (int)(index & segmentMask); + private int keyToOffset(long rowKey) { + return (int)(rowKey & segmentMask); } public final byte getUnsafe(long key) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DCharArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DCharArraySource.java index 4d1aed9783f..93d7bb16f18 100644 --- 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DCharArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DCharArraySource.java @@ -85,12 +85,12 @@ public final char getChar(long rowKey) { return getUnsafe(rowKey); } - private int keyToSegment(long index) { - return (int)(index >> segmentShift); + private int keyToSegment(long rowKey) { + return (int)(rowKey >> segmentShift); } - private int keyToOffset(long index) { - return (int)(index & segmentMask); + private int keyToOffset(long rowKey) { + return (int)(rowKey & segmentMask); } public final char getUnsafe(long key) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DDoubleArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DDoubleArraySource.java index 588407cbdef..ee4bb2de49b 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DDoubleArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DDoubleArraySource.java @@ -90,12 +90,12 @@ public final double getDouble(long rowKey) { return getUnsafe(rowKey); } - private int keyToSegment(long index) { - return (int)(index >> segmentShift); + private int keyToSegment(long rowKey) { + return (int)(rowKey >> segmentShift); } - private int keyToOffset(long index) { - return (int)(index & segmentMask); + private int keyToOffset(long rowKey) { + return (int)(rowKey & segmentMask); } public final double getUnsafe(long key) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DFloatArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DFloatArraySource.java index eff951eabad..d1b6a07bed0 100644 --- 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DFloatArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DFloatArraySource.java @@ -90,12 +90,12 @@ public final float getFloat(long rowKey) { return getUnsafe(rowKey); } - private int keyToSegment(long index) { - return (int)(index >> segmentShift); + private int keyToSegment(long rowKey) { + return (int)(rowKey >> segmentShift); } - private int keyToOffset(long index) { - return (int)(index & segmentMask); + private int keyToOffset(long rowKey) { + return (int)(rowKey & segmentMask); } public final float getUnsafe(long key) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DIntArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DIntArraySource.java index ee21b03d956..2aff04a595e 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DIntArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DIntArraySource.java @@ -90,12 +90,12 @@ public final int getInt(long rowKey) { return getUnsafe(rowKey); } - private int keyToSegment(long index) { - return (int)(index >> segmentShift); + private int keyToSegment(long rowKey) { + return (int)(rowKey >> segmentShift); } - private int keyToOffset(long index) { - return (int)(index & segmentMask); + private int keyToOffset(long rowKey) { + return (int)(rowKey & segmentMask); } public final int getUnsafe(long key) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DLongArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DLongArraySource.java index 33a123aafb9..080eb0a626a 100644 --- 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DLongArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DLongArraySource.java @@ -103,12 +103,12 @@ public final long getLong(long rowKey) { return getUnsafe(rowKey); } - private int keyToSegment(long index) { - return (int)(index >> segmentShift); + private int keyToSegment(long rowKey) { + return (int)(rowKey >> segmentShift); } - private int keyToOffset(long index) { - return (int)(index & segmentMask); + private int keyToOffset(long rowKey) { + return (int)(rowKey & segmentMask); } public final long getUnsafe(long key) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DObjectArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DObjectArraySource.java index 9b91f4d9089..21320732854 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DObjectArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DObjectArraySource.java @@ -89,12 +89,12 @@ public final T get(long rowKey) { return getUnsafe(rowKey); } - private int keyToSegment(long index) { - return (int)(index >> segmentShift); + private int keyToSegment(long rowKey) { + return (int)(rowKey >> segmentShift); } - private int keyToOffset(long index) { - return (int)(index & segmentMask); + private int keyToOffset(long rowKey) { + return (int)(rowKey & segmentMask); } public final T getUnsafe(long key) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DShortArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DShortArraySource.java index b32fe62ae1a..b665fc96331 100644 --- 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DShortArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/Immutable2DShortArraySource.java @@ -90,12 +90,12 @@ public final short getShort(long rowKey) { return getUnsafe(rowKey); } - private int keyToSegment(long index) { - return (int)(index >> segmentShift); + private int keyToSegment(long rowKey) { + return (int)(rowKey >> segmentShift); } - private int keyToOffset(long index) { - return (int)(index & segmentMask); + private int keyToOffset(long rowKey) { + return (int)(rowKey & segmentMask); } public final short getUnsafe(long key) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableByteArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableByteArraySource.java index 7d51b501ae5..49c7fcd0bdf 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableByteArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableByteArraySource.java @@ -77,13 +77,13 @@ public final byte getByte(long rowKey) { return getUnsafe(rowKey); } - public final byte getUnsafe(long index) { - return data[(int)index]; + public final byte getUnsafe(long rowKey) { + return data[(int)rowKey]; } - public final byte getAndSetUnsafe(long index, byte newValue) { - byte oldValue = data[(int)index]; - data[(int)index] = newValue; + public final byte getAndSetUnsafe(long rowKey, byte newValue) { + byte oldValue = data[(int)rowKey]; + data[(int)rowKey] = newValue; return oldValue; } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableCharArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableCharArraySource.java index 0ab7c140525..fe6702aeabe 100644 --- 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableCharArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableCharArraySource.java @@ -70,13 +70,13 @@ public final char getChar(long rowKey) { return getUnsafe(rowKey); } - public final char getUnsafe(long index) { - return data[(int)index]; + public final char getUnsafe(long rowKey) { + return data[(int)rowKey]; } - public final char getAndSetUnsafe(long index, char newValue) { - char oldValue = data[(int)index]; - data[(int)index] = newValue; + public final char getAndSetUnsafe(long rowKey, char newValue) { + char oldValue = data[(int)rowKey]; + data[(int)rowKey] = newValue; return oldValue; } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableDoubleArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableDoubleArraySource.java index aa4748b99fc..12e12321d32 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableDoubleArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableDoubleArraySource.java @@ -75,13 +75,13 @@ public final double getDouble(long rowKey) { return getUnsafe(rowKey); } - public final double getUnsafe(long index) { - return data[(int)index]; + public final double getUnsafe(long rowKey) { + return data[(int)rowKey]; } - public final double getAndSetUnsafe(long index, double newValue) { - double oldValue = data[(int)index]; - data[(int)index] = newValue; + public final double getAndSetUnsafe(long rowKey, double newValue) { + double oldValue = data[(int)rowKey]; + data[(int)rowKey] = newValue; return oldValue; } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableFloatArraySource.java 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableFloatArraySource.java index 3a636512c92..9f422432ab6 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableFloatArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableFloatArraySource.java @@ -75,13 +75,13 @@ public final float getFloat(long rowKey) { return getUnsafe(rowKey); } - public final float getUnsafe(long index) { - return data[(int)index]; + public final float getUnsafe(long rowKey) { + return data[(int)rowKey]; } - public final float getAndSetUnsafe(long index, float newValue) { - float oldValue = data[(int)index]; - data[(int)index] = newValue; + public final float getAndSetUnsafe(long rowKey, float newValue) { + float oldValue = data[(int)rowKey]; + data[(int)rowKey] = newValue; return oldValue; } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableIntArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableIntArraySource.java index 8b06470654e..76435e05241 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableIntArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableIntArraySource.java @@ -75,13 +75,13 @@ public final int getInt(long rowKey) { return getUnsafe(rowKey); } - public final int getUnsafe(long index) { - return data[(int)index]; + public final int getUnsafe(long rowKey) { + return data[(int)rowKey]; } - public final int getAndSetUnsafe(long index, int newValue) { - int oldValue = data[(int)index]; - data[(int)index] = newValue; + public final int getAndSetUnsafe(long rowKey, int newValue) { + int oldValue = data[(int)rowKey]; + data[(int)rowKey] = newValue; return oldValue; } diff --git 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableLongArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableLongArraySource.java index 669891e854a..01aa7642347 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableLongArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableLongArraySource.java @@ -88,13 +88,13 @@ public final long getLong(long rowKey) { return getUnsafe(rowKey); } - public final long getUnsafe(long index) { - return data[(int)index]; + public final long getUnsafe(long rowKey) { + return data[(int)rowKey]; } - public final long getAndSetUnsafe(long index, long newValue) { - long oldValue = data[(int)index]; - data[(int)index] = newValue; + public final long getAndSetUnsafe(long rowKey, long newValue) { + long oldValue = data[(int)rowKey]; + data[(int)rowKey] = newValue; return oldValue; } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableObjectArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableObjectArraySource.java index 9c543764559..d0fc13ce4f8 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableObjectArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableObjectArraySource.java @@ -74,13 +74,13 @@ public final T get(long rowKey) { return getUnsafe(rowKey); } - public final T getUnsafe(long index) { - return (T)data[(int)index]; + public final T getUnsafe(long rowKey) { + return (T)data[(int)rowKey]; } - public final Object getAndSetUnsafe(long index, Object newValue) { - Object oldValue = data[(int)index]; - data[(int)index] = newValue; + public final Object getAndSetUnsafe(long rowKey, Object newValue) { + Object oldValue = data[(int)rowKey]; + data[(int)rowKey] = 
newValue; return oldValue; } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableShortArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableShortArraySource.java index 7e2be9c38c8..dac535d958f 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableShortArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableShortArraySource.java @@ -75,13 +75,13 @@ public final short getShort(long rowKey) { return getUnsafe(rowKey); } - public final short getUnsafe(long index) { - return data[(int)index]; + public final short getUnsafe(long rowKey) { + return data[(int)rowKey]; } - public final short getAndSetUnsafe(long index, short newValue) { - short oldValue = data[(int)index]; - data[(int)index] = newValue; + public final short getAndSetUnsafe(long rowKey, short newValue) { + short oldValue = data[(int)rowKey]; + data[(int)rowKey] = newValue; return oldValue; } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceDateTime.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceDateTime.java index 0eb4edd9eb0..d8f386021ec 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceDateTime.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceDateTime.java @@ -35,10 +35,11 @@ public RegionedColumnSourceDateTime(@NotNull final RegionedColumnSourceLong destination, - Chunk source, RowSequence rowSequence) { - WritableObjectChunk objectChunk = destination.asWritableObjectChunk(); - LongChunk longChunk = source.asLongChunk(); + public void convertRegion( + @NotNull final WritableChunk destination, + @NotNull final Chunk source, RowSequence rowSequence) { + final WritableObjectChunk objectChunk = 
destination.asWritableObjectChunk(); + final LongChunk longChunk = source.asLongChunk(); final int size = objectChunk.size(); final int length = longChunk.size(); @@ -50,7 +51,7 @@ public void convertRegion(WritableChunk destination, } @Override - public DateTime get(long rowKey) { + public DateTime get(final long rowKey) { return rowKey == RowSequence.NULL_ROW_KEY ? null : DateTimeUtils.nanosToTime(getNativeSource().lookupRegion(rowKey).getLong(rowKey)); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceInner.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceInner.java deleted file mode 100644 index e47edd0b321..00000000000 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceInner.java +++ /dev/null @@ -1,62 +0,0 @@ -/** - * Copyright (c) 2016-2022 Deephaven Data Labs and Patent Pending - */ -package io.deephaven.engine.table.impl.sources.regioned; - -import io.deephaven.chunk.attributes.Values; -import io.deephaven.engine.table.ColumnDefinition; -import io.deephaven.engine.table.impl.locations.ColumnLocation; -import org.jetbrains.annotations.NotNull; - -import javax.annotation.OverridingMethodsMustInvokeSuper; - -/** - *

- * Base class for column source which reaches its regions by reaching into the regions of an outer column source. These - * derive directly from {@link RegionedColumnSourceBase}, and thus don't maintain their own array of regions. - *

- * - *

- * Extending classes will typically override {@link RegionedPageStore#getRegion(int)} to reach into the outer column - * source. - *

- */ -abstract class RegionedColumnSourceInner, OUTER_DATA_TYPE, OUTER_REGION_TYPE extends ColumnRegion> - extends RegionedColumnSourceBase { - - private final RegionedColumnSourceBase outerColumnSource; - - RegionedColumnSourceInner(@NotNull Class type, - RegionedColumnSourceBase outerColumnSource) { - super(type); - this.outerColumnSource = outerColumnSource; - } - - @Override - final int addRegionForUnitTests(OTHER_REGION_TYPE region) { - return outerColumnSource.addRegionForUnitTests(region); - } - - @Override - public final int addRegion(@NotNull ColumnDefinition columnDefinition, @NotNull ColumnLocation columnLocation) { - return outerColumnSource.addRegion(columnDefinition, columnLocation); - } - - @Override - public final int getRegionCount() { - return outerColumnSource.getRegionCount(); - } - - @Override - @OverridingMethodsMustInvokeSuper - public void releaseCachedResources() { - // We are a reinterpreted column of the outer column source, so if we're asked to release our resources, release - // the real resources in the underlying column. 
- super.releaseCachedResources(); - getOuterColumnSource().releaseCachedResources(); - } - - final RegionedColumnSourceBase getOuterColumnSource() { - return outerColumnSource; - } -} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceInstant.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceInstant.java index 6f1d1414fe5..719f8521c2c 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceInstant.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceInstant.java @@ -30,17 +30,17 @@ public RegionedColumnSourceInstant() { this(new RegionedColumnSourceLong.AsValues()); } - public RegionedColumnSourceInstant(final @NotNull RegionedColumnSourceLong inner) { + public RegionedColumnSourceInstant(@NotNull final RegionedColumnSourceLong inner) { super(ColumnRegionLong.createNull(PARAMETERS.regionMask), Instant.class, inner); } @Override public void convertRegion( - WritableChunk destination, - Chunk source, - RowSequence rowSequence) { - WritableObjectChunk objectChunk = destination.asWritableObjectChunk(); - LongChunk longChunk = source.asLongChunk(); + @NotNull final WritableChunk destination, + @NotNull final Chunk source, + @NotNull final RowSequence rowSequence) { + final WritableObjectChunk objectChunk = destination.asWritableObjectChunk(); + final LongChunk longChunk = source.asLongChunk(); final int size = objectChunk.size(); final int length = longChunk.size(); @@ -52,9 +52,9 @@ public void convertRegion( } @Override - public Instant get(long elementIndex) { - return elementIndex == RowSequence.NULL_ROW_KEY ? null - : DateTimeUtils.makeInstant(getNativeSource().lookupRegion(elementIndex).getLong(elementIndex)); + public Instant get(final long rowKey) { + return rowKey == RowSequence.NULL_ROW_KEY ? 
null + : DateTimeUtils.makeInstant(getNativeSource().lookupRegion(rowKey).getLong(rowKey)); } @Override @@ -68,7 +68,7 @@ public ColumnSource toInstant() { } @Override - public ColumnSource toZonedDateTime(final @NotNull ZoneId zone) { + public ColumnSource toZonedDateTime(@NotNull final ZoneId zone) { return new RegionedColumnSourceZonedDateTime(zone, (RegionedColumnSourceLong) getNativeSource()); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceZonedDateTime.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceZonedDateTime.java index 8491d4e98ac..2ed99effdb1 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceZonedDateTime.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/regioned/RegionedColumnSourceZonedDateTime.java @@ -38,17 +38,19 @@ public static ColumnSource asLocalTime(ZoneId zone, RegionedColumnSou return new LocalTimeWrapperSource(new RegionedColumnSourceZonedDateTime(zone, inner), zone); } - public RegionedColumnSourceZonedDateTime(final @NotNull ZoneId zone, - final @NotNull RegionedColumnSourceLong inner) { + public RegionedColumnSourceZonedDateTime( + @NotNull final ZoneId zone, + @NotNull final RegionedColumnSourceLong inner) { super(ColumnRegionLong.createNull(PARAMETERS.regionMask), ZonedDateTime.class, inner); this.zone = zone; } @Override - public void convertRegion(WritableChunk destination, - Chunk source, RowSequence rowSequence) { - WritableObjectChunk objectChunk = destination.asWritableObjectChunk(); - LongChunk longChunk = source.asLongChunk(); + public void convertRegion( + @NotNull final WritableChunk destination, + @NotNull final Chunk source, RowSequence rowSequence) { + final WritableObjectChunk objectChunk = destination.asWritableObjectChunk(); + final LongChunk longChunk = source.asLongChunk(); final int size = objectChunk.size(); final int 
length = longChunk.size(); @@ -60,9 +62,9 @@ public void convertRegion(WritableChunk destination, } @Override - public ZonedDateTime get(long elementIndex) { - return elementIndex == RowSequence.NULL_ROW_KEY ? null - : DateTimeUtils.makeZonedDateTime(getNativeSource().lookupRegion(elementIndex).getLong(elementIndex), + public ZonedDateTime get(final long rowKey) { + return rowKey == RowSequence.NULL_ROW_KEY ? null + : DateTimeUtils.makeZonedDateTime(getNativeSource().lookupRegion(rowKey).getLong(rowKey), zone); } @@ -77,7 +79,7 @@ public boolean supportsTimeConversion() { } @Override - public ColumnSource toZonedDateTime(final @NotNull ZoneId zone) { + public ColumnSource toZonedDateTime(@NotNull final ZoneId zone) { if (this.zone.equals(zone)) { return this; } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractBooleanColumnSourceTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractBooleanColumnSourceTest.java index 0bde34b3dbe..634a3823801 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractBooleanColumnSourceTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractBooleanColumnSourceTest.java @@ -21,8 +21,11 @@ import io.deephaven.engine.rowset.RowSetBuilderSequential; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.engine.table.ChunkSink; +import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.WritableColumnSource; +import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.engine.table.impl.TestSourceSink; import io.deephaven.engine.updategraph.UpdateGraphProcessor; import org.jetbrains.annotations.NotNull; @@ -34,6 +37,7 @@ import java.util.Random; import static io.deephaven.util.QueryConstants.NULL_BOOLEAN; +import static junit.framework.TestCase.*; import 
static junit.framework.TestCase.assertEquals; public abstract class AbstractBooleanColumnSourceTest { From 14dd5e83c5ef25635e64e041a5c968871b1456a9 Mon Sep 17 00:00:00 2001 From: Nathaniel Bauernfeind Date: Tue, 28 Feb 2023 11:19:39 -0700 Subject: [PATCH 10/14] Fix Barrage + JsAPI support of Instant/ZonedDateTime --- .../io/deephaven/util/type/TypeUtils.java | 19 ++--- .../table/impl/by/AggregationProcessor.java | 6 -- .../impl/preview/ColumnPreviewManager.java | 3 +- .../select/analyzers/SelectColumnLayer.java | 10 ++- .../impl/sources/InMemoryColumnSource.java | 1 + .../table/impl/sources/ReinterpretUtils.java | 83 +++++++++++++++++++ .../AbstractBooleanColumnSourceTest.java | 3 - .../sources/AbstractByteColumnSourceTest.java | 3 - .../AbstractCharacterColumnSourceTest.java | 3 - .../AbstractDoubleColumnSourceTest.java | 3 - .../AbstractFloatColumnSourceTest.java | 3 - .../AbstractIntegerColumnSourceTest.java | 3 - .../sources/AbstractLongColumnSourceTest.java | 3 - .../AbstractObjectColumnSourceTest.java | 3 - .../AbstractShortColumnSourceTest.java | 3 - .../barrage/table/BarrageTable.java | 2 +- .../extensions/barrage/util/BarrageUtil.java | 2 +- .../chunk/BarrageColumnRoundTripTest.java | 70 +++++++++------- .../test/FlightMessageRoundTripTest.java | 4 +- .../client/api/barrage/WebBarrageUtils.java | 2 + .../subscription/SubscriptionTableData.java | 2 + .../client/api/subscription/ViewportData.java | 2 + .../io/deephaven/web/client/fu/JsData.java | 4 +- 23 files changed, 151 insertions(+), 86 deletions(-) diff --git a/Util/src/main/java/io/deephaven/util/type/TypeUtils.java b/Util/src/main/java/io/deephaven/util/type/TypeUtils.java index 8ae786a6b9e..a67f5b9d1d8 100644 --- a/Util/src/main/java/io/deephaven/util/type/TypeUtils.java +++ b/Util/src/main/java/io/deephaven/util/type/TypeUtils.java @@ -11,6 +11,7 @@ import java.math.BigDecimal; import java.math.BigInteger; import java.time.Instant; +import java.time.ZonedDateTime; import java.util.*; import 
java.util.stream.Collectors; @@ -513,14 +514,14 @@ public static boolean isCharacter(@NotNull final Class c) { } /** - * Whether the class is a Date, DateTime, or Instant. + * Whether the class is a DateTime, ZonedDateTime, or Instant. * * @param type The class. - * @return true if the type is a {@link Date}, DateTime or {@link Instant}. + * @return true if the type is a DateTime, {@link java.time.ZonedDateTime} or {@link Instant}. */ public static boolean isDateTime(Class type) { - return Date.class.isAssignableFrom(type) - || Instant.class.isAssignableFrom(type) + return Instant.class.isAssignableFrom(type) + || ZonedDateTime.class.isAssignableFrom(type) || (type.getAnnotation(IsDateTime.class) != null && type.getAnnotation(IsDateTime.class).value()); } @@ -544,16 +545,6 @@ public static boolean isBigNumeric(Class type) { return BigInteger.class.isAssignableFrom(type) || BigDecimal.class.isAssignableFrom(type); } - /** - * Checks if a type is primitive or {@link Serializable}. - * - * @param type the class - * @return true if the type is primitive or Serializable - */ - public static boolean isPrimitiveOrSerializable(Class type) { - return type.isPrimitive() || Serializable.class.isAssignableFrom(type); - } - /** * Checks if the type is a primitive or Boxed floate type (double or float). 
* diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/AggregationProcessor.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/AggregationProcessor.java index 7ded1563260..715ed24638f 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/AggregationProcessor.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/AggregationProcessor.java @@ -95,7 +95,6 @@ import io.deephaven.engine.table.impl.by.ssmcountdistinct.unique.ShortRollupUniqueOperator; import io.deephaven.engine.table.impl.by.ssmminmax.SsmChunkedMinMaxOperator; import io.deephaven.engine.table.impl.by.ssmpercentile.SsmChunkedPercentileOperator; -import io.deephaven.engine.table.impl.sources.ConvertableTimeSource; import io.deephaven.engine.table.impl.sources.ReinterpretUtils; import io.deephaven.engine.table.impl.ssms.SegmentedSortedMultiSet; import io.deephaven.engine.table.impl.util.freezeby.FreezeByCountOperator; @@ -109,8 +108,6 @@ import java.math.BigDecimal; import java.math.BigInteger; -import java.time.ZoneId; -import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -510,9 +507,6 @@ final void addMinOrMaxOperator(final boolean isMin, @NotNull final String inputN return; } } - if (rawInputSource instanceof ConvertableTimeSource.Zoned) { - ZoneId id = ((ConvertableTimeSource.Zoned) rawInputSource).getZone(); - } addOperator(makeMinOrMaxOperator(type, resultName, isMin, isAddOnly || isStream), inputSource, inputName); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/preview/ColumnPreviewManager.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/preview/ColumnPreviewManager.java index b3fd5a3df69..0b8ed041b0f 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/preview/ColumnPreviewManager.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/preview/ColumnPreviewManager.java @@ -98,8 +98,7 @@ 
public static Table applyPreview(final Table table) { // Always wrap arrays selectColumns.add(arrayPreviewFactory.makeColumn(name)); originalTypes.put(name, typeName); - } else if (!isColumnTypeDisplayable(type) - || !io.deephaven.util.type.TypeUtils.isPrimitiveOrSerializable(type)) { + } else if (!isColumnTypeDisplayable(type)) { // Always wrap non-displayable and non-serializable types selectColumns.add(nonDisplayableFactory.makeColumn(name)); originalTypes.put(name, typeName); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/analyzers/SelectColumnLayer.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/analyzers/SelectColumnLayer.java index 97fbc87346a..1b57df424e5 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/analyzers/SelectColumnLayer.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/analyzers/SelectColumnLayer.java @@ -76,7 +76,7 @@ final public class SelectColumnLayer extends SelectOrViewColumnLayer { boolean flattenedResult, boolean alreadyFlattenedSources) { super(inner, name, sc, ws, underlying, deps, mcsBuilder); this.parentRowSet = parentRowSet; - this.writableSource = (WritableColumnSource) ReinterpretUtils.maybeConvertToPrimitive(ws); + this.writableSource = ReinterpretUtils.maybeConvertToWritablePrimitive(ws); this.isRedirected = isRedirected; this.executionContext = ExecutionContext.getContextToRecord(); @@ -109,7 +109,13 @@ final public class SelectColumnLayer extends SelectOrViewColumnLayer { private ChunkSource getChunkSource() { if (chunkSource == null) { - chunkSource = ReinterpretUtils.maybeConvertToPrimitive(selectColumn.getDataView()); + ColumnSource dataSource = selectColumn.getDataView(); + if (dataSource.getType() != writableSource.getType()) { + // this should only occur when using primitives internally and the user has requested a non-primitive + chunkSource = ReinterpretUtils.maybeConvertToPrimitive(dataSource); + } else { + 
chunkSource = dataSource; + } if (selectColumnHoldsVector) { chunkSource = new VectorChunkAdapter<>(chunkSource); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/InMemoryColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/InMemoryColumnSource.java index 5579307d3f5..d36db62dcf6 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/InMemoryColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/InMemoryColumnSource.java @@ -18,6 +18,7 @@ import org.jetbrains.annotations.Nullable; import java.time.Instant; +import java.time.ZonedDateTime; /** * This is a marker interface for a column source that is entirely within memory; therefore select operations should not diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ReinterpretUtils.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ReinterpretUtils.java index a454abd08cb..2ce84d18abb 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ReinterpretUtils.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ReinterpretUtils.java @@ -5,6 +5,7 @@ import io.deephaven.chunk.ChunkType; import io.deephaven.engine.table.ColumnSource; +import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.time.DateTime; import org.jetbrains.annotations.NotNull; @@ -98,6 +99,67 @@ public static ColumnSource zonedDateTimeToLongSource(final @NotNull Column } } + /** + * Given a DateTime column source turn it into a long column source via reinterpretation if possible. 
+ * + * @param source the source to turn into a long source + * + * @return the long source or null if it could not be reinterpretted + */ + public static WritableColumnSource writableDateTimeToLongSource(WritableColumnSource source) { + if (source.allowsReinterpret(long.class)) { + return (WritableColumnSource) source.reinterpret(long.class); + } + return null; + } + + /** + * Given a Boolean column source turn it into a byte column source via reinterpretation if possible. + * + * @param source the source to turn into a byte source + * + * @return the byte source or null if it could not be reinterpretted + */ + public static WritableColumnSource writableBooleanToByteSource(WritableColumnSource source) { + if (source.allowsReinterpret(byte.class)) { + return (WritableColumnSource) source.reinterpret(byte.class); + } + return null; + } + + /** + * Given an {@link Instant} column source turn it into a long column source, either via reinterpretation or + * wrapping. + * + * @param source the source to turn into a long source + * + * @return the long source or null if it could not be reinterpretted + */ + @NotNull + public static WritableColumnSource writableInstantToLongSource(final @NotNull WritableColumnSource source) { + if (source.allowsReinterpret(long.class)) { + return (WritableColumnSource) source.reinterpret(long.class); + } + return null; + } + + /** + * Given a {@link ZonedDateTime} column source turn it into a long column source, either via reinterpretation or + * wrapping. 
+ * + * @param source the source to turn into a long source + * + * @return the long source or null if it could not be reinterpretted + */ + @NotNull + public static WritableColumnSource writableZonedDateTimeToLongSource( + final @NotNull WritableColumnSource source) { + if (source.allowsReinterpret(long.class)) { + return (WritableColumnSource) source.reinterpret(long.class); + } + return null; + } + /** * If source is something that we prefer to handle as a primitive, do the appropriate conversion. * @@ -120,6 +182,27 @@ public static ColumnSource maybeConvertToPrimitive(ColumnSource source) { return source; } + + /** + * If source is something that we prefer to handle as a primitive, do the appropriate conversion. + * + * @param source The source to convert + * @return If possible, the source converted to a writable primitive, otherwise the source + */ + public static WritableColumnSource maybeConvertToWritablePrimitive(WritableColumnSource source) { + WritableColumnSource result = null; + if (source.getType() == Boolean.class || source.getType() == boolean.class) { + result = writableBooleanToByteSource(source); + } else if (source.getType() == DateTime.class) { + result = writableDateTimeToLongSource(source); + } else if (source.getType() == Instant.class) { + result = writableInstantToLongSource(source); + } else if (source.getType() == ZonedDateTime.class) { + result = writableZonedDateTimeToLongSource(source); + } + return result == null ? source : result; + } + /** * If {@code dataType} is something that we prefer to handle as a primitive, emit the appropriate {@link ChunkType}, * else the normal ChunkType for the data type. 
diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractBooleanColumnSourceTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractBooleanColumnSourceTest.java index 634a3823801..3530d9d036b 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractBooleanColumnSourceTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractBooleanColumnSourceTest.java @@ -21,8 +21,6 @@ import io.deephaven.engine.rowset.RowSetBuilderSequential; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.chunkattributes.RowKeys; -import io.deephaven.engine.table.ChunkSink; -import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.DefaultGetContext; @@ -38,7 +36,6 @@ import static io.deephaven.util.QueryConstants.NULL_BOOLEAN; import static junit.framework.TestCase.*; -import static junit.framework.TestCase.assertEquals; public abstract class AbstractBooleanColumnSourceTest { @NotNull diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractByteColumnSourceTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractByteColumnSourceTest.java index 794a2d80793..7af7589d9f8 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractByteColumnSourceTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractByteColumnSourceTest.java @@ -15,8 +15,6 @@ import io.deephaven.engine.rowset.RowSetBuilderSequential; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.chunkattributes.RowKeys; -import io.deephaven.engine.table.ChunkSink; -import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.WritableColumnSource; 
import io.deephaven.engine.table.impl.DefaultGetContext; @@ -32,7 +30,6 @@ import static io.deephaven.util.QueryConstants.NULL_BYTE; import static junit.framework.TestCase.*; -import static junit.framework.TestCase.assertEquals; public abstract class AbstractByteColumnSourceTest { @NotNull diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractCharacterColumnSourceTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractCharacterColumnSourceTest.java index 9cee8657df4..e3a116567c6 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractCharacterColumnSourceTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractCharacterColumnSourceTest.java @@ -10,8 +10,6 @@ import io.deephaven.engine.rowset.RowSetBuilderSequential; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.chunkattributes.RowKeys; -import io.deephaven.engine.table.ChunkSink; -import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.DefaultGetContext; @@ -27,7 +25,6 @@ import static io.deephaven.util.QueryConstants.NULL_CHAR; import static junit.framework.TestCase.*; -import static junit.framework.TestCase.assertEquals; public abstract class AbstractCharacterColumnSourceTest { @NotNull diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractDoubleColumnSourceTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractDoubleColumnSourceTest.java index 6fe6ce2ebc9..b0ed0fcf41c 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractDoubleColumnSourceTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractDoubleColumnSourceTest.java @@ -15,8 +15,6 @@ import 
io.deephaven.engine.rowset.RowSetBuilderSequential; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.chunkattributes.RowKeys; -import io.deephaven.engine.table.ChunkSink; -import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.DefaultGetContext; @@ -32,7 +30,6 @@ import static io.deephaven.util.QueryConstants.NULL_DOUBLE; import static junit.framework.TestCase.*; -import static junit.framework.TestCase.assertEquals; public abstract class AbstractDoubleColumnSourceTest { @NotNull diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractFloatColumnSourceTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractFloatColumnSourceTest.java index 9bc41f44018..2718508c28f 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractFloatColumnSourceTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractFloatColumnSourceTest.java @@ -15,8 +15,6 @@ import io.deephaven.engine.rowset.RowSetBuilderSequential; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.chunkattributes.RowKeys; -import io.deephaven.engine.table.ChunkSink; -import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.DefaultGetContext; @@ -32,7 +30,6 @@ import static io.deephaven.util.QueryConstants.NULL_FLOAT; import static junit.framework.TestCase.*; -import static junit.framework.TestCase.assertEquals; public abstract class AbstractFloatColumnSourceTest { @NotNull diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractIntegerColumnSourceTest.java 
b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractIntegerColumnSourceTest.java index 4f2721f5e07..70010333460 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractIntegerColumnSourceTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractIntegerColumnSourceTest.java @@ -15,8 +15,6 @@ import io.deephaven.engine.rowset.RowSetBuilderSequential; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.chunkattributes.RowKeys; -import io.deephaven.engine.table.ChunkSink; -import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.DefaultGetContext; @@ -32,7 +30,6 @@ import static io.deephaven.util.QueryConstants.NULL_INT; import static junit.framework.TestCase.*; -import static junit.framework.TestCase.assertEquals; public abstract class AbstractIntegerColumnSourceTest { @NotNull diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractLongColumnSourceTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractLongColumnSourceTest.java index 842763b7175..19cff33b94a 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractLongColumnSourceTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractLongColumnSourceTest.java @@ -15,8 +15,6 @@ import io.deephaven.engine.rowset.RowSetBuilderSequential; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.chunkattributes.RowKeys; -import io.deephaven.engine.table.ChunkSink; -import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.DefaultGetContext; @@ -32,7 +30,6 @@ import static 
io.deephaven.util.QueryConstants.NULL_LONG; import static junit.framework.TestCase.*; -import static junit.framework.TestCase.assertEquals; public abstract class AbstractLongColumnSourceTest { @NotNull diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractObjectColumnSourceTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractObjectColumnSourceTest.java index 35f1fbbddf1..af31855359f 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractObjectColumnSourceTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractObjectColumnSourceTest.java @@ -15,8 +15,6 @@ import io.deephaven.engine.rowset.RowSetBuilderSequential; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.chunkattributes.RowKeys; -import io.deephaven.engine.table.ChunkSink; -import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.DefaultGetContext; @@ -31,7 +29,6 @@ import java.util.Random; import static junit.framework.TestCase.*; -import static junit.framework.TestCase.assertEquals; public abstract class AbstractObjectColumnSourceTest { @NotNull diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractShortColumnSourceTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractShortColumnSourceTest.java index 46c0a0748e0..2d68d46fe5e 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractShortColumnSourceTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractShortColumnSourceTest.java @@ -15,8 +15,6 @@ import io.deephaven.engine.rowset.RowSetBuilderSequential; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.chunkattributes.RowKeys; -import 
io.deephaven.engine.table.ChunkSink; -import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.DefaultGetContext; @@ -32,7 +30,6 @@ import static io.deephaven.util.QueryConstants.NULL_SHORT; import static junit.framework.TestCase.*; -import static junit.framework.TestCase.assertEquals; public abstract class AbstractShortColumnSourceTest { @NotNull diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/table/BarrageTable.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/table/BarrageTable.java index 00d50e978a5..67a6ca56862 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/table/BarrageTable.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/table/BarrageTable.java @@ -160,7 +160,7 @@ protected BarrageTable(final UpdateSourceRegistrar registrar, this.destSources = new WritableColumnSource[writableSources.length]; for (int ii = 0; ii < writableSources.length; ++ii) { - destSources[ii] = (WritableColumnSource) ReinterpretUtils.maybeConvertToPrimitive(writableSources[ii]); + destSources[ii] = ReinterpretUtils.maybeConvertToWritablePrimitive(writableSources[ii]); } // we always start empty, and can be notified this cycle if we are refreshed diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java index 7ec05158c09..44a81bd8883 100755 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java @@ -506,7 +506,7 @@ private static ConvertedArrowSchema convertArrowSchema( private static boolean isTypeNativelySupported(final Class typ) { if (typ.isPrimitive() || TypeUtils.isBoxedType(typ) || 
supportedTypes.contains(typ) - || Vector.class.isAssignableFrom(typ)) { + || Vector.class.isAssignableFrom(typ) || TypeUtils.isDateTime(typ)) { return true; } if (typ.isArray()) { diff --git a/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java b/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java index ed56c7a26da..dd505c4b155 100644 --- a/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java +++ b/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java @@ -24,8 +24,10 @@ import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.WritableShortChunk; import io.deephaven.extensions.barrage.util.BarrageProtoUtil; +import io.deephaven.time.DateTime; import io.deephaven.util.BooleanUtils; import io.deephaven.util.QueryConstants; +import io.deephaven.util.SafeCloseable; import io.deephaven.vector.LongVector; import io.deephaven.vector.LongVectorDirect; import org.apache.commons.lang3.mutable.MutableInt; @@ -34,6 +36,7 @@ import java.io.ByteArrayInputStream; import java.io.DataInput; import java.io.IOException; +import java.time.Instant; import java.util.ArrayList; import java.util.Random; import java.util.function.Consumer; @@ -261,6 +264,30 @@ public void testDoubleChunkSerialization() throws IOException { } } + public void testDateTimeChunkSerialization() throws IOException { + final Random random = new Random(0); + for (final BarrageSubscriptionOptions opts : options) { + testRoundTripSerialization(opts, DateTime.class, (utO) -> { + final WritableObjectChunk chunk = utO.asWritableObjectChunk(); + for (int i = 0; i < chunk.size(); ++i) { + chunk.set(i, i % 7 == 0 ? 
null : new DateTime(random.nextLong())); + } + }, new ObjectIdentityValidator<>()); + } + } + + public void testInstantChunkSerialization() throws IOException { + final Random random = new Random(0); + for (final BarrageSubscriptionOptions opts : options) { + testRoundTripSerialization(opts, Instant.class, (utO) -> { + final WritableObjectChunk chunk = utO.asWritableObjectChunk(); + for (int i = 0; i < chunk.size(); ++i) { + chunk.set(i, i % 7 == 0 ? null : Instant.ofEpochSecond(0, random.nextLong())); + } + }, new ObjectIdentityValidator<>()); + } + } + public void testObjectSerialization() throws IOException { testRoundTripSerialization(OPT_DEFAULT, Object.class, initObjectChunk(Integer::toString), new ObjectIdentityValidator<>()); @@ -343,7 +370,7 @@ private static Consumer> initObjectChunk(final IntFunc } private static void initStringArrayChunk(final WritableChunk untypedChunk) { - final Random random = new Random(); + final Random random = new Random(0); final WritableObjectChunk chunk = untypedChunk.asWritableObjectChunk(); for (int i = 0; i < chunk.size(); ++i) { @@ -361,7 +388,7 @@ private static void initStringArrayChunk(final WritableChunk untypedChun } private static void initLongArrayChunk(final WritableChunk untypedChunk) { - final Random random = new Random(); + final Random random = new Random(0); final WritableObjectChunk chunk = untypedChunk.asWritableObjectChunk(); for (int i = 0; i < chunk.size(); ++i) { @@ -379,7 +406,7 @@ private static void initLongArrayChunk(final WritableChunk untypedChunk) } private static void initLongVectorChunk(final WritableChunk untypedChunk) { - final Random random = new Random(); + final Random random = new Random(0); final WritableObjectChunk chunk = untypedChunk.asWritableObjectChunk(); for (int i = 0; i < chunk.size(); ++i) { @@ -536,35 +563,16 @@ private static void testRoundTripSerialization( chunkType = ChunkType.fromElementType(type); } - final WritableChunk data = chunkType.makeWritableChunk(4096); - - 
initData.accept(data); - - try (ChunkInputStreamGenerator generator = - ChunkInputStreamGenerator.makeInputStreamGenerator(chunkType, type, type.getComponentType(), data, 0)) { + final WritableChunk srcData = chunkType.makeWritableChunk(4096); + initData.accept(srcData); - // full sub logic - try (final BarrageProtoUtil.ExposedByteArrayOutputStream baos = - new BarrageProtoUtil.ExposedByteArrayOutputStream(); - final ChunkInputStreamGenerator.DrainableColumn column = - generator.getInputStream(options, null)) { + // The generator owns data; it is allowed to close it prematurely if the data needs to be converted to primitive + final WritableChunk data = chunkType.makeWritableChunk(4096); + data.copyFromChunk(srcData, 0, 0, srcData.size()); - final ArrayList fieldNodes = new ArrayList<>(); - column.visitFieldNodes((numElements, nullCount) -> fieldNodes - .add(new ChunkInputStreamGenerator.FieldNodeInfo(numElements, nullCount))); - final TLongArrayList bufferNodes = new TLongArrayList(); - column.visitBuffers(bufferNodes::add); - column.drainTo(baos); - final DataInput dis = - new LittleEndianDataInputStream(new ByteArrayInputStream(baos.peekBuffer(), 0, baos.size())); - try (final WritableChunk rtData = - ChunkInputStreamGenerator.extractChunkFromInputStream(options, - chunkType, type, type.getComponentType(), fieldNodes.iterator(), bufferNodes.iterator(), - dis, null, 0, 0)) { - Assert.eq(data.size(), "data.size()", rtData.size(), "rtData.size()"); - validator.assertExpected(data, rtData, null, 0); - } - } + try (SafeCloseable ignored = data; + ChunkInputStreamGenerator generator = ChunkInputStreamGenerator.makeInputStreamGenerator( + chunkType, type, type.getComponentType(), srcData, 0)) { // empty subset try (final BarrageProtoUtil.ExposedByteArrayOutputStream baos = @@ -589,7 +597,7 @@ private static void testRoundTripSerialization( } // swiss cheese subset - final Random random = new Random(); + final Random random = new Random(0); final RowSetBuilderSequential 
builder = RowSetFactory.builderSequential(); for (int i = 0; i < data.size(); ++i) { if (random.nextBoolean()) { diff --git a/server/test/src/main/java/io/deephaven/server/test/FlightMessageRoundTripTest.java b/server/test/src/main/java/io/deephaven/server/test/FlightMessageRoundTripTest.java index c125f140785..48287fc5dcb 100644 --- a/server/test/src/main/java/io/deephaven/server/test/FlightMessageRoundTripTest.java +++ b/server/test/src/main/java/io/deephaven/server/test/FlightMessageRoundTripTest.java @@ -535,8 +535,10 @@ public void testRoundTripData() throws Exception { } @Test - public void testTimestampColumn() throws Exception { + public void testTimestampColumns() throws Exception { assertRoundTripDataEqual(TableTools.emptyTable(10).update("tm = DateTime.now()")); + assertRoundTripDataEqual(TableTools.emptyTable(10).update("instant = java.time.Instant.now()")); + assertRoundTripDataEqual(TableTools.emptyTable(10).update("zonedDateTime = java.time.ZonedDateTime.now()")); } @Test diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java index c8b01538819..53def08f484 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java @@ -428,6 +428,8 @@ private static ColumnData readArrowBuffer(ByteBuffer data, Iter nodes // longs are a special case despite being java primitives case "long": case "io.deephaven.time.DateTime": + case "java.time.Instant": + case "java.time.ZonedDateTime": assert positions.length().toFloat64() >= size * 8; long[] longArray = new long[size]; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/SubscriptionTableData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/SubscriptionTableData.java index f022cb2bae5..16fa93df859 
100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/SubscriptionTableData.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/SubscriptionTableData.java @@ -277,6 +277,8 @@ private ArrayCopy arrayCopyFuncForColumn(@Nullable Column column) { Js.asArrayLike(destArray).setAt((int) destPos, LongWrapper.of(value)); } }; + case "java.time.Instant": + case "java.time.ZonedDateTime": case "io.deephaven.time.DateTime": return (destArray, destPos, srcArray, srcPos) -> { long value = Js.asArrayLike(srcArray).getAtAsAny(srcPos).asLong(); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java index 109b655cbb9..ff47842965e 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java @@ -179,6 +179,8 @@ public static Object cleanData(Object dataColumn, Column column) { } return cleanData; } + case "java.time.Instant": + case "java.time.ZonedDateTime": case "io.deephaven.time.DateTime": { JsArray values = Js.uncheckedCast(dataColumn); DateWrapper[] cleanData = new DateWrapper[values.length]; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/fu/JsData.java b/web/client-api/src/main/java/io/deephaven/web/client/fu/JsData.java index 179a72bbcce..45dcfaf9d2d 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/fu/JsData.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/fu/JsData.java @@ -21,6 +21,8 @@ public static JsArray newArray(String type) { return Js.uncheckedCast(new double[0]); case "float": return Js.uncheckedCast(new float[0]); + case "java.time.Instant": + case "java.time.ZonedDateTime": case "io.deephaven.time.DateTime": case "long": return Js.uncheckedCast(new long[0]); @@ -40,7 +42,7 @@ public static 
JsArray newArray(String type) { /** * Gets a required property from a JsPropertyMap. Will throw if the value isn't set - * + * * @param source The property map to get the value from * @param propertyName The property to get value for * @return The value From 2d08bf16d6956c286f16fa7c1a22be6ff0449976 Mon Sep 17 00:00:00 2001 From: Nathaniel Bauernfeind Date: Tue, 28 Feb 2023 11:36:18 -0700 Subject: [PATCH 11/14] spotless apply --- .../deephaven/engine/table/impl/sources/ReinterpretUtils.java | 3 ++- .../extensions/barrage/chunk/BarrageColumnRoundTripTest.java | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ReinterpretUtils.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ReinterpretUtils.java index 2ce84d18abb..5c8674efcfb 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ReinterpretUtils.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ReinterpretUtils.java @@ -136,7 +136,8 @@ public static WritableColumnSource writableBooleanToByteSource(WritableColumn * @return the long source or null if it could not be reinterpretted */ @NotNull - public static WritableColumnSource writableInstantToLongSource(final @NotNull WritableColumnSource source) { + public static WritableColumnSource writableInstantToLongSource( + final @NotNull WritableColumnSource source) { if (source.allowsReinterpret(long.class)) { return (WritableColumnSource) source.reinterpret(long.class); } diff --git a/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java b/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java index dd505c4b155..fc68983ba12 100644 --- a/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java +++ 
b/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java @@ -572,7 +572,7 @@ private static void testRoundTripSerialization( try (SafeCloseable ignored = data; ChunkInputStreamGenerator generator = ChunkInputStreamGenerator.makeInputStreamGenerator( - chunkType, type, type.getComponentType(), srcData, 0)) { + chunkType, type, type.getComponentType(), srcData, 0)) { // empty subset try (final BarrageProtoUtil.ExposedByteArrayOutputStream baos = From 7b5f406da9f9c11a5b981954a7fd510a6f4b2142 Mon Sep 17 00:00:00 2001 From: Nathaniel Bauernfeind Date: Tue, 28 Feb 2023 11:53:00 -0700 Subject: [PATCH 12/14] Add Instant/ZonedDateTime to JsDataHandler, too --- .../java/io/deephaven/web/client/api/parse/JsDataHandler.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/parse/JsDataHandler.java b/web/client-api/src/main/java/io/deephaven/web/client/api/parse/JsDataHandler.java index f03f36a1f26..60b560419c7 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/parse/JsDataHandler.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/parse/JsDataHandler.java @@ -96,7 +96,7 @@ public void write(Object[] data, ParseContext context, JsConsumer addNode, addNode.apply(new Node(data.length, nullCount)); } }, - DATE_TIME(Type.Int, "io.deephaven.time.DateTime", "datetime") { + DATE_TIME(Type.Int, "io.deephaven.time.DateTime", "datetime", "java.time.Instant", "java.time.ZonedDateTime") { // Ensures that the 'T' separator character is in the date time private String ensureSeparator(String s) { if (s.charAt(SEPARATOR_INDEX) == ' ') { @@ -466,7 +466,7 @@ private static void writeSimpleNumbers(Object[] data, JsConsumer addNode, /** * Helper to read some js value as a double, so it can be handled as some type narrower than a js number. Do not use * this to handle wider types, check each possible type and fallback to this. 
- * + * * @param data the data to turn into a js number * @return null or a java double */ From e72f60f568e38d92063d0e8c90b746efc45dc034 Mon Sep 17 00:00:00 2001 From: Nathaniel Bauernfeind Date: Wed, 1 Mar 2023 16:58:52 -0700 Subject: [PATCH 13/14] add back accidentally rm test --- .../chunk/BarrageColumnRoundTripTest.java | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java b/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java index fc68983ba12..76f28f8bd1e 100644 --- a/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java +++ b/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java @@ -573,6 +573,29 @@ private static void testRoundTripSerialization( try (SafeCloseable ignored = data; ChunkInputStreamGenerator generator = ChunkInputStreamGenerator.makeInputStreamGenerator( chunkType, type, type.getComponentType(), srcData, 0)) { + // full sub logic + try (final BarrageProtoUtil.ExposedByteArrayOutputStream baos = + new BarrageProtoUtil.ExposedByteArrayOutputStream(); + final ChunkInputStreamGenerator.DrainableColumn column = + generator.getInputStream(options, null)) { + + + final ArrayList fieldNodes = new ArrayList<>(); + column.visitFieldNodes((numElements, nullCount) -> fieldNodes + .add(new ChunkInputStreamGenerator.FieldNodeInfo(numElements, nullCount))); + final TLongArrayList bufferNodes = new TLongArrayList(); + column.visitBuffers(bufferNodes::add); + column.drainTo(baos); + final DataInput dis = + new LittleEndianDataInputStream(new ByteArrayInputStream(baos.peekBuffer(), 0, baos.size())); + try (final WritableChunk rtData = + ChunkInputStreamGenerator.extractChunkFromInputStream(options, + chunkType, type, type.getComponentType(), fieldNodes.iterator(), bufferNodes.iterator(), + dis, 
null, 0, 0)) { + Assert.eq(data.size(), "data.size()", rtData.size(), "rtData.size()"); + validator.assertExpected(data, rtData, null, 0); + } + } // empty subset try (final BarrageProtoUtil.ExposedByteArrayOutputStream baos = From a5a6ec7756181e8872702980d7a59cd874572040 Mon Sep 17 00:00:00 2001 From: Nathaniel Bauernfeind Date: Fri, 3 Mar 2023 12:51:28 -0700 Subject: [PATCH 14/14] ryan's rnd2 feedback --- .../engine/table/impl/BucketingContext.java | 12 ++- .../table/impl/by/AggregationProcessor.java | 3 +- .../impl/select/ReinterpretedColumn.java | 9 ++- .../select/analyzers/SelectColumnLayer.java | 7 +- .../impl/sources/InMemoryColumnSource.java | 1 - .../table/impl/sources/ReinterpretUtils.java | 75 +++++++++---------- .../io/deephaven/engine/util/TableTools.java | 3 +- .../parquet/table/ParquetTableWriter.java | 8 +- 8 files changed, 63 insertions(+), 55 deletions(-) diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/BucketingContext.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/BucketingContext.java index 32299831648..fa868276e76 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/BucketingContext.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/BucketingContext.java @@ -73,11 +73,15 @@ class BucketingContext implements SafeCloseable { } if (leftType == DateTime.class) { - leftSources[ii] = ReinterpretUtils.dateTimeToLongSource(leftSources[ii]); - rightSources[ii] = ReinterpretUtils.dateTimeToLongSource(rightSources[ii]); + // noinspection unchecked + leftSources[ii] = ReinterpretUtils.dateTimeToLongSource((ColumnSource) leftSources[ii]); + // noinspection unchecked + rightSources[ii] = ReinterpretUtils.dateTimeToLongSource((ColumnSource) rightSources[ii]); } else if (leftType == boolean.class || leftType == Boolean.class) { - leftSources[ii] = ReinterpretUtils.booleanToByteSource(leftSources[ii]); - rightSources[ii] = ReinterpretUtils.booleanToByteSource(rightSources[ii]); + // 
noinspection unchecked + leftSources[ii] = ReinterpretUtils.booleanToByteSource((ColumnSource) leftSources[ii]); + // noinspection unchecked + rightSources[ii] = ReinterpretUtils.booleanToByteSource((ColumnSource) rightSources[ii]); if (leftSources.length == 1) { uniqueValues = true; maximumUniqueValue = BooleanUtils.TRUE_BOOLEAN_AS_BYTE; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/AggregationProcessor.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/AggregationProcessor.java index 715ed24638f..04a3c2fc451 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/AggregationProcessor.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/AggregationProcessor.java @@ -1373,8 +1373,9 @@ private static AggregationContext makeEmptyAggregationContext(final boolean requ } private static ColumnSource maybeReinterpretDateTimeAsLong(@NotNull final ColumnSource inputSource) { + // noinspection unchecked return inputSource.getType() == DateTime.class - ? ReinterpretUtils.dateTimeToLongSource(inputSource) + ? ReinterpretUtils.dateTimeToLongSource((ColumnSource) inputSource) : inputSource; } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/ReinterpretedColumn.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/ReinterpretedColumn.java index d79723ce52c..eb215fa5024 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/ReinterpretedColumn.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/ReinterpretedColumn.java @@ -240,11 +240,14 @@ public ColumnSource getDataView() { // directly. 
final ColumnSource intermediate; if (sourceDataType == DateTime.class) { - intermediate = ReinterpretUtils.dateTimeToLongSource(sourceColumnSource); + // noinspection unchecked + intermediate = ReinterpretUtils.dateTimeToLongSource((ColumnSource) sourceColumnSource); } else if (sourceDataType == Instant.class) { - intermediate = ReinterpretUtils.instantToLongSource(sourceColumnSource); + // noinspection unchecked + intermediate = ReinterpretUtils.instantToLongSource((ColumnSource) sourceColumnSource); } else if (sourceDataType == ZonedDateTime.class) { - intermediate = ReinterpretUtils.zonedDateTimeToLongSource(sourceColumnSource); + // noinspection unchecked + intermediate = ReinterpretUtils.zonedDateTimeToLongSource((ColumnSource) sourceColumnSource); } else if (sourceDataType == long.class || sourceDataType == Long.class) { // noinspection unchecked intermediate = (ColumnSource) sourceColumnSource; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/analyzers/SelectColumnLayer.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/analyzers/SelectColumnLayer.java index 1b57df424e5..f87f3074883 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/analyzers/SelectColumnLayer.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/analyzers/SelectColumnLayer.java @@ -112,10 +112,11 @@ private ChunkSource getChunkSource() { ColumnSource dataSource = selectColumn.getDataView(); if (dataSource.getType() != writableSource.getType()) { // this should only occur when using primitives internally and the user has requested a non-primitive - chunkSource = ReinterpretUtils.maybeConvertToPrimitive(dataSource); - } else { - chunkSource = dataSource; + dataSource = ReinterpretUtils.maybeConvertToPrimitive(dataSource); + Assert.eq(dataSource.getType(), "dataSource.getType()", + writableSource.getType(), "writableSource.getType()"); } + chunkSource = dataSource; if (selectColumnHoldsVector) { 
chunkSource = new VectorChunkAdapter<>(chunkSource); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/InMemoryColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/InMemoryColumnSource.java index d36db62dcf6..5579307d3f5 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/InMemoryColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/InMemoryColumnSource.java @@ -18,7 +18,6 @@ import org.jetbrains.annotations.Nullable; import java.time.Instant; -import java.time.ZonedDateTime; /** * This is a marker interface for a column source that is entirely within memory; therefore select operations should not diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ReinterpretUtils.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ReinterpretUtils.java index 5c8674efcfb..dcf1ead2ba4 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ReinterpretUtils.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ReinterpretUtils.java @@ -22,12 +22,11 @@ public class ReinterpretUtils { * * @return the long source */ - public static ColumnSource dateTimeToLongSource(ColumnSource source) { + public static ColumnSource dateTimeToLongSource(ColumnSource source) { if (source.allowsReinterpret(long.class)) { return source.reinterpret(long.class); } else { - // noinspection unchecked - return new DateTimeAsLongColumnSource((ColumnSource) source); + return new DateTimeAsLongColumnSource(source); } } @@ -38,12 +37,11 @@ public static ColumnSource dateTimeToLongSource(ColumnSource source) { * * @return the long source */ - public static ColumnSource longToDateTimeSource(ColumnSource source) { + public static ColumnSource longToDateTimeSource(ColumnSource source) { if (source.allowsReinterpret(DateTime.class)) { return source.reinterpret(DateTime.class); } else { - // 
noinspection unchecked - return new LongAsDateTimeColumnSource((ColumnSource) source); + return new LongAsDateTimeColumnSource(source); } } @@ -54,12 +52,11 @@ public static ColumnSource longToDateTimeSource(ColumnSource source * * @return the byte source */ - public static ColumnSource booleanToByteSource(ColumnSource source) { + public static ColumnSource booleanToByteSource(ColumnSource source) { if (source.allowsReinterpret(byte.class)) { return source.reinterpret(byte.class); } else { - // noinspection unchecked - return new BooleanAsByteColumnSource((ColumnSource) source); + return new BooleanAsByteColumnSource(source); } } @@ -72,12 +69,11 @@ public static ColumnSource booleanToByteSource(ColumnSource source) { * @return the long source */ @NotNull - public static ColumnSource instantToLongSource(final @NotNull ColumnSource source) { + public static ColumnSource instantToLongSource(final @NotNull ColumnSource source) { if (source.allowsReinterpret(long.class)) { return source.reinterpret(long.class); } else { - // noinspection unchecked - return new InstantAsLongColumnSource((ColumnSource) source); + return new InstantAsLongColumnSource(source); } } @@ -90,23 +86,23 @@ public static ColumnSource instantToLongSource(final @NotNull ColumnSource * @return the long source */ @NotNull - public static ColumnSource zonedDateTimeToLongSource(final @NotNull ColumnSource source) { + public static ColumnSource zonedDateTimeToLongSource(final @NotNull ColumnSource source) { if (source.allowsReinterpret(long.class)) { return source.reinterpret(long.class); } else { - // noinspection unchecked - return new ZonedDateTimeAsLongSource((ColumnSource) source); + return new ZonedDateTimeAsLongSource(source); } } /** - * Given a DateTime column source turn it into a long column source via reinterpretation if possible. + * Given a writable DateTime column source turn it into a writable long column source via reinterpretation if + * possible. 
* * @param source the source to turn into a long source * - * @return the long source or null if it could not be reinterpretted + * @return the long source or null if it could not be reinterpreted */ - public static WritableColumnSource writableDateTimeToLongSource(WritableColumnSource source) { + public static WritableColumnSource writableDateTimeToLongSource(WritableColumnSource source) { if (source.allowsReinterpret(long.class)) { return (WritableColumnSource) source.reinterpret(long.class); } @@ -114,13 +110,14 @@ public static WritableColumnSource writableDateTimeToLongSource(WritableColum } /** - * Given a Boolean column source turn it into a byte column source via reinterpretation if possible. + * Given a writable Boolean column source turn it into a writable byte column source via reinterpretation if + * possible. * * @param source the source to turn into a byte source * - * @return the byte source or null if it could not be reinterpretted + * @return the byte source or null if it could not be reinterpreted */ - public static WritableColumnSource writableBooleanToByteSource(WritableColumnSource source) { + public static WritableColumnSource writableBooleanToByteSource(WritableColumnSource source) { if (source.allowsReinterpret(byte.class)) { return (WritableColumnSource) source.reinterpret(byte.class); } @@ -128,16 +125,15 @@ public static WritableColumnSource writableBooleanToByteSource(WritableColumn } /** - * Given an {@link Instant} column source turn it into a long column source, either via reinterpretation or - * wrapping. + * Given a writable {@link Instant} column source turn it into a writable long column source via reinterpretation if + * possible. 
* * @param source the source to turn into a long source * - * @return the long source or null if it could not be reinterpretted + * @return the long source or null if it could not be reinterpreted */ - @NotNull public static WritableColumnSource writableInstantToLongSource( - final @NotNull WritableColumnSource source) { + final @NotNull WritableColumnSource source) { if (source.allowsReinterpret(long.class)) { return (WritableColumnSource) source.reinterpret(long.class); } @@ -145,16 +141,15 @@ public static WritableColumnSource writableInstantToLongSource( } /** - * Given a {@link ZonedDateTime} column source turn it into a long column source, either via reinterpretation or - * wrapping. + * Given a writable {@link ZonedDateTime} column source turn it into a writable long column source via + * reinterpretation if possible. * * @param source the source to turn into a long source * - * @return the long source or null if it could not be reinterpretted + * @return the long source or null if it could not be reinterpreted */ - @NotNull public static WritableColumnSource writableZonedDateTimeToLongSource( - final @NotNull WritableColumnSource source) { + final @NotNull WritableColumnSource source) { if (source.allowsReinterpret(long.class)) { return (WritableColumnSource) source.reinterpret(long.class); } @@ -167,18 +162,19 @@ public static WritableColumnSource writableZonedDateTimeToLongSource( * @param source The source to convert * @return If possible, the source converted to a primitive, otherwise the source */ + @SuppressWarnings("unchecked") public static ColumnSource maybeConvertToPrimitive(ColumnSource source) { if (source.getType() == Boolean.class || source.getType() == boolean.class) { - return booleanToByteSource(source); + return booleanToByteSource((ColumnSource) source); } if (source.getType() == DateTime.class) { - return dateTimeToLongSource(source); + return dateTimeToLongSource((ColumnSource) source); } if (source.getType() == Instant.class) { - 
return instantToLongSource(source); + return instantToLongSource((ColumnSource) source); } if (source.getType() == ZonedDateTime.class) { - return zonedDateTimeToLongSource(source); + return zonedDateTimeToLongSource((ColumnSource) source); } return source; } @@ -190,16 +186,17 @@ public static ColumnSource maybeConvertToPrimitive(ColumnSource source) { * @param source The source to convert * @return If possible, the source converted to a writable primitive, otherwise the source */ + @SuppressWarnings("unchecked") public static WritableColumnSource maybeConvertToWritablePrimitive(WritableColumnSource source) { WritableColumnSource result = null; if (source.getType() == Boolean.class || source.getType() == boolean.class) { - result = writableBooleanToByteSource(source); + result = writableBooleanToByteSource((WritableColumnSource) source); } else if (source.getType() == DateTime.class) { - result = writableDateTimeToLongSource(source); + result = writableDateTimeToLongSource((WritableColumnSource) source); } else if (source.getType() == Instant.class) { - result = writableInstantToLongSource(source); + result = writableInstantToLongSource((WritableColumnSource) source); } else if (source.getType() == ZonedDateTime.class) { - result = writableZonedDateTimeToLongSource(source); + result = writableZonedDateTimeToLongSource((WritableColumnSource) source); } return result == null ? 
source : result; } diff --git a/engine/table/src/main/java/io/deephaven/engine/util/TableTools.java b/engine/table/src/main/java/io/deephaven/engine/util/TableTools.java index 3536f158415..ab8d78e5e0b 100644 --- a/engine/table/src/main/java/io/deephaven/engine/util/TableTools.java +++ b/engine/table/src/main/java/io/deephaven/engine/util/TableTools.java @@ -1156,7 +1156,8 @@ public static String base64Fingerprint(Table source) throws IOException { private static void processColumnForFingerprint(RowSequence ok, ColumnSource col, DataOutputStream outputStream) throws IOException { if (col.getType() == DateTime.class) { - col = ReinterpretUtils.dateTimeToLongSource(col); + // noinspection unchecked + col = ReinterpretUtils.dateTimeToLongSource((ColumnSource) col); } final int chunkSize = 1 << 16; diff --git a/extensions/parquet/table/src/main/java/io/deephaven/parquet/table/ParquetTableWriter.java b/extensions/parquet/table/src/main/java/io/deephaven/parquet/table/ParquetTableWriter.java index 76175c422d9..d8d2e44ed79 100644 --- a/extensions/parquet/table/src/main/java/io/deephaven/parquet/table/ParquetTableWriter.java +++ b/extensions/parquet/table/src/main/java/io/deephaven/parquet/table/ParquetTableWriter.java @@ -86,7 +86,7 @@ enum CacheTags { /** * Classes that implement this interface are responsible for converting data from individual DH columns into buffers * to be written out to the Parquet file. 
- * + * * @param */ interface TransferObject extends SafeCloseable { @@ -514,11 +514,13 @@ static void writeColumnSource( Class columnType = columnSource.getType(); if (columnType == DateTime.class) { // noinspection unchecked - columnSource = (ColumnSource) ReinterpretUtils.dateTimeToLongSource(columnSource); + columnSource = (ColumnSource) ReinterpretUtils.dateTimeToLongSource( + (ColumnSource) columnSource); columnType = columnSource.getType(); } else if (columnType == Boolean.class) { // noinspection unchecked - columnSource = (ColumnSource) ReinterpretUtils.booleanToByteSource(columnSource); + columnSource = (ColumnSource) ReinterpretUtils.booleanToByteSource( + (ColumnSource) columnSource); } try (final ColumnWriter columnWriter = rowGroupWriter.addColumn(