From 8876208a498d69ff68ef3fb21274667b893b1e48 Mon Sep 17 00:00:00 2001
From: Nathaniel Bauernfeind
Date: Mon, 16 Sep 2024 17:58:53 -0600
Subject: [PATCH] Ryan's feedback and bug fixes

---
 .../java/io/deephaven/chunk/BooleanChunk.java | 6 +-
 .../java/io/deephaven/chunk/ByteChunk.java | 4 +-
 .../java/io/deephaven/chunk/CharChunk.java | 4 +-
 .../main/java/io/deephaven/chunk/Chunk.java | 6 -
 .../java/io/deephaven/chunk/DoubleChunk.java | 4 +-
 .../java/io/deephaven/chunk/FloatChunk.java | 4 +-
 .../java/io/deephaven/chunk/IntChunk.java | 4 +-
 .../java/io/deephaven/chunk/LongChunk.java | 4 +-
 .../java/io/deephaven/chunk/ObjectChunk.java | 4 +-
 .../java/io/deephaven/chunk/ShortChunk.java | 4 +-
 .../engine/table/impl/QueryTable.java | 1 -
 .../table/impl/sources/ReinterpretUtils.java | 2 +
 .../barrage/BarrageMessageWriterImpl.java | 7 +-
 .../extensions/barrage/BarrageOptions.java | 39 ++
 .../barrage/BarrageSnapshotOptions.java | 3 +-
 .../barrage/BarrageSubscriptionOptions.java | 3 +-
 .../extensions/barrage/BarrageTypeInfo.java | 55 ++
 .../extensions/barrage/ChunkListWriter.java | 3 +-
 .../barrage/chunk/BaseChunkReader.java | 4 +-
 .../barrage/chunk/BaseChunkWriter.java | 78 +--
 .../barrage/chunk/BooleanChunkWriter.java | 23 +-
 .../barrage/chunk/ByteChunkReader.java | 16 +-
 .../barrage/chunk/ByteChunkWriter.java | 30 +-
 .../barrage/chunk/CharChunkReader.java | 15 +-
 .../barrage/chunk/CharChunkWriter.java | 29 +-
 .../extensions/barrage/chunk/ChunkReader.java | 87 +---
 .../extensions/barrage/chunk/ChunkWriter.java | 39 +-
 .../chunk/DefaultChunkReaderFactory.java | 226 ++++----
 .../chunk/DefaultChunkWriterFactory.java | 481 ++++++++++--------
 .../barrage/chunk/DoubleChunkReader.java | 42 +-
 .../barrage/chunk/DoubleChunkWriter.java | 30 +-
 .../barrage/chunk/FixedWidthChunkReader.java | 5 +-
 .../barrage/chunk/FixedWidthChunkWriter.java | 22 +-
 .../barrage/chunk/FloatChunkReader.java | 17 +-
 .../barrage/chunk/FloatChunkWriter.java | 30 +-
 .../barrage/chunk/IntChunkReader.java | 16 +-
 .../barrage/chunk/IntChunkWriter.java | 30 +-
 .../barrage/chunk/ListChunkWriter.java | 11 +-
 .../barrage/chunk/LongChunkReader.java | 17 +-
 .../barrage/chunk/LongChunkWriter.java | 30 +-
 .../barrage/chunk/NullChunkWriter.java | 6 +-
 .../barrage/chunk/ShortChunkReader.java | 16 +-
 .../barrage/chunk/ShortChunkWriter.java | 30 +-
 .../barrage/chunk/VarBinaryChunkWriter.java | 48 +-
 .../barrage/util/ArrowToTableConverter.java | 4 +-
 .../barrage/util/BarrageMessageReader.java | 4 +-
 .../util/BarrageMessageReaderImpl.java | 10 +-
 .../extensions/barrage/util/BarrageUtil.java | 9 +-
 .../extensions/barrage/Barrage.gwt.xml | 2 +-
 .../chunk/BarrageColumnRoundTripTest.java | 21 +-
 .../replicators/ReplicateBarrageUtils.java | 32 +-
 .../ReplicateSourcesAndChunks.java | 4 +
 .../barrage/BarrageMessageProducer.java | 5 +-
 .../HierarchicalTableViewSubscription.java | 3 +-
 .../api/barrage/WebBarrageMessageReader.java | 6 +-
 .../api/barrage/WebChunkReaderFactory.java | 8 +-
 .../web/client/state/ClientTableState.java | 4 +-
 57 files changed, 913 insertions(+), 734 deletions(-)
 create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageOptions.java
 create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageTypeInfo.java

diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/BooleanChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/BooleanChunk.java
index fa29e8d40eb..081b15bc844 100644
--- a/engine/chunk/src/main/java/io/deephaven/chunk/BooleanChunk.java
+++ 
b/engine/chunk/src/main/java/io/deephaven/chunk/BooleanChunk.java @@ -75,9 +75,11 @@ public final boolean get(int index) { return data[offset + index]; } - public final boolean isNullAt(int index) { - return data[offset + index] == QueryConstants.NULL_BOOLEAN; + // region isNull + public final boolean isNull(int index) { + return false; } + // endregion isNull @Override public BooleanChunk slice(int offset, int capacity) { diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ByteChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ByteChunk.java index 92e58b73909..746b48b1557 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ByteChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ByteChunk.java @@ -79,9 +79,11 @@ public final byte get(int index) { return data[offset + index]; } - public final boolean isNullAt(int index) { + // region isNull + public final boolean isNull(int index) { return data[offset + index] == QueryConstants.NULL_BYTE; } + // endregion isNull @Override public ByteChunk slice(int offset, int capacity) { diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/CharChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/CharChunk.java index 97d04681f91..97e184755bd 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/CharChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/CharChunk.java @@ -74,9 +74,11 @@ public final char get(int index) { return data[offset + index]; } - public final boolean isNullAt(int index) { + // region isNull + public final boolean isNull(int index) { return data[offset + index] == QueryConstants.NULL_CHAR; } + // endregion isNull @Override public CharChunk slice(int offset, int capacity) { diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/Chunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/Chunk.java index 7d9c5e03605..35e152dcd0f 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/Chunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/Chunk.java @@ -109,12 +109,6 @@ default void copyToBuffer(int srcOffset, @NotNull Buffer destBuffer, int destOff */ int size(); - /** - * @return whether The value offset is null - * @param index The index to check - */ - boolean isNullAt(int index); - /** * @return The underlying chunk type */ diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/DoubleChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/DoubleChunk.java index b53a08921ef..c0b35fde54e 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/DoubleChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/DoubleChunk.java @@ -78,9 +78,11 @@ public final double get(int index) { return data[offset + index]; } - public final boolean isNullAt(int index) { + // region isNull + public final boolean isNull(int index) { return data[offset + index] == QueryConstants.NULL_DOUBLE; } + // endregion isNull @Override public DoubleChunk slice(int offset, int capacity) { diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/FloatChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/FloatChunk.java index 806adf6e10e..dfd68f81b75 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/FloatChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/FloatChunk.java @@ -78,9 +78,11 @@ public final float get(int index) { return data[offset + index]; } - public final boolean isNullAt(int index) { + // region isNull + public final boolean isNull(int index) { return data[offset + index] == QueryConstants.NULL_FLOAT; } + // endregion isNull @Override public 
FloatChunk slice(int offset, int capacity) { diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/IntChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/IntChunk.java index c5c46e591e6..3296deacad2 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/IntChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/IntChunk.java @@ -78,9 +78,11 @@ public final int get(int index) { return data[offset + index]; } - public final boolean isNullAt(int index) { + // region isNull + public final boolean isNull(int index) { return data[offset + index] == QueryConstants.NULL_INT; } + // endregion isNull @Override public IntChunk slice(int offset, int capacity) { diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/LongChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/LongChunk.java index 9b34855dfc3..3a6f21461fc 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/LongChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/LongChunk.java @@ -78,9 +78,11 @@ public final long get(int index) { return data[offset + index]; } - public final boolean isNullAt(int index) { + // region isNull + public final boolean isNull(int index) { return data[offset + index] == QueryConstants.NULL_LONG; } + // endregion isNull @Override public LongChunk slice(int offset, int capacity) { diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ObjectChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ObjectChunk.java index 49ac3556670..4bfa0a20dfb 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ObjectChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ObjectChunk.java @@ -78,9 +78,11 @@ public final T get(int index) { return data[offset + index]; } - public final boolean isNullAt(int index) { + // region isNull + public final boolean isNull(int index) { return data[offset + index] == null; } + // endregion isNull @Override public ObjectChunk slice(int offset, int capacity) { diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ShortChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ShortChunk.java index 12cb89b260c..5e8fa290986 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ShortChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ShortChunk.java @@ -78,9 +78,11 @@ public final short get(int index) { return data[offset + index]; } - public final boolean isNullAt(int index) { + // region isNull + public final boolean isNull(int index) { return data[offset + index] == QueryConstants.NULL_SHORT; } + // endregion isNull @Override public ShortChunk slice(int offset, int capacity) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/QueryTable.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/QueryTable.java index 6629db9ab2a..1c227ea3ae7 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/QueryTable.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/QueryTable.java @@ -2613,7 +2613,6 @@ private Table snapshotIncrementalInternal(final Table base, final boolean doInit new ListenerRecorder("snapshotIncremental (triggerTable)", this, resultTable); addUpdateListener(triggerListenerRecorder); - dropColumns(getColumnSourceMap().keySet()); final SnapshotIncrementalListener listener = new SnapshotIncrementalListener(this, resultTable, resultColumns, baseListenerRecorder, triggerListenerRecorder, baseTable, triggerColumns); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ReinterpretUtils.java 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ReinterpretUtils.java index 5b2ab07a8b4..012f783c53c 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ReinterpretUtils.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ReinterpretUtils.java @@ -265,6 +265,7 @@ public static ChunkType maybeConvertToWritablePrimitiveChunkType(@NotNull final } if (dataType == Instant.class) { // Note that storing ZonedDateTime as a primitive is lossy on the time zone. + // TODO (https://github.com/deephaven/deephaven-core/issues/5241): Inconsistent handling of ZonedDateTime return ChunkType.Long; } return ChunkType.fromElementType(dataType); @@ -284,6 +285,7 @@ public static Class maybeConvertToPrimitiveDataType(@NotNull final Class d } if (dataType == Instant.class || dataType == ZonedDateTime.class) { // Note: not all ZonedDateTime sources are convertible to long, so this doesn't match column source behavior + // TODO (https://github.com/deephaven/deephaven-core/issues/5241): Inconsistent handling of ZonedDateTime return long.class; } return dataType; diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageMessageWriterImpl.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageMessageWriterImpl.java index aad8b0e242e..9f93245daed 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageMessageWriterImpl.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageMessageWriterImpl.java @@ -25,7 +25,6 @@ import io.deephaven.engine.rowset.*; import io.deephaven.engine.rowset.impl.ExternalizableRowSetUtils; import io.deephaven.engine.table.impl.util.BarrageMessage; -import io.deephaven.extensions.barrage.chunk.ChunkReader; import io.deephaven.extensions.barrage.chunk.ChunkWriter; import io.deephaven.extensions.barrage.chunk.SingleElementListHeaderWriter; import io.deephaven.extensions.barrage.util.ExposedByteArrayOutputStream; @@ -75,7 +74,7 @@ public class BarrageMessageWriterImpl implements BarrageMessageWriter { public interface RecordBatchMessageView extends MessageView { boolean isViewport(); - ChunkReader.Options options(); + BarrageOptions options(); RowSet addRowOffsets(); @@ -354,7 +353,7 @@ public boolean isViewport() { } @Override - public ChunkReader.Options options() { + public BarrageOptions options() { return options; } @@ -533,7 +532,7 @@ public boolean isViewport() { } @Override - public ChunkReader.Options options() { + public BarrageOptions options() { return options; } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageOptions.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageOptions.java new file mode 100644 index 00000000000..6674d788c29 --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageOptions.java @@ -0,0 +1,39 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.extensions.barrage; + +import io.deephaven.util.QueryConstants; + +public interface BarrageOptions { + /** + * @return whether we encode the validity buffer to express null values or {@link QueryConstants QueryConstants'} + * NULL values. 
+ */ + boolean useDeephavenNulls(); + + /** + * @return the conversion mode to use for object columns + */ + ColumnConversionMode columnConversionMode(); + + /** + * @return the ideal number of records to send per record batch + */ + int batchSize(); + + /** + * @return the maximum number of bytes that should be sent in a single message. + */ + int maxMessageSize(); + + /** + * Some Flight clients cannot handle modifications that have irregular column counts. These clients request that the + * server wrap all columns in a list to enable each column having a variable length. + * + * @return true if the columns should be wrapped in a list + */ + default boolean columnsAsList() { + return false; + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageSnapshotOptions.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageSnapshotOptions.java index 01b16021630..b6a0f931dc3 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageSnapshotOptions.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageSnapshotOptions.java @@ -6,13 +6,12 @@ import com.google.flatbuffers.FlatBufferBuilder; import io.deephaven.annotations.BuildableStyle; import io.deephaven.barrage.flatbuf.BarrageSnapshotRequest; -import io.deephaven.extensions.barrage.chunk.ChunkReader; import org.immutables.value.Value.Default; import org.immutables.value.Value.Immutable; @Immutable @BuildableStyle -public abstract class BarrageSnapshotOptions implements ChunkReader.Options { +public abstract class BarrageSnapshotOptions implements BarrageOptions { public static Builder builder() { return ImmutableBarrageSnapshotOptions.builder(); } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageSubscriptionOptions.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageSubscriptionOptions.java index e7ef80e591d..86920289c1f 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageSubscriptionOptions.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageSubscriptionOptions.java @@ -6,13 +6,12 @@ import com.google.flatbuffers.FlatBufferBuilder; import io.deephaven.annotations.BuildableStyle; import io.deephaven.barrage.flatbuf.BarrageSubscriptionRequest; -import io.deephaven.extensions.barrage.chunk.ChunkReader; import org.immutables.value.Value.Default; import org.immutables.value.Value.Immutable; @Immutable @BuildableStyle -public abstract class BarrageSubscriptionOptions implements ChunkReader.Options { +public abstract class BarrageSubscriptionOptions implements BarrageOptions { public static Builder builder() { return ImmutableBarrageSubscriptionOptions.builder(); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageTypeInfo.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageTypeInfo.java new file mode 100644 index 00000000000..18d5d9f0c22 --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageTypeInfo.java @@ -0,0 +1,55 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.extensions.barrage; + +import org.apache.arrow.flatbuf.Field; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; + +/** + * Describes type info used by factory implementations when creating a ChunkReader. 
+ */ +public class BarrageTypeInfo { + /** + * Factory method to create a TypeInfo instance. + * + * @param type the Java type to be read into the chunk + * @param componentType the Java type of nested components + * @param arrowField the Arrow type to be read into the chunk + * @return a TypeInfo instance + */ + public static BarrageTypeInfo make( + @NotNull final Class type, + @Nullable final Class componentType, + @NotNull final Field arrowField) { + return new BarrageTypeInfo(type, componentType, arrowField); + } + + private final Class type; + @Nullable + private final Class componentType; + private final Field arrowField; + + public BarrageTypeInfo( + @NotNull final Class type, + @Nullable final Class componentType, + @NotNull final Field arrowField) { + this.type = type; + this.componentType = componentType; + this.arrowField = arrowField; + } + + public Class type() { + return type; + } + + @Nullable + public Class componentType() { + return componentType; + } + + public Field arrowField() { + return arrowField; + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/ChunkListWriter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/ChunkListWriter.java index d579f28b6a1..f8700fd57a3 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/ChunkListWriter.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/ChunkListWriter.java @@ -5,7 +5,6 @@ import io.deephaven.chunk.Chunk; import io.deephaven.chunk.attributes.Values; -import io.deephaven.extensions.barrage.chunk.ChunkReader; import io.deephaven.extensions.barrage.chunk.ChunkWriter; import io.deephaven.util.SafeCloseable; import org.jetbrains.annotations.NotNull; @@ -41,7 +40,7 @@ public ChunkWriter.Context[] chunks() { return contexts; } - public ChunkWriter.DrainableColumn empty(@NotNull final ChunkReader.Options options) throws IOException { + public ChunkWriter.DrainableColumn empty(@NotNull final BarrageOptions options) throws IOException { return writer.getEmptyInputStream(options); } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BaseChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BaseChunkReader.java index 3391cf72340..eef29da34e5 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BaseChunkReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BaseChunkReader.java @@ -10,8 +10,8 @@ import java.util.function.Function; import java.util.function.IntFunction; -public abstract class BaseChunkReader> - implements ChunkReader { +public abstract class BaseChunkReader> + implements ChunkReader { protected static > T castOrCreateChunk( final WritableChunk outChunk, diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BaseChunkWriter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BaseChunkWriter.java index d67c6df22a5..d36ebd73834 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BaseChunkWriter.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BaseChunkWriter.java @@ -9,6 +9,7 @@ import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.RowSequenceFactory; import io.deephaven.engine.rowset.RowSet; +import io.deephaven.extensions.barrage.BarrageOptions; import io.deephaven.util.datastructures.LongSizedDataStructure; import 
org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; @@ -17,48 +18,59 @@ import java.io.IOException; import java.util.function.Supplier; -public abstract class BaseChunkWriter> implements ChunkWriter { +public abstract class BaseChunkWriter> + implements ChunkWriter { + @FunctionalInterface + public interface IsRowNullProvider> { + boolean isRowNull(SOURCE_CHUNK_TYPE chunk, int idx); + } public static final byte[] PADDING_BUFFER = new byte[8]; public static final int REMAINDER_MOD_8_MASK = 0x7; - protected final Supplier emptyChunkSupplier; + protected final IsRowNullProvider isRowNullProvider; + protected final Supplier emptyChunkSupplier; protected final int elementSize; + /** whether we can use the wire value as a deephaven null for clients that support dh nulls */ protected final boolean dhNullable; BaseChunkWriter( - final Supplier emptyChunkSupplier, + @NotNull final IsRowNullProvider isRowNullProvider, + @NotNull final Supplier emptyChunkSupplier, final int elementSize, final boolean dhNullable) { + this.isRowNullProvider = isRowNullProvider; this.emptyChunkSupplier = emptyChunkSupplier; this.elementSize = elementSize; this.dhNullable = dhNullable; } @Override - public final DrainableColumn getEmptyInputStream(final @NotNull ChunkReader.Options options) throws IOException { - return getInputStream(makeContext(emptyChunkSupplier.get(), 0), null, options); + public final DrainableColumn getEmptyInputStream(final @NotNull BarrageOptions options) throws IOException { + try (Context context = makeContext(emptyChunkSupplier.get(), 0)) { + return getInputStream(context, null, options); + } } @Override - public Context makeContext( - @NotNull final SourceChunkType chunk, + public Context makeContext( + @NotNull final SOURCE_CHUNK_TYPE chunk, final long rowOffset) { return new Context<>(chunk, rowOffset); } - abstract class BaseChunkInputStream> extends DrainableColumn { - protected final ContextType context; + abstract class BaseChunkInputStream> extends DrainableColumn { + protected final CONTEXT_TYPE context; protected final RowSequence subset; - protected final ChunkReader.Options options; + protected final BarrageOptions options; protected boolean read = false; - private int cachedNullCount = -1; + private int nullCount; BaseChunkInputStream( - @NotNull final ContextType context, + @NotNull final CONTEXT_TYPE context, @Nullable final RowSet subset, - @NotNull final ChunkReader.Options options) { + @NotNull final BarrageOptions options) { this.context = context; context.incrementReferenceCount(); this.options = options; @@ -73,6 +85,16 @@ abstract class BaseChunkInputStream throw new IllegalStateException( "Subset " + this.subset + " is out of bounds for context of size " + context.size()); } + + if (dhNullable && options.useDeephavenNulls()) { + nullCount = 0; + } else { + this.subset.forAllRowKeys(row -> { + if (isRowNullProvider.isRowNull(context.getChunk(), (int) row)) { + ++nullCount; + } + }); + } } @Override @@ -110,19 +132,7 @@ protected boolean sendValidityBuffer() { @Override public int nullCount() { - if (dhNullable && options.useDeephavenNulls()) { - return 0; - } - if (cachedNullCount == -1) { - cachedNullCount = 0; - final SourceChunkType chunk = context.getChunk(); - subset.forAllRowKeys(row -> { - if (chunk.isNullAt((int) row)) { - ++cachedNullCount; - } - }); - } - return cachedNullCount; + return nullCount; } protected long writeValidityBuffer(final DataOutput dos) { @@ -130,26 +140,26 @@ protected long writeValidityBuffer(final DataOutput dos) 
{ return 0; } - final SerContext context = new SerContext(); + final SerContext serContext = new SerContext(); final Runnable flush = () -> { try { - dos.writeLong(context.accumulator); + dos.writeLong(serContext.accumulator); } catch (final IOException e) { throw new UncheckedDeephavenException( "Unexpected exception while draining data to OutputStream: ", e); } - context.accumulator = 0; - context.count = 0; + serContext.accumulator = 0; + serContext.count = 0; }; subset.forAllRowKeys(row -> { - if (!this.context.getChunk().isNullAt((int) row)) { - context.accumulator |= 1L << context.count; + if (!isRowNullProvider.isRowNull(context.getChunk(), (int) row)) { + serContext.accumulator |= 1L << serContext.count; } - if (++context.count == 64) { + if (++serContext.count == 64) { flush.run(); } }); - if (context.count > 0) { + if (serContext.count > 0) { flush.run(); } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkWriter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkWriter.java index 3eafbedb3c8..a892a4d01da 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkWriter.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkWriter.java @@ -8,6 +8,7 @@ import io.deephaven.engine.rowset.RowSet; import com.google.common.io.LittleEndianDataOutputStream; import io.deephaven.UncheckedDeephavenException; +import io.deephaven.extensions.barrage.BarrageOptions; import io.deephaven.util.datastructures.LongSizedDataStructure; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; @@ -22,14 +23,14 @@ public class BooleanChunkWriter extends BaseChunkWriter> { public static final BooleanChunkWriter INSTANCE = new BooleanChunkWriter(); public BooleanChunkWriter() { - super(ByteChunk::getEmptyChunk, 0, false); + super(ByteChunk::isNull, ByteChunk::getEmptyChunk, 0, false); } @Override public DrainableColumn getInputStream( @NotNull final Context> context, @Nullable final RowSet subset, - @NotNull final ChunkReader.Options options) throws IOException { + @NotNull final BarrageOptions options) throws IOException { return new BooleanChunkInputStream(context, subset, options); } @@ -37,7 +38,7 @@ private class BooleanChunkInputStream extends BaseChunkInputStream> context, @Nullable final RowSet subset, - @NotNull final ChunkReader.Options options) { + @NotNull final BarrageOptions options) { super(context, subset, options); } @@ -79,28 +80,28 @@ public int drainTo(final OutputStream outputStream) throws IOException { bytesWritten += writeValidityBuffer(dos); // write the payload buffer - final SerContext context = new SerContext(); + final SerContext serContext = new SerContext(); final Runnable flush = () -> { try { - dos.writeLong(context.accumulator); + dos.writeLong(serContext.accumulator); } catch (final IOException e) { throw new UncheckedDeephavenException("Unexpected exception while draining data to OutputStream: ", e); } - context.accumulator = 0; - context.count = 0; + serContext.accumulator = 0; + serContext.count = 0; }; subset.forAllRowKeys(row -> { - final byte byteValue = this.context.getChunk().get((int) row); + final byte byteValue = context.getChunk().get((int) row); if (byteValue != NULL_BYTE) { - context.accumulator |= (byteValue > 0 ? 1L : 0L) << context.count; + serContext.accumulator |= (byteValue > 0 ? 
1L : 0L) << serContext.count; } - if (++context.count == 64) { + if (++serContext.count == 64) { flush.run(); } }); - if (context.count > 0) { + if (serContext.count > 0) { flush.run(); } bytesWritten += getNumLongsForBitPackOfSize(subset.intSize(DEBUG_NAME)) * (long) Long.BYTES; diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkReader.java index 612d0920a2d..fb2bdf9e636 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkReader.java @@ -13,7 +13,7 @@ import io.deephaven.chunk.WritableLongChunk; import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Values; -import io.deephaven.util.datastructures.LongSizedDataStructure; +import io.deephaven.extensions.barrage.BarrageOptions;import io.deephaven.util.datastructures.LongSizedDataStructure; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; @@ -29,13 +29,13 @@ public class ByteChunkReader extends BaseChunkReader> private static final String DEBUG_NAME = "ByteChunkReader"; @FunctionalInterface - public interface ToByteTransformFunction> { - byte get(WireChunkType wireValues, int wireOffset); + public interface ToByteTransformFunction> { + byte get(WIRE_CHUNK_TYPE wireValues, int wireOffset); } - public static , T extends ChunkReader> ChunkReader> transformTo( + public static , T extends ChunkReader> ChunkReader> transformTo( final T wireReader, - final ToByteTransformFunction wireTransform) { + final ToByteTransformFunction wireTransform) { return new TransformingChunkReader<>( wireReader, WritableByteChunk::makeWritableChunk, @@ -44,7 +44,7 @@ public static , T extends ChunkReade outOffset, wireTransform.get(wireValues, wireOffset))); } - private final ChunkReader.Options options; + private final BarrageOptions options; private final ByteConversion conversion; @FunctionalInterface @@ -54,11 +54,11 @@ public interface ByteConversion { ByteConversion IDENTITY = (byte a) -> a; } - public ByteChunkReader(ChunkReader.Options options) { + public ByteChunkReader(BarrageOptions options) { this(options, ByteConversion.IDENTITY); } - public ByteChunkReader(ChunkReader.Options options, ByteConversion conversion) { + public ByteChunkReader(BarrageOptions options, ByteConversion conversion) { this.options = options; this.conversion = conversion; } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkWriter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkWriter.java index fc86adae55c..bf5b820e4c2 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkWriter.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkWriter.java @@ -12,7 +12,7 @@ import io.deephaven.engine.rowset.RowSet; import com.google.common.io.LittleEndianDataOutputStream; import io.deephaven.UncheckedDeephavenException; -import io.deephaven.util.datastructures.LongSizedDataStructure; +import io.deephaven.extensions.barrage.BarrageOptions;import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.ByteChunk; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; @@ -21,38 +21,39 @@ import java.io.OutputStream; import java.util.function.Supplier; 
-public class ByteChunkWriter> extends BaseChunkWriter { +public class ByteChunkWriter> extends BaseChunkWriter { private static final String DEBUG_NAME = "ByteChunkWriter"; - public static final ByteChunkWriter> INSTANCE = new ByteChunkWriter<>( - ByteChunk::getEmptyChunk, ByteChunk::get); + public static final ByteChunkWriter> IDENTITY_INSTANCE = new ByteChunkWriter<>( + ByteChunk::isNull, ByteChunk::getEmptyChunk, ByteChunk::get); @FunctionalInterface public interface ToByteTransformFunction> { byte get(SourceChunkType sourceValues, int offset); } - private final ToByteTransformFunction transform; + private final ToByteTransformFunction transform; public ByteChunkWriter( - @NotNull final Supplier emptyChunkSupplier, - @Nullable final ToByteTransformFunction transform) { - super(emptyChunkSupplier, Byte.BYTES, true); + @NotNull final IsRowNullProvider isRowNullProvider, + @NotNull final Supplier emptyChunkSupplier, + @Nullable final ToByteTransformFunction transform) { + super(isRowNullProvider, emptyChunkSupplier, Byte.BYTES, true); this.transform = transform; } @Override public DrainableColumn getInputStream( - @NotNull final Context context, + @NotNull final Context context, @Nullable final RowSet subset, - @NotNull final ChunkReader.Options options) throws IOException { + @NotNull final BarrageOptions options) throws IOException { return new ByteChunkInputStream(context, subset, options); } - private class ByteChunkInputStream extends BaseChunkInputStream> { + private class ByteChunkInputStream extends BaseChunkInputStream> { private ByteChunkInputStream( - @NotNull final Context context, + @NotNull final Context context, @Nullable final RowSet subset, - @NotNull final ChunkReader.Options options) { + @NotNull final BarrageOptions options) { super(context, subset, options); } @@ -66,8 +67,7 @@ public void visitBuffers(final BufferListener listener) { // validity listener.noteLogicalBuffer(sendValidityBuffer() ? 
getValidityMapSerializationSizeFor(subset.intSize()) : 0); // payload - long length = elementSize * subset.size(); - listener.noteLogicalBuffer(padBufferSize(length)); + listener.noteLogicalBuffer(padBufferSize(elementSize * subset.size())); } @Override diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkReader.java index 96f3984db84..e51e02b3c98 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkReader.java @@ -9,6 +9,7 @@ import io.deephaven.chunk.WritableLongChunk; import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.BarrageOptions; import io.deephaven.util.datastructures.LongSizedDataStructure; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; @@ -25,13 +26,13 @@ public class CharChunkReader extends BaseChunkReader> private static final String DEBUG_NAME = "CharChunkReader"; @FunctionalInterface - public interface ToCharTransformFunction> { - char get(WireChunkType wireValues, int wireOffset); + public interface ToCharTransformFunction> { + char get(WIRE_CHUNK_TYPE wireValues, int wireOffset); } - public static , T extends ChunkReader> ChunkReader> transformTo( + public static , T extends ChunkReader> ChunkReader> transformTo( final T wireReader, - final ToCharTransformFunction wireTransform) { + final ToCharTransformFunction wireTransform) { return new TransformingChunkReader<>( wireReader, WritableCharChunk::makeWritableChunk, @@ -40,7 +41,7 @@ public static , T extends ChunkReade outOffset, wireTransform.get(wireValues, wireOffset))); } - private final ChunkReader.Options options; + private final BarrageOptions options; private final CharConversion conversion; @FunctionalInterface @@ -50,11 +51,11 @@ public interface CharConversion { CharConversion IDENTITY = (char a) -> a; } - public CharChunkReader(ChunkReader.Options options) { + public CharChunkReader(BarrageOptions options) { this(options, CharConversion.IDENTITY); } - public CharChunkReader(ChunkReader.Options options, CharConversion conversion) { + public CharChunkReader(BarrageOptions options, CharConversion conversion) { this.options = options; this.conversion = conversion; } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkWriter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkWriter.java index e3875c635b3..60117620765 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkWriter.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkWriter.java @@ -8,6 +8,7 @@ import io.deephaven.engine.rowset.RowSet; import com.google.common.io.LittleEndianDataOutputStream; import io.deephaven.UncheckedDeephavenException; +import io.deephaven.extensions.barrage.BarrageOptions; import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.CharChunk; import org.jetbrains.annotations.NotNull; @@ -17,38 +18,39 @@ import java.io.OutputStream; import java.util.function.Supplier; -public class CharChunkWriter> extends BaseChunkWriter { +public class CharChunkWriter> extends BaseChunkWriter { private static final String DEBUG_NAME = "CharChunkWriter"; - public static final 
CharChunkWriter> INSTANCE = new CharChunkWriter<>( - CharChunk::getEmptyChunk, CharChunk::get); + public static final CharChunkWriter> IDENTITY_INSTANCE = new CharChunkWriter<>( + CharChunk::isNull, CharChunk::getEmptyChunk, CharChunk::get); @FunctionalInterface public interface ToCharTransformFunction> { char get(SourceChunkType sourceValues, int offset); } - private final ToCharTransformFunction transform; + private final ToCharTransformFunction transform; public CharChunkWriter( - @NotNull final Supplier emptyChunkSupplier, - @Nullable final ToCharTransformFunction transform) { - super(emptyChunkSupplier, Character.BYTES, true); + @NotNull final IsRowNullProvider isRowNullProvider, + @NotNull final Supplier emptyChunkSupplier, + @Nullable final ToCharTransformFunction transform) { + super(isRowNullProvider, emptyChunkSupplier, Character.BYTES, true); this.transform = transform; } @Override public DrainableColumn getInputStream( - @NotNull final Context context, + @NotNull final Context context, @Nullable final RowSet subset, - @NotNull final ChunkReader.Options options) throws IOException { + @NotNull final BarrageOptions options) throws IOException { return new CharChunkInputStream(context, subset, options); } - private class CharChunkInputStream extends BaseChunkInputStream> { + private class CharChunkInputStream extends BaseChunkInputStream> { private CharChunkInputStream( - @NotNull final Context context, + @NotNull final Context context, @Nullable final RowSet subset, - @NotNull final ChunkReader.Options options) { + @NotNull final BarrageOptions options) { super(context, subset, options); } @@ -62,8 +64,7 @@ public void visitBuffers(final BufferListener listener) { // validity listener.noteLogicalBuffer(sendValidityBuffer() ? getValidityMapSerializationSizeFor(subset.intSize()) : 0); // payload - long length = elementSize * subset.size(); - listener.noteLogicalBuffer(padBufferSize(length)); + listener.noteLogicalBuffer(padBufferSize(elementSize * subset.size())); } @Override diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReader.java index 571d9ae38f2..67285d0f897 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReader.java @@ -5,10 +5,9 @@ import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.attributes.Values; -import io.deephaven.extensions.barrage.ColumnConversionMode; -import io.deephaven.util.QueryConstants; +import io.deephaven.extensions.barrage.BarrageOptions; +import io.deephaven.extensions.barrage.BarrageTypeInfo; import io.deephaven.util.annotations.FinalDefault; -import org.apache.arrow.flatbuf.Field; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; @@ -21,37 +20,6 @@ * Consumes Flight/Barrage streams and transforms them into WritableChunks. */ public interface ChunkReader> { - interface Options { - /** - * @return whether we encode the validity buffer to express null values or {@link QueryConstants}'s NULL values. - */ - boolean useDeephavenNulls(); - - /** - * @return the conversion mode to use for object columns - */ - ColumnConversionMode columnConversionMode(); - - /** - * @return the ideal number of records to send per record batch - */ - int batchSize(); - - /** - * @return the maximum number of bytes that should be sent in a single message. 
- */ - int maxMessageSize(); - - /** - * Some Flight clients cannot handle modifications that have irregular column counts. These clients request that - * the server wrap all columns in a list to enable each column having a variable length. - * - * @return true if the columns should be wrapped in a list - */ - default boolean columnsAsList() { - return false; - } - } /** * Reads the given DataInput to extract the next Arrow buffer as a Deephaven Chunk. @@ -104,54 +72,7 @@ interface Factory { * @return a ChunkReader based on the given options, factory, and type to read */ > ChunkReader newReader( - @NotNull TypeInfo typeInfo, - @NotNull Options options); - } - - /** - * Describes type info used by factory implementations when creating a ChunkReader. - */ - class TypeInfo { - private final Class type; - @Nullable - private final Class componentType; - private final Field arrowField; - - public TypeInfo( - @NotNull final Class type, - @Nullable final Class componentType, - @NotNull final Field arrowField) { - this.type = type; - this.componentType = componentType; - this.arrowField = arrowField; - } - - public Class type() { - return type; - } - - @Nullable - public Class componentType() { - return componentType; - } - - public Field arrowField() { - return arrowField; - } - } - - /** - * Factory method to create a TypeInfo instance. - * - * @param type the Java type to be read into the chunk - * @param componentType the Java type of nested components - * @param arrowField the Arrow type to be read into the chunk - * @return a TypeInfo instance - */ - static TypeInfo typeInfo( - @NotNull final Class type, - @Nullable final Class componentType, - @NotNull final Field arrowField) { - return new TypeInfo(type, componentType, arrowField); + @NotNull BarrageTypeInfo typeInfo, + @NotNull BarrageOptions options); } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkWriter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkWriter.java index 8af6c3281c2..d3a19f78a98 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkWriter.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkWriter.java @@ -6,6 +6,8 @@ import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.rowset.RowSet; +import io.deephaven.extensions.barrage.BarrageOptions; +import io.deephaven.extensions.barrage.BarrageTypeInfo; import io.deephaven.extensions.barrage.util.DefensiveDrainable; import io.deephaven.util.SafeCloseable; import io.deephaven.util.datastructures.LongSizedDataStructure; @@ -16,10 +18,7 @@ import java.io.IOException; -public interface ChunkWriter> { - long MS_PER_DAY = 24 * 60 * 60 * 1000L; - long NS_PER_MS = 1_000_000L; - long NS_PER_DAY = MS_PER_DAY * NS_PER_MS; +public interface ChunkWriter> { /** * Creator of {@link ChunkWriter} instances. @@ -28,50 +27,58 @@ public interface ChunkWriter> { */ interface Factory { /** - * Returns a {@link ChunkReader} for the specified arguments. + * Returns a {@link ChunkWriter} for the specified arguments. 
* - * @param typeInfo the type of data to read into a chunk - * @return a ChunkReader based on the given options, factory, and type to read + * @param typeInfo the type of data to write into a chunk + * @return a ChunkWriter based on the given options, factory, and type to write */ > ChunkWriter newWriter( - @NotNull ChunkReader.TypeInfo typeInfo); + @NotNull BarrageTypeInfo typeInfo); } /** * Create a context for the given chunk. * * @param chunk the chunk of data to be written - * @param rowOffset the number of rows that were sent before the first row in this logical message + * @param rowOffset the offset into the logical message potentially spread over multiple chunks * @return a context for the given chunk */ - Context makeContext(final SourceChunkType chunk, final long rowOffset); + Context makeContext( + @NotNull SOURCE_CHUNK_TYPE chunk, + long rowOffset); /** * Get an input stream optionally position-space filtered using the provided RowSet. * * @param context the chunk writer context holding the data to be drained to the client * @param subset if provided, is a position-space filter of source data - * @param options options for reading the stream + * @param options options for writing to the stream * @return a single-use DrainableColumn ready to be drained via grpc */ DrainableColumn getInputStream( - @NotNull Context context, + @NotNull Context context, @Nullable RowSet subset, - @NotNull ChunkReader.Options options) throws IOException; + @NotNull BarrageOptions options) throws IOException; /** * Get an input stream representing the empty wire payload for this writer. * - * @param options options for reading the stream + * @param options options for writing to the stream * @return a single-use DrainableColumn ready to be drained via grpc */ DrainableColumn getEmptyInputStream( - @NotNull ChunkReader.Options options) throws IOException; + @NotNull BarrageOptions options) throws IOException; class Context> extends ReferenceCounted implements SafeCloseable { private final T chunk; private final long rowOffset; + /** + * Create a new context for the given chunk. + * + * @param chunk the chunk of data to be written + * @param rowOffset the offset into the logical message potentially spread over multiple chunks + */ public Context(final T chunk, final long rowOffset) { super(1); this.chunk = chunk; @@ -86,7 +93,7 @@ T getChunk() { } /** - * @return the number of rows that were sent before the first row in this writer. 
+ * @return the offset into the logical message potentially spread over multiple chunks */ public long getRowOffset() { return rowOffset; diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReaderFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReaderFactory.java index b90b915b63b..40ba0775100 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReaderFactory.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReaderFactory.java @@ -16,7 +16,8 @@ import io.deephaven.chunk.WritableShortChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.table.impl.lang.QueryLanguageFunctionUtils; -import io.deephaven.engine.table.impl.sources.ReinterpretUtils; +import io.deephaven.extensions.barrage.BarrageOptions; +import io.deephaven.extensions.barrage.BarrageTypeInfo; import io.deephaven.extensions.barrage.chunk.array.ArrayExpansionKernel; import io.deephaven.extensions.barrage.chunk.vector.VectorExpansionKernel; import io.deephaven.internal.log.LoggerFactory; @@ -48,8 +49,6 @@ import java.util.Set; import java.util.stream.Collectors; -import static io.deephaven.extensions.barrage.chunk.ChunkWriter.MS_PER_DAY; - /** * JVM implementation of {@link ChunkReader.Factory}, suitable for use in Java clients and servers. This default * implementation may not round trip flight types in a stable way, but will round trip Deephaven table definitions and @@ -71,8 +70,8 @@ public class DefaultChunkReaderFactory implements ChunkReader.Factory { protected interface ChunkReaderFactory { ChunkReader> make( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options); + final BarrageTypeInfo typeInfo, + final BarrageOptions options); } // allow subclasses to modify this as they wish @@ -134,8 +133,8 @@ protected DefaultChunkReaderFactory() { @Override public > ChunkReader newReader( - @NotNull final ChunkReader.TypeInfo typeInfo, - @NotNull final ChunkReader.Options options) { + @NotNull final BarrageTypeInfo typeInfo, + @NotNull final BarrageOptions options) { // TODO (deephaven/deephaven-core#6033): Run-End Support // TODO (deephaven/deephaven-core#6034): Dictionary Support @@ -194,19 +193,19 @@ public > ChunkReader newReader( fixedSizeLength = ((ArrowType.FixedSizeList) field.getType()).getListSize(); } - final ChunkReader.TypeInfo componentTypeInfo; + final BarrageTypeInfo componentTypeInfo; final boolean useVectorKernels = Vector.class.isAssignableFrom(typeInfo.type()); if (useVectorKernels) { final Class componentType = VectorExpansionKernel.getComponentType(typeInfo.type(), typeInfo.componentType()); - componentTypeInfo = new ChunkReader.TypeInfo( + componentTypeInfo = new BarrageTypeInfo( componentType, componentType.getComponentType(), typeInfo.arrowField().children(0)); } else if (typeInfo.type().isArray()) { final Class componentType = typeInfo.componentType(); // noinspection DataFlowIssue - componentTypeInfo = new ChunkReader.TypeInfo( + componentTypeInfo = new BarrageTypeInfo( componentType, componentType.getComponentType(), typeInfo.arrowField().children(0)); @@ -334,8 +333,8 @@ private static long factorForTimeUnit(final TimeUnit unit) { private static ChunkReader> timestampToLong( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { final long 
factor = factorForTimeUnit(((ArrowType.Timestamp) arrowType).getUnit()); return factor == 1 ? new LongChunkReader(options) @@ -345,8 +344,8 @@ private static ChunkReader> timestampToLong( private static ChunkReader> timestampToInstant( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { final long factor = factorForTimeUnit(((ArrowType.Timestamp) arrowType).getUnit()); return new FixedWidthChunkReader<>(Long.BYTES, true, options, io -> { final long value = io.readLong(); @@ -359,8 +358,8 @@ private static ChunkReader> timestampToInst private static ChunkReader> timestampToZonedDateTime( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { final ArrowType.Timestamp tsType = (ArrowType.Timestamp) arrowType; final String timezone = tsType.getTimezone(); final ZoneId tz = timezone == null ? ZoneId.systemDefault() : DateTimeUtils.parseTimeZone(timezone); @@ -376,8 +375,8 @@ private static ChunkReader> timestamp private static ChunkReader> timestampToLocalDateTime( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { final ArrowType.Timestamp tsType = (ArrowType.Timestamp) arrowType; final ZoneId tz = DateTimeUtils.parseTimeZone(tsType.getTimezone()); final long factor = factorForTimeUnit(tsType.getUnit()); @@ -393,15 +392,15 @@ private static ChunkReader> timestamp private static ChunkReader> utf8ToString( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { return new VarBinaryChunkReader<>((buf, off, len) -> new String(buf, off, len, Charsets.UTF_8)); } private static ChunkReader> durationToLong( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { final long factor = factorForTimeUnit(((ArrowType.Duration) arrowType).getUnit()); return factor == 1 ? 
new LongChunkReader(options) @@ -411,8 +410,8 @@ private static ChunkReader> durationToLong( private static ChunkReader> durationToDuration( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { final long factor = factorForTimeUnit(((ArrowType.Duration) arrowType).getUnit()); return transformToObject(new LongChunkReader(options), (chunk, ii) -> { long value = chunk.get(ii); @@ -422,22 +421,22 @@ private static ChunkReader> durationToDura private static ChunkReader> floatingPointToFloat( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { return new FloatChunkReader(((ArrowType.FloatingPoint) arrowType).getPrecision().getFlatbufID(), options); } private static ChunkReader> floatingPointToDouble( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { return new DoubleChunkReader(((ArrowType.FloatingPoint) arrowType).getPrecision().getFlatbufID(), options); } private static ChunkReader> floatingPointToBigDecimal( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { return transformToObject( new DoubleChunkReader(((ArrowType.FloatingPoint) arrowType).getPrecision().getFlatbufID(), options), (chunk, ii) -> { @@ -448,22 +447,22 @@ private static ChunkReader> floatingPoin private static ChunkReader> binaryToByteArray( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { return new VarBinaryChunkReader<>((buf, off, len) -> Arrays.copyOfRange(buf, off, off + len)); } private static ChunkReader> binaryToBigInt( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { return new VarBinaryChunkReader<>(BigInteger::new); } private static ChunkReader> binaryToBigDecimal( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { return new VarBinaryChunkReader<>((final byte[] buf, final int offset, final int length) -> { // read the int scale value as little endian, arrow's endianness. final byte b1 = buf[offset]; @@ -477,8 +476,8 @@ private static ChunkReader> binaryToBigD private static ChunkReader> timeToLong( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { // See timeToLocalTime's comment for more information on wire format. 
final ArrowType.Time timeType = (ArrowType.Time) arrowType; final int bitWidth = timeType.getBitWidth(); @@ -503,8 +502,8 @@ private static ChunkReader> timeToLong( private static ChunkReader> timeToLocalTime( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { /* * Time is either a 32-bit or 64-bit signed integer type representing an elapsed time since midnight, stored in * either of four units: seconds, milliseconds, microseconds or nanoseconds. @@ -544,48 +543,48 @@ private static ChunkReader> timeToLocalTi private static ChunkReader> decimalToByte( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { return ByteChunkReader.transformTo(decimalToBigDecimal(arrowType, typeInfo, options), (chunk, ii) -> QueryLanguageFunctionUtils.byteCast(chunk.get(ii))); } private static ChunkReader> decimalToChar( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { return CharChunkReader.transformTo(decimalToBigDecimal(arrowType, typeInfo, options), (chunk, ii) -> QueryLanguageFunctionUtils.charCast(chunk.get(ii))); } private static ChunkReader> decimalToShort( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { return ShortChunkReader.transformTo(decimalToBigDecimal(arrowType, typeInfo, options), (chunk, ii) -> QueryLanguageFunctionUtils.shortCast(chunk.get(ii))); } private static ChunkReader> decimalToInt( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { return IntChunkReader.transformTo(decimalToBigDecimal(arrowType, typeInfo, options), (chunk, ii) -> QueryLanguageFunctionUtils.intCast(chunk.get(ii))); } private static ChunkReader> decimalToLong( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { return LongChunkReader.transformTo(decimalToBigDecimal(arrowType, typeInfo, options), (chunk, ii) -> QueryLanguageFunctionUtils.longCast(chunk.get(ii))); } private static ChunkReader> decimalToBigInteger( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { // note this mapping is particularly useful if scale == 0 final ArrowType.Decimal decimalType = (ArrowType.Decimal) arrowType; final int byteWidth = decimalType.getBitWidth() / 8; @@ -613,24 +612,24 @@ private static ChunkReader> decimalToBig private static ChunkReader> decimalToFloat( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { return FloatChunkReader.transformTo(decimalToBigDecimal(arrowType, typeInfo, options), (chunk, ii) -> QueryLanguageFunctionUtils.floatCast(chunk.get(ii))); } private static ChunkReader> decimalToDouble( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { 
return DoubleChunkReader.transformTo(decimalToBigDecimal(arrowType, typeInfo, options), (chunk, ii) -> QueryLanguageFunctionUtils.doubleCast(chunk.get(ii))); } private static ChunkReader> decimalToBigDecimal( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { final ArrowType.Decimal decimalType = (ArrowType.Decimal) arrowType; final int byteWidth = decimalType.getBitWidth() / 8; final int scale = decimalType.getScale(); @@ -657,8 +656,8 @@ private static ChunkReader> decimalToBig private static ChunkReader> intToByte( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { final ArrowType.Int intType = (ArrowType.Int) arrowType; final int bitWidth = intType.getBitWidth(); @@ -685,8 +684,8 @@ private static ChunkReader> intToByte( private static ChunkReader> intToShort( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { final ArrowType.Int intType = (ArrowType.Int) arrowType; final int bitWidth = intType.getBitWidth(); final boolean unsigned = !intType.getIsSigned(); @@ -714,8 +713,8 @@ private static ChunkReader> intToShort( private static ChunkReader> intToInt( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { final ArrowType.Int intType = (ArrowType.Int) arrowType; final int bitWidth = intType.getBitWidth(); final boolean unsigned = !intType.getIsSigned(); @@ -742,8 +741,8 @@ private static ChunkReader> intToInt( private static ChunkReader> intToLong( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { final ArrowType.Int intType = (ArrowType.Int) arrowType; final int bitWidth = intType.getBitWidth(); final boolean unsigned = !intType.getIsSigned(); @@ -770,8 +769,8 @@ private static ChunkReader> intToLong( private static ChunkReader> intToBigInt( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { final ArrowType.Int intType = (ArrowType.Int) arrowType; final int bitWidth = intType.getBitWidth(); final boolean unsigned = !intType.getIsSigned(); @@ -796,8 +795,8 @@ private static ChunkReader> intToBigInt( private static ChunkReader> intToFloat( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { final ArrowType.Int intType = (ArrowType.Int) arrowType; final int bitWidth = intType.getBitWidth(); final boolean signed = intType.getIsSigned(); @@ -805,16 +804,16 @@ private static ChunkReader> intToFloat( switch (bitWidth) { case 8: return FloatChunkReader.transformTo(new ByteChunkReader(options), - (chunk, ii) -> floatCast(Byte.BYTES, signed, chunk.isNullAt(ii), chunk.get(ii))); + (chunk, ii) -> floatCast(Byte.BYTES, signed, chunk.isNull(ii), chunk.get(ii))); case 16: return FloatChunkReader.transformTo(new ShortChunkReader(options), - (chunk, ii) -> floatCast(Short.BYTES, signed, chunk.isNullAt(ii), chunk.get(ii))); + (chunk, ii) -> floatCast(Short.BYTES, 
signed, chunk.isNull(ii), chunk.get(ii))); case 32: return FloatChunkReader.transformTo(new IntChunkReader(options), - (chunk, ii) -> floatCast(Integer.BYTES, signed, chunk.isNullAt(ii), chunk.get(ii))); + (chunk, ii) -> floatCast(Integer.BYTES, signed, chunk.isNull(ii), chunk.get(ii))); case 64: return FloatChunkReader.transformTo(new LongChunkReader(options), - (chunk, ii) -> floatCast(Long.BYTES, signed, chunk.isNullAt(ii), chunk.get(ii))); + (chunk, ii) -> floatCast(Long.BYTES, signed, chunk.isNull(ii), chunk.get(ii))); default: throw new IllegalArgumentException("Unexpected bit width: " + bitWidth); } @@ -846,8 +845,8 @@ private static float floatCast( private static ChunkReader> intToDouble( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { final ArrowType.Int intType = (ArrowType.Int) arrowType; final int bitWidth = intType.getBitWidth(); final boolean signed = intType.getIsSigned(); @@ -855,16 +854,16 @@ private static ChunkReader> intToDouble( switch (bitWidth) { case 8: return DoubleChunkReader.transformTo(new ByteChunkReader(options), - (chunk, ii) -> doubleCast(Byte.BYTES, signed, chunk.isNullAt(ii), chunk.get(ii))); + (chunk, ii) -> doubleCast(Byte.BYTES, signed, chunk.isNull(ii), chunk.get(ii))); case 16: return DoubleChunkReader.transformTo(new ShortChunkReader(options), - (chunk, ii) -> doubleCast(Short.BYTES, signed, chunk.isNullAt(ii), chunk.get(ii))); + (chunk, ii) -> doubleCast(Short.BYTES, signed, chunk.isNull(ii), chunk.get(ii))); case 32: return DoubleChunkReader.transformTo(new IntChunkReader(options), - (chunk, ii) -> doubleCast(Integer.BYTES, signed, chunk.isNullAt(ii), chunk.get(ii))); + (chunk, ii) -> doubleCast(Integer.BYTES, signed, chunk.isNull(ii), chunk.get(ii))); case 64: return DoubleChunkReader.transformTo(new LongChunkReader(options), - (chunk, ii) -> doubleCast(Long.BYTES, signed, chunk.isNullAt(ii), chunk.get(ii))); + (chunk, ii) -> doubleCast(Long.BYTES, signed, chunk.isNull(ii), chunk.get(ii))); default: throw new IllegalArgumentException("Unexpected bit width: " + bitWidth); } @@ -896,8 +895,8 @@ private static double doubleCast( private static ChunkReader> intToBigDecimal( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { final ArrowType.Int intType = (ArrowType.Int) arrowType; final int bitWidth = intType.getBitWidth(); final boolean unsigned = !intType.getIsSigned(); @@ -924,8 +923,8 @@ private static ChunkReader> intToBigDeci private static ChunkReader> intToChar( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { final ArrowType.Int intType = (ArrowType.Int) arrowType; final int bitWidth = intType.getBitWidth(); final boolean unsigned = !intType.getIsSigned(); @@ -936,7 +935,12 @@ private static ChunkReader> intToChar( (chunk, ii) -> maskIfOverflow(unsigned, Byte.BYTES, QueryLanguageFunctionUtils.charCast(chunk.get(ii)))); case 16: - return new CharChunkReader(options); + if (unsigned) { + return new CharChunkReader(options); + } else { + return CharChunkReader.transformTo(new ShortChunkReader(options), + (chunk, ii) -> QueryLanguageFunctionUtils.charCast(chunk.get(ii))); + } case 32: // note unsigned mappings to char will overflow short; but user has asked for this return 
CharChunkReader.transformTo(new IntChunkReader(options), @@ -952,15 +956,15 @@ private static ChunkReader> intToChar( private static ChunkReader> boolToBoolean( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { return new BooleanChunkReader(); } private static ChunkReader> fixedSizeBinaryToByteArray( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { final ArrowType.FixedSizeBinary fixedSizeBinary = (ArrowType.FixedSizeBinary) arrowType; final int elementWidth = fixedSizeBinary.getByteWidth(); return new FixedWidthChunkReader<>(elementWidth, false, options, (dataInput) -> { @@ -972,17 +976,18 @@ private static ChunkReader> fixedSizeBinaryT private static ChunkReader> dateToInt( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { // see dateToLocalDate's comment for more information on wire format final ArrowType.Date dateType = (ArrowType.Date) arrowType; switch (dateType.getUnit()) { case DAY: return new IntChunkReader(options); case MILLISECOND: + final long factor = Duration.ofDays(1).toMillis(); return IntChunkReader.transformTo(new LongChunkReader(options), (chunk, ii) -> { long value = chunk.get(ii); - return value == QueryConstants.NULL_LONG ? QueryConstants.NULL_INT : (int) (value / MS_PER_DAY); + return value == QueryConstants.NULL_LONG ? QueryConstants.NULL_INT : (int) (value / factor); }); default: throw new IllegalArgumentException("Unexpected date unit: " + dateType.getUnit()); @@ -991,8 +996,8 @@ private static ChunkReader> dateToInt( private static ChunkReader> dateToLong( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { // see dateToLocalDate's comment for more information on wire format final ArrowType.Date dateType = (ArrowType.Date) arrowType; switch (dateType.getUnit()) { @@ -1000,9 +1005,10 @@ private static ChunkReader> dateToLong( return LongChunkReader.transformTo(new IntChunkReader(options), (chunk, ii) -> QueryLanguageFunctionUtils.longCast(chunk.get(ii))); case MILLISECOND: + final long factor = Duration.ofDays(1).toMillis(); return LongChunkReader.transformTo(new LongChunkReader(options), (chunk, ii) -> { long value = chunk.get(ii); - return value == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : value / MS_PER_DAY; + return value == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : value / factor; }); default: throw new IllegalArgumentException("Unexpected date unit: " + dateType.getUnit()); @@ -1011,8 +1017,8 @@ private static ChunkReader> dateToLong( private static ChunkReader> dateToLocalDate( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo, - final ChunkReader.Options options) { + final BarrageTypeInfo typeInfo, + final BarrageOptions options) { /* * Date is either a 32-bit or 64-bit signed integer type representing an elapsed time since UNIX epoch * (1970-01-01), stored in either of two units: @@ -1031,11 +1037,12 @@ private static ChunkReader> dateToLocalDa return value == QueryConstants.NULL_INT ? 
                            null : DateTimeUtils.epochDaysToLocalDate(value);
                });
            case MILLISECOND:
+                final long factor = Duration.ofDays(1).toMillis();
                return transformToObject(new LongChunkReader(options), (chunk, ii) -> {
                    long value = chunk.get(ii);
                    return value == QueryConstants.NULL_LONG
                            ? null
-                            : DateTimeUtils.epochDaysToLocalDate(value / MS_PER_DAY);
+                            : DateTimeUtils.epochDaysToLocalDate(value / factor);
                });
            default:
                throw new IllegalArgumentException("Unexpected date unit: " + dateType.getUnit());
@@ -1044,8 +1051,8 @@ private static ChunkReader<WritableObjectChunk<LocalDate, Values>> dateToLocalDa

    private static ChunkReader<WritableLongChunk<Values>> intervalToDurationLong(
            final ArrowType arrowType,
-            final ChunkReader.TypeInfo typeInfo,
-            final ChunkReader.Options options) {
+            final BarrageTypeInfo typeInfo,
+            final BarrageOptions options) {
        // See intervalToPeriod's comment for more information on wire format.

        final ArrowType.Interval intervalType = (ArrowType.Interval) arrowType;
@@ -1073,8 +1080,8 @@ private static ChunkReader<WritableLongChunk<Values>> intervalToDurationLong(

    private static ChunkReader<WritableObjectChunk<Duration, Values>> intervalToDuration(
            final ArrowType arrowType,
-            final ChunkReader.TypeInfo typeInfo,
-            final ChunkReader.Options options) {
+            final BarrageTypeInfo typeInfo,
+            final BarrageOptions options) {
        // See intervalToPeriod's comment for more information on wire format.

        final ArrowType.Interval intervalType = (ArrowType.Interval) arrowType;
@@ -1098,8 +1105,8 @@ private static ChunkReader<WritableObjectChunk<Duration, Values>> intervalToDura

    private static ChunkReader<WritableObjectChunk<Period, Values>> intervalToPeriod(
            final ArrowType arrowType,
-            final ChunkReader.TypeInfo typeInfo,
-            final ChunkReader.Options options) {
+            final BarrageTypeInfo typeInfo,
+            final BarrageOptions options) {
        /*
         * A "calendar" interval which models types that don't necessarily have a precise duration without the context
         * of a base timestamp (e.g. days can differ in length during daylight saving time transitions). All integers
@@ -1130,18 +1137,19 @@ private static ChunkReader<WritableObjectChunk<Period, Values>> intervalToPeriod
                    return value == QueryConstants.NULL_INT ? null : Period.ofMonths(value);
                });
            case DAY_TIME:
+                final long factor = Duration.ofDays(1).toMillis();
                return new FixedWidthChunkReader<>(Integer.BYTES * 2, false, options, dataInput -> {
                    final int days = dataInput.readInt();
                    final int millis = dataInput.readInt();
-                    return Period.ofDays(days).plusDays(millis / MS_PER_DAY);
+                    return Period.ofDays(days).plusDays(millis / factor);
                });
            case MONTH_DAY_NANO:
+                final long nsPerDay = Duration.ofDays(1).toNanos();
                return new FixedWidthChunkReader<>(Integer.BYTES * 2 + Long.BYTES, false, options, dataInput -> {
                    final int months = dataInput.readInt();
                    final int days = dataInput.readInt();
                    final long nanos = dataInput.readLong();
-                    final long NANOS_PER_MS = 1_000_000;
-                    return Period.of(0, months, days).plusDays(nanos / (MS_PER_DAY * NANOS_PER_MS));
+                    return Period.of(0, months, days).plusDays(nanos / (nsPerDay));
                });
            default:
                throw new IllegalArgumentException("Unexpected interval unit: " + intervalType.getUnit());
@@ -1150,8 +1158,8 @@ private static ChunkReader<WritableObjectChunk<Period, Values>> intervalToPeriod

    private static ChunkReader<WritableObjectChunk<PeriodDuration, Values>> intervalToPeriodDuration(
            final ArrowType arrowType,
-            final ChunkReader.TypeInfo typeInfo,
-            final ChunkReader.Options options) {
+            final BarrageTypeInfo typeInfo,
+            final BarrageOptions options) {
        // See intervalToPeriod's comment for more information on wire format.
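
[Reviewer note -- illustrative sketch, not part of this patch.] The MS_PER_DAY and NS_PER_DAY constants removed by this change are now derived locally from java.time.Duration, so each hunk names the unit factor it actually uses. The sentinel check matters: Deephaven encodes a null long as Long.MIN_VALUE (QueryConstants.NULL_LONG), and dividing the sentinel would corrupt it. A self-contained sketch of the null-preserving conversion used throughout these hunks:

    import java.time.Duration;

    final class EpochDayMathSketch {
        static final long NULL_LONG = Long.MIN_VALUE; // stand-in for QueryConstants.NULL_LONG
        static final long MS_PER_DAY = Duration.ofDays(1).toMillis(); // 86_400_000

        // check the null sentinel before applying any unit math
        static long epochMillisToEpochDay(final long epochMillis) {
            return epochMillis == NULL_LONG ? NULL_LONG : epochMillis / MS_PER_DAY;
        }

        public static void main(final String[] args) {
            System.out.println(epochMillisToEpochDay(172_800_000L)); // 2
            System.out.println(epochMillisToEpochDay(NULL_LONG));    // sentinel preserved
        }
    }
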
final ArrowType.Interval intervalType = (ArrowType.Interval) arrowType; diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkWriterFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkWriterFactory.java index 34d42349a60..593d3f17080 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkWriterFactory.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkWriterFactory.java @@ -18,6 +18,7 @@ import io.deephaven.engine.table.impl.lang.QueryLanguageFunctionUtils; import io.deephaven.engine.table.impl.preview.ArrayPreview; import io.deephaven.engine.table.impl.preview.DisplayWrapper; +import io.deephaven.extensions.barrage.BarrageTypeInfo; import io.deephaven.extensions.barrage.chunk.array.ArrayExpansionKernel; import io.deephaven.extensions.barrage.chunk.vector.VectorExpansionKernel; import io.deephaven.extensions.barrage.util.Float16; @@ -46,6 +47,7 @@ import java.time.LocalTime; import java.time.Period; import java.time.ZonedDateTime; +import java.util.EnumMap; import java.util.HashMap; import java.util.Map; import java.util.stream.Collectors; @@ -59,14 +61,18 @@ public class DefaultChunkWriterFactory implements ChunkWriter.Factory { public static final Logger log = LoggerFactory.getLogger(DefaultChunkWriterFactory.class); public static final ChunkWriter.Factory INSTANCE = new DefaultChunkWriterFactory(); - protected interface ChunkWriterFactory { + /** + * This supplier interface simplifies the cost to operate off of the ArrowType directly since the Arrow POJO is not + * yet supported over GWT. + */ + protected interface ArrowTypeChunkWriterSupplier { ChunkWriter> make( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo); + final BarrageTypeInfo typeInfo); } - private final Map, ChunkWriterFactory>> registeredFactories = - new HashMap<>(); + private final Map, ArrowTypeChunkWriterSupplier>> registeredFactories = + new EnumMap<>(ArrowType.ArrowTypeID.class); protected DefaultChunkWriterFactory() { register(ArrowType.ArrowTypeID.Timestamp, long.class, DefaultChunkWriterFactory::timestampFromLong); @@ -124,7 +130,7 @@ protected DefaultChunkWriterFactory() { @Override public > ChunkWriter newWriter( - @NotNull final ChunkReader.TypeInfo typeInfo) { + @NotNull final BarrageTypeInfo typeInfo) { // TODO (deephaven/deephaven-core#6033): Run-End Support // TODO (deephaven/deephaven-core#6034): Dictionary Support @@ -143,7 +149,7 @@ public > ChunkWriter newWriter( typeInfo.type().getCanonicalName())); } - final Map, ChunkWriterFactory> knownWriters = registeredFactories.get(typeId); + final Map, ArrowTypeChunkWriterSupplier> knownWriters = registeredFactories.get(typeId); if (knownWriters == null && !isSpecialType) { throw new UnsupportedOperationException(String.format( "No known ChunkWriter for arrow type %s from %s.", @@ -151,14 +157,17 @@ public > ChunkWriter newWriter( typeInfo.type().getCanonicalName())); } - final ChunkWriterFactory chunkWriterFactory = knownWriters == null ? null : knownWriters.get(typeInfo.type()); + final ArrowTypeChunkWriterSupplier chunkWriterFactory = + knownWriters == null ? 
null : knownWriters.get(typeInfo.type()); if (chunkWriterFactory != null) { // noinspection unchecked final ChunkWriter writer = (ChunkWriter) chunkWriterFactory.make(field.getType(), typeInfo); if (writer != null) { return writer; } - } else if (!isSpecialType) { + } + + if (!isSpecialType) { throw new UnsupportedOperationException(String.format( "No known ChunkWriter for arrow type %s from %s. Supported types: %s", field.getType().toString(), @@ -167,7 +176,8 @@ public > ChunkWriter newWriter( } if (typeId == ArrowType.ArrowTypeID.Null) { - return new NullChunkWriter<>(); + // noinspection unchecked + return (ChunkWriter) NullChunkWriter.INSTANCE; } if (typeId == ArrowType.ArrowTypeID.List @@ -183,19 +193,19 @@ public > ChunkWriter newWriter( fixedSizeLength = ((ArrowType.FixedSizeList) field.getType()).getListSize(); } - final ChunkReader.TypeInfo componentTypeInfo; + final BarrageTypeInfo componentTypeInfo; final boolean useVectorKernels = Vector.class.isAssignableFrom(typeInfo.type()); if (useVectorKernels) { final Class componentType = VectorExpansionKernel.getComponentType(typeInfo.type(), typeInfo.componentType()); - componentTypeInfo = new ChunkReader.TypeInfo( + componentTypeInfo = new BarrageTypeInfo( componentType, componentType.getComponentType(), typeInfo.arrowField().children(0)); } else if (typeInfo.type().isArray()) { final Class componentType = typeInfo.componentType(); // noinspection DataFlowIssue - componentTypeInfo = new ChunkReader.TypeInfo( + componentTypeInfo = new BarrageTypeInfo( componentType, componentType.getComponentType(), typeInfo.arrowField().children(0)); @@ -260,7 +270,7 @@ public > ChunkWriter newWriter( protected void register( final ArrowType.ArrowTypeID arrowType, final Class deephavenType, - final ChunkWriterFactory chunkWriterFactory) { + final ArrowTypeChunkWriterSupplier chunkWriterFactory) { registeredFactories.computeIfAbsent(arrowType, k -> new HashMap<>()) .put(deephavenType, chunkWriterFactory); @@ -268,31 +278,38 @@ protected void register( if (deephavenType == byte.class) { registeredFactories.computeIfAbsent(arrowType, k -> new HashMap<>()) .put(Byte.class, (at, typeInfo) -> new ByteChunkWriter>( - ObjectChunk::getEmptyChunk, (chunk, ii) -> TypeUtils.unbox(chunk.get(ii)))); + ObjectChunk::isNull, ObjectChunk::getEmptyChunk, + (chunk, ii) -> TypeUtils.unbox(chunk.get(ii)))); } else if (deephavenType == short.class) { registeredFactories.computeIfAbsent(arrowType, k -> new HashMap<>()) .put(Short.class, (at, typeInfo) -> new ShortChunkWriter>( - ObjectChunk::getEmptyChunk, (chunk, ii) -> TypeUtils.unbox(chunk.get(ii)))); + ObjectChunk::isNull, ObjectChunk::getEmptyChunk, + (chunk, ii) -> TypeUtils.unbox(chunk.get(ii)))); } else if (deephavenType == int.class) { registeredFactories.computeIfAbsent(arrowType, k -> new HashMap<>()) .put(Integer.class, (at, typeInfo) -> new IntChunkWriter>( - ObjectChunk::getEmptyChunk, (chunk, ii) -> TypeUtils.unbox(chunk.get(ii)))); + ObjectChunk::isNull, ObjectChunk::getEmptyChunk, + (chunk, ii) -> TypeUtils.unbox(chunk.get(ii)))); } else if (deephavenType == long.class) { registeredFactories.computeIfAbsent(arrowType, k -> new HashMap<>()) .put(Long.class, (at, typeInfo) -> new LongChunkWriter>( - ObjectChunk::getEmptyChunk, (chunk, ii) -> TypeUtils.unbox(chunk.get(ii)))); + ObjectChunk::isNull, ObjectChunk::getEmptyChunk, + (chunk, ii) -> TypeUtils.unbox(chunk.get(ii)))); } else if (deephavenType == char.class) { registeredFactories.computeIfAbsent(arrowType, k -> new HashMap<>()) .put(Character.class, 
(at, typeInfo) -> new CharChunkWriter>( - ObjectChunk::getEmptyChunk, (chunk, ii) -> TypeUtils.unbox(chunk.get(ii)))); + ObjectChunk::isNull, ObjectChunk::getEmptyChunk, + (chunk, ii) -> TypeUtils.unbox(chunk.get(ii)))); } else if (deephavenType == float.class) { registeredFactories.computeIfAbsent(arrowType, k -> new HashMap<>()) .put(Float.class, (at, typeInfo) -> new FloatChunkWriter>( - ObjectChunk::getEmptyChunk, (chunk, ii) -> TypeUtils.unbox(chunk.get(ii)))); + ObjectChunk::isNull, ObjectChunk::getEmptyChunk, + (chunk, ii) -> TypeUtils.unbox(chunk.get(ii)))); } else if (deephavenType == double.class) { registeredFactories.computeIfAbsent(arrowType, k -> new HashMap<>()) .put(Double.class, (at, typeInfo) -> new DoubleChunkWriter>( - ObjectChunk::getEmptyChunk, (chunk, ii) -> TypeUtils.unbox(chunk.get(ii)))); + ObjectChunk::isNull, ObjectChunk::getEmptyChunk, + (chunk, ii) -> TypeUtils.unbox(chunk.get(ii)))); } } @@ -313,26 +330,36 @@ private static long factorForTimeUnit(final TimeUnit unit) { private static ChunkWriter> timestampFromLong( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { final ArrowType.Timestamp tsType = (ArrowType.Timestamp) arrowType; final long factor = factorForTimeUnit(tsType.getUnit()); - return new LongChunkWriter<>(LongChunk::getEmptyChunk, (Chunk source, int offset) -> { - // unfortunately we do not know whether ReinterpretUtils can convert the column source to longs or not - if (source instanceof LongChunk) { - final long value = source.asLongChunk().get(offset); - return value == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : value / factor; - } + // TODO (https://github.com/deephaven/deephaven-core/issues/5241): Inconsistent handling of ZonedDateTime + // we do not know whether the incoming chunk source is a LongChunk or ObjectChunk + return new LongChunkWriter<>( + (Chunk source, int offset) -> { + if (source instanceof LongChunk) { + return source.asLongChunk().isNull(offset); + } - final ZonedDateTime value = source.asObjectChunk().get(offset); - return value == null ? QueryConstants.NULL_LONG : DateTimeUtils.epochNanos(value) / factor; - }); + return source.asObjectChunk().isNull(offset); + }, + LongChunk::getEmptyChunk, + (Chunk source, int offset) -> { + if (source instanceof LongChunk) { + final long value = source.asLongChunk().get(offset); + return value == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : value / factor; + } + + final ZonedDateTime value = source.asObjectChunk().get(offset); + return value == null ? QueryConstants.NULL_LONG : DateTimeUtils.epochNanos(value) / factor; + }); } private static ChunkWriter> timestampFromInstant( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { final long factor = factorForTimeUnit(((ArrowType.Timestamp) arrowType).getUnit()); - return new LongChunkWriter<>(ObjectChunk::getEmptyChunk, (source, offset) -> { + return new LongChunkWriter<>(ObjectChunk::isNull, ObjectChunk::getEmptyChunk, (source, offset) -> { final Instant value = source.get(offset); return value == null ? 
QueryConstants.NULL_LONG : DateTimeUtils.epochNanos(value) / factor; }); @@ -340,10 +367,10 @@ private static ChunkWriter> timestampFromInstant( private static ChunkWriter> timestampFromZonedDateTime( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { final ArrowType.Timestamp tsType = (ArrowType.Timestamp) arrowType; final long factor = factorForTimeUnit(tsType.getUnit()); - return new LongChunkWriter<>(ObjectChunk::getEmptyChunk, (source, offset) -> { + return new LongChunkWriter<>(ObjectChunk::isNull, ObjectChunk::getEmptyChunk, (source, offset) -> { final ZonedDateTime value = source.get(offset); return value == null ? QueryConstants.NULL_LONG : DateTimeUtils.epochNanos(value) / factor; }); @@ -351,23 +378,23 @@ private static ChunkWriter> timestampFromZone private static ChunkWriter> utf8FromString( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { return new VarBinaryChunkWriter<>((out, item) -> out.write(item.getBytes(StandardCharsets.UTF_8))); } private static ChunkWriter> utf8FromObject( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { return new VarBinaryChunkWriter<>((out, item) -> out.write(item.toString().getBytes(StandardCharsets.UTF_8))); } private static ChunkWriter> durationFromLong( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { final long factor = factorForTimeUnit(((ArrowType.Duration) arrowType).getUnit()); return factor == 1 - ? LongChunkWriter.INSTANCE - : new LongChunkWriter<>(LongChunk::getEmptyChunk, (source, offset) -> { + ? LongChunkWriter.IDENTITY_INSTANCE + : new LongChunkWriter<>(LongChunk::isNull, LongChunk::getEmptyChunk, (source, offset) -> { final long value = source.get(offset); return value == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : value / factor; }); @@ -375,9 +402,9 @@ private static ChunkWriter> durationFromLong( private static ChunkWriter> durationFromDuration( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { final long factor = factorForTimeUnit(((ArrowType.Duration) arrowType).getUnit()); - return new LongChunkWriter<>(ObjectChunk::getEmptyChunk, (source, offset) -> { + return new LongChunkWriter<>(ObjectChunk::isNull, ObjectChunk::getEmptyChunk, (source, offset) -> { final Duration value = source.get(offset); return value == null ? QueryConstants.NULL_LONG : value.toNanos() / factor; }); @@ -385,11 +412,11 @@ private static ChunkWriter> durationFromDuration( private static ChunkWriter> floatingPointFromFloat( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { final ArrowType.FloatingPoint fpType = (ArrowType.FloatingPoint) arrowType; switch (fpType.getPrecision()) { case HALF: - return new ShortChunkWriter<>(FloatChunk::getEmptyChunk, (source, offset) -> { + return new ShortChunkWriter<>(FloatChunk::isNull, FloatChunk::getEmptyChunk, (source, offset) -> { final double value = source.get(offset); return value == QueryConstants.NULL_FLOAT ? 
QueryConstants.NULL_SHORT @@ -397,10 +424,10 @@ private static ChunkWriter> floatingPointFromFloat( }); case SINGLE: - return FloatChunkWriter.INSTANCE; + return FloatChunkWriter.IDENTITY_INSTANCE; case DOUBLE: - return new DoubleChunkWriter<>(FloatChunk::getEmptyChunk, + return new DoubleChunkWriter<>(FloatChunk::isNull, FloatChunk::getEmptyChunk, (source, offset) -> QueryLanguageFunctionUtils.doubleCast(source.get(offset))); default: @@ -410,11 +437,11 @@ private static ChunkWriter> floatingPointFromFloat( private static ChunkWriter> floatingPointFromDouble( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { final ArrowType.FloatingPoint fpType = (ArrowType.FloatingPoint) arrowType; switch (fpType.getPrecision()) { case HALF: - return new ShortChunkWriter<>(DoubleChunk::getEmptyChunk, (source, offset) -> { + return new ShortChunkWriter<>(DoubleChunk::isNull, DoubleChunk::getEmptyChunk, (source, offset) -> { final double value = source.get(offset); return value == QueryConstants.NULL_DOUBLE ? QueryConstants.NULL_SHORT @@ -422,10 +449,10 @@ private static ChunkWriter> floatingPointFromDouble( }); case SINGLE: - return new FloatChunkWriter<>(DoubleChunk::getEmptyChunk, + return new FloatChunkWriter<>(DoubleChunk::isNull, DoubleChunk::getEmptyChunk, (source, offset) -> QueryLanguageFunctionUtils.floatCast(source.get(offset))); case DOUBLE: - return DoubleChunkWriter.INSTANCE; + return DoubleChunkWriter.IDENTITY_INSTANCE; default: throw new IllegalArgumentException("Unexpected floating point precision: " + fpType.getPrecision()); @@ -434,11 +461,11 @@ private static ChunkWriter> floatingPointFromDouble( private static ChunkWriter> floatingPointFromBigDecimal( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { final ArrowType.FloatingPoint fpType = (ArrowType.FloatingPoint) arrowType; switch (fpType.getPrecision()) { case HALF: - return new ShortChunkWriter<>(ObjectChunk::getEmptyChunk, (source, offset) -> { + return new ShortChunkWriter<>(ObjectChunk::isNull, ObjectChunk::getEmptyChunk, (source, offset) -> { final BigDecimal value = source.get(offset); return value == null ? 
QueryConstants.NULL_SHORT @@ -446,11 +473,11 @@ private static ChunkWriter> floatingPointFromBig }); case SINGLE: - return new FloatChunkWriter<>(ObjectChunk::getEmptyChunk, + return new FloatChunkWriter<>(ObjectChunk::isNull, ObjectChunk::getEmptyChunk, (source, offset) -> QueryLanguageFunctionUtils.floatCast(source.get(offset))); case DOUBLE: - return new DoubleChunkWriter<>(ObjectChunk::getEmptyChunk, + return new DoubleChunkWriter<>(ObjectChunk::isNull, ObjectChunk::getEmptyChunk, (source, offset) -> QueryLanguageFunctionUtils.doubleCast(source.get(offset))); default: @@ -460,19 +487,19 @@ private static ChunkWriter> floatingPointFromBig private static ChunkWriter> binaryFromByteArray( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { return new VarBinaryChunkWriter<>(OutputStream::write); } private static ChunkWriter> binaryFromBigInt( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { return new VarBinaryChunkWriter<>((out, item) -> out.write(item.toByteArray())); } private static ChunkWriter> binaryFromBigDecimal( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { return new VarBinaryChunkWriter<>((out, item) -> { final BigDecimal normal = item.stripTrailingZeros(); final int v = normal.scale(); @@ -487,14 +514,14 @@ private static ChunkWriter> binaryFromBigDecimal private static ChunkWriter> timeFromLong( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { // See timeFromLocalTime's comment for more information on wire format. final ArrowType.Time timeType = (ArrowType.Time) arrowType; final int bitWidth = timeType.getBitWidth(); final long factor = factorForTimeUnit(timeType.getUnit()); switch (bitWidth) { case 32: - return new IntChunkWriter<>(LongChunk::getEmptyChunk, (chunk, ii) -> { + return new IntChunkWriter<>(LongChunk::isNull, LongChunk::getEmptyChunk, (chunk, ii) -> { // note: do math prior to truncation long value = chunk.get(ii); value = value == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : value / factor; @@ -502,7 +529,7 @@ private static ChunkWriter> timeFromLong( }); case 64: - return new LongChunkWriter<>(LongChunk::getEmptyChunk, (chunk, ii) -> { + return new LongChunkWriter<>(LongChunk::isNull, LongChunk::getEmptyChunk, (chunk, ii) -> { long value = chunk.get(ii); return value == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : value / factor; }); @@ -514,7 +541,7 @@ private static ChunkWriter> timeFromLong( private static ChunkWriter> timeFromLocalTime( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { /* * Time is either a 32-bit or 64-bit signed integer type representing an elapsed time since midnight, stored in * either of four units: seconds, milliseconds, microseconds or nanoseconds. @@ -536,7 +563,7 @@ private static ChunkWriter> timeFromLocalTime( final long factor = factorForTimeUnit(timeType.getUnit()); switch (bitWidth) { case 32: - return new IntChunkWriter<>(ObjectChunk::getEmptyChunk, (chunk, ii) -> { + return new IntChunkWriter<>(ObjectChunk::isNull, ObjectChunk::getEmptyChunk, (chunk, ii) -> { // note: do math prior to truncation final LocalTime lt = chunk.get(ii); final long value = lt == null ? 
QueryConstants.NULL_LONG : lt.toNanoOfDay() / factor; @@ -544,7 +571,7 @@ private static ChunkWriter> timeFromLocalTime( }); case 64: - return new LongChunkWriter<>(ObjectChunk::getEmptyChunk, (chunk, ii) -> { + return new LongChunkWriter<>(ObjectChunk::isNull, ObjectChunk::getEmptyChunk, (chunk, ii) -> { final LocalTime lt = chunk.get(ii); return lt == null ? QueryConstants.NULL_LONG : lt.toNanoOfDay() / factor; }); @@ -556,7 +583,7 @@ private static ChunkWriter> timeFromLocalTime( private static ChunkWriter> decimalFromByte( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { final ArrowType.Decimal decimalType = (ArrowType.Decimal) arrowType; final int byteWidth = decimalType.getBitWidth() / 8; final int scale = decimalType.getScale(); @@ -566,20 +593,21 @@ private static ChunkWriter> decimalFromByte( .subtract(BigInteger.ONE) .negate(); - return new FixedWidthChunkWriter<>(ByteChunk::getEmptyChunk, byteWidth, false, (out, chunk, offset) -> { - byte value = chunk.get(offset); - if (value == QueryConstants.NULL_BYTE) { - out.write(nullValue); - return; - } + return new FixedWidthChunkWriter<>(ByteChunk::isNull, ByteChunk::getEmptyChunk, byteWidth, false, + (out, chunk, offset) -> { + byte value = chunk.get(offset); + if (value == QueryConstants.NULL_BYTE) { + out.write(nullValue); + return; + } - writeBigDecimal(out, BigDecimal.valueOf(value), byteWidth, scale, truncationMask, nullValue); - }); + writeBigDecimal(out, BigDecimal.valueOf(value), byteWidth, scale, truncationMask, nullValue); + }); } private static ChunkWriter> decimalFromChar( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { final ArrowType.Decimal decimalType = (ArrowType.Decimal) arrowType; final int byteWidth = decimalType.getBitWidth() / 8; final int scale = decimalType.getScale(); @@ -589,20 +617,21 @@ private static ChunkWriter> decimalFromChar( .subtract(BigInteger.ONE) .negate(); - return new FixedWidthChunkWriter<>(CharChunk::getEmptyChunk, byteWidth, false, (out, chunk, offset) -> { - char value = chunk.get(offset); - if (value == QueryConstants.NULL_CHAR) { - out.write(nullValue); - return; - } + return new FixedWidthChunkWriter<>(CharChunk::isNull, CharChunk::getEmptyChunk, byteWidth, false, + (out, chunk, offset) -> { + char value = chunk.get(offset); + if (value == QueryConstants.NULL_CHAR) { + out.write(nullValue); + return; + } - writeBigDecimal(out, BigDecimal.valueOf(value), byteWidth, scale, truncationMask, nullValue); - }); + writeBigDecimal(out, BigDecimal.valueOf(value), byteWidth, scale, truncationMask, nullValue); + }); } private static ChunkWriter> decimalFromShort( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { final ArrowType.Decimal decimalType = (ArrowType.Decimal) arrowType; final int byteWidth = decimalType.getBitWidth() / 8; final int scale = decimalType.getScale(); @@ -612,20 +641,21 @@ private static ChunkWriter> decimalFromShort( .subtract(BigInteger.ONE) .negate(); - return new FixedWidthChunkWriter<>(ShortChunk::getEmptyChunk, byteWidth, false, (out, chunk, offset) -> { - short value = chunk.get(offset); - if (value == QueryConstants.NULL_SHORT) { - out.write(nullValue); - return; - } + return new FixedWidthChunkWriter<>(ShortChunk::isNull, ShortChunk::getEmptyChunk, byteWidth, false, + (out, chunk, offset) -> { + short value = chunk.get(offset); + if (value == QueryConstants.NULL_SHORT) { + out.write(nullValue); + return; + } 
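
[Reviewer note -- hedged sketch, not part of this patch.] The decimalFrom* writers encode the unscaled value of a BigDecimal as a fixed-width, little-endian, two's-complement integer, which is Arrow's Decimal wire layout; the patch's writeBigDecimal additionally applies the truncationMask computed above when a rescaled value does not fit in byteWidth. A minimal standalone encoding, omitting that masking step (RoundingMode.HALF_UP is this sketch's choice, not necessarily the patch's):

    import java.math.BigDecimal;
    import java.math.BigInteger;
    import java.math.RoundingMode;
    import java.util.Arrays;

    final class DecimalEncodeSketch {
        static byte[] encode(final BigDecimal value, final int byteWidth, final int scale) {
            final BigInteger unscaled = value.setScale(scale, RoundingMode.HALF_UP).unscaledValue();
            final byte[] bigEndian = unscaled.toByteArray(); // minimal two's-complement, big-endian
            final byte[] out = new byte[byteWidth];
            Arrays.fill(out, (byte) (unscaled.signum() < 0 ? 0xFF : 0x00)); // sign-extend
            for (int ii = 0; ii < bigEndian.length && ii < byteWidth; ++ii) {
                out[ii] = bigEndian[bigEndian.length - 1 - ii]; // reverse into little-endian
            }
            return out;
        }
    }
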
- writeBigDecimal(out, BigDecimal.valueOf(value), byteWidth, scale, truncationMask, nullValue); - }); + writeBigDecimal(out, BigDecimal.valueOf(value), byteWidth, scale, truncationMask, nullValue); + }); } private static ChunkWriter> decimalFromInt( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { final ArrowType.Decimal decimalType = (ArrowType.Decimal) arrowType; final int byteWidth = decimalType.getBitWidth() / 8; final int scale = decimalType.getScale(); @@ -635,20 +665,21 @@ private static ChunkWriter> decimalFromInt( .subtract(BigInteger.ONE) .negate(); - return new FixedWidthChunkWriter<>(IntChunk::getEmptyChunk, byteWidth, false, (out, chunk, offset) -> { - int value = chunk.get(offset); - if (value == QueryConstants.NULL_INT) { - out.write(nullValue); - return; - } + return new FixedWidthChunkWriter<>(IntChunk::isNull, IntChunk::getEmptyChunk, byteWidth, false, + (out, chunk, offset) -> { + int value = chunk.get(offset); + if (value == QueryConstants.NULL_INT) { + out.write(nullValue); + return; + } - writeBigDecimal(out, BigDecimal.valueOf(value), byteWidth, scale, truncationMask, nullValue); - }); + writeBigDecimal(out, BigDecimal.valueOf(value), byteWidth, scale, truncationMask, nullValue); + }); } private static ChunkWriter> decimalFromLong( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { final ArrowType.Decimal decimalType = (ArrowType.Decimal) arrowType; final int byteWidth = decimalType.getBitWidth() / 8; final int scale = decimalType.getScale(); @@ -658,20 +689,21 @@ private static ChunkWriter> decimalFromLong( .subtract(BigInteger.ONE) .negate(); - return new FixedWidthChunkWriter<>(LongChunk::getEmptyChunk, byteWidth, false, (out, chunk, offset) -> { - long value = chunk.get(offset); - if (value == QueryConstants.NULL_LONG) { - out.write(nullValue); - return; - } + return new FixedWidthChunkWriter<>(LongChunk::isNull, LongChunk::getEmptyChunk, byteWidth, false, + (out, chunk, offset) -> { + long value = chunk.get(offset); + if (value == QueryConstants.NULL_LONG) { + out.write(nullValue); + return; + } - writeBigDecimal(out, BigDecimal.valueOf(value), byteWidth, scale, truncationMask, nullValue); - }); + writeBigDecimal(out, BigDecimal.valueOf(value), byteWidth, scale, truncationMask, nullValue); + }); } private static ChunkWriter> decimalFromBigInteger( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { final ArrowType.Decimal decimalType = (ArrowType.Decimal) arrowType; final int byteWidth = decimalType.getBitWidth() / 8; final int scale = decimalType.getScale(); @@ -681,20 +713,21 @@ private static ChunkWriter> decimalFromBigIntege .subtract(BigInteger.ONE) .negate(); - return new FixedWidthChunkWriter<>(ObjectChunk::getEmptyChunk, byteWidth, false, (out, chunk, offset) -> { - BigInteger value = chunk.get(offset); - if (value == null) { - out.write(nullValue); - return; - } + return new FixedWidthChunkWriter<>(ObjectChunk::isNull, ObjectChunk::getEmptyChunk, byteWidth, false, + (out, chunk, offset) -> { + BigInteger value = chunk.get(offset); + if (value == null) { + out.write(nullValue); + return; + } - writeBigDecimal(out, new BigDecimal(value), byteWidth, scale, truncationMask, nullValue); - }); + writeBigDecimal(out, new BigDecimal(value), byteWidth, scale, truncationMask, nullValue); + }); } private static ChunkWriter> decimalFromFloat( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { 
+ final BarrageTypeInfo typeInfo) { final ArrowType.Decimal decimalType = (ArrowType.Decimal) arrowType; final int byteWidth = decimalType.getBitWidth() / 8; final int scale = decimalType.getScale(); @@ -704,20 +737,21 @@ private static ChunkWriter> decimalFromFloat( .subtract(BigInteger.ONE) .negate(); - return new FixedWidthChunkWriter<>(FloatChunk::getEmptyChunk, byteWidth, false, (out, chunk, offset) -> { - float value = chunk.get(offset); - if (value == QueryConstants.NULL_FLOAT) { - out.write(nullValue); - return; - } + return new FixedWidthChunkWriter<>(FloatChunk::isNull, FloatChunk::getEmptyChunk, byteWidth, false, + (out, chunk, offset) -> { + float value = chunk.get(offset); + if (value == QueryConstants.NULL_FLOAT) { + out.write(nullValue); + return; + } - writeBigDecimal(out, BigDecimal.valueOf(value), byteWidth, scale, truncationMask, nullValue); - }); + writeBigDecimal(out, BigDecimal.valueOf(value), byteWidth, scale, truncationMask, nullValue); + }); } private static ChunkWriter> decimalFromDouble( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { final ArrowType.Decimal decimalType = (ArrowType.Decimal) arrowType; final int byteWidth = decimalType.getBitWidth() / 8; final int scale = decimalType.getScale(); @@ -727,20 +761,21 @@ private static ChunkWriter> decimalFromDouble( .subtract(BigInteger.ONE) .negate(); - return new FixedWidthChunkWriter<>(DoubleChunk::getEmptyChunk, byteWidth, false, (out, chunk, offset) -> { - double value = chunk.get(offset); - if (value == QueryConstants.NULL_DOUBLE) { - out.write(nullValue); - return; - } + return new FixedWidthChunkWriter<>(DoubleChunk::isNull, DoubleChunk::getEmptyChunk, byteWidth, false, + (out, chunk, offset) -> { + double value = chunk.get(offset); + if (value == QueryConstants.NULL_DOUBLE) { + out.write(nullValue); + return; + } - writeBigDecimal(out, BigDecimal.valueOf(value), byteWidth, scale, truncationMask, nullValue); - }); + writeBigDecimal(out, BigDecimal.valueOf(value), byteWidth, scale, truncationMask, nullValue); + }); } private static ChunkWriter> decimalFromBigDecimal( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { final ArrowType.Decimal decimalType = (ArrowType.Decimal) arrowType; final int byteWidth = decimalType.getBitWidth() / 8; final int scale = decimalType.getScale(); @@ -750,15 +785,16 @@ private static ChunkWriter> decimalFromBigDecima .subtract(BigInteger.ONE) .negate(); - return new FixedWidthChunkWriter<>(ObjectChunk::getEmptyChunk, byteWidth, false, (out, chunk, offset) -> { - BigDecimal value = chunk.get(offset); - if (value == null) { - out.write(nullValue); - return; - } + return new FixedWidthChunkWriter<>(ObjectChunk::isNull, ObjectChunk::getEmptyChunk, byteWidth, false, + (out, chunk, offset) -> { + BigDecimal value = chunk.get(offset); + if (value == null) { + out.write(nullValue); + return; + } - writeBigDecimal(out, value, byteWidth, scale, truncationMask, nullValue); - }); + writeBigDecimal(out, value, byteWidth, scale, truncationMask, nullValue); + }); } private static void writeBigDecimal( @@ -783,21 +819,21 @@ private static void writeBigDecimal( private static ChunkWriter> intFromByte( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { final ArrowType.Int intType = (ArrowType.Int) arrowType; final int bitWidth = intType.getBitWidth(); switch (bitWidth) { case 8: - return ByteChunkWriter.INSTANCE; + return 
ByteChunkWriter.IDENTITY_INSTANCE; case 16: - return new ShortChunkWriter<>(ByteChunk::getEmptyChunk, + return new ShortChunkWriter<>(ByteChunk::isNull, ByteChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.shortCast(chunk.get(ii))); case 32: - return new IntChunkWriter<>(ByteChunk::getEmptyChunk, + return new IntChunkWriter<>(ByteChunk::isNull, ByteChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.intCast(chunk.get(ii))); case 64: - return new LongChunkWriter<>(ByteChunk::getEmptyChunk, + return new LongChunkWriter<>(ByteChunk::isNull, ByteChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.longCast(chunk.get(ii))); default: throw new IllegalArgumentException("Unexpected bit width: " + bitWidth); @@ -806,21 +842,21 @@ private static ChunkWriter> intFromByte( private static ChunkWriter> intFromShort( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { final ArrowType.Int intType = (ArrowType.Int) arrowType; final int bitWidth = intType.getBitWidth(); switch (bitWidth) { case 8: - return new ByteChunkWriter<>(ShortChunk::getEmptyChunk, + return new ByteChunkWriter<>(ShortChunk::isNull, ShortChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.byteCast(chunk.get(ii))); case 16: - return ShortChunkWriter.INSTANCE; + return ShortChunkWriter.IDENTITY_INSTANCE; case 32: - return new IntChunkWriter<>(ShortChunk::getEmptyChunk, + return new IntChunkWriter<>(ShortChunk::isNull, ShortChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.intCast(chunk.get(ii))); case 64: - return new LongChunkWriter<>(ShortChunk::getEmptyChunk, + return new LongChunkWriter<>(ShortChunk::isNull, ShortChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.longCast(chunk.get(ii))); default: throw new IllegalArgumentException("Unexpected bit width: " + bitWidth); @@ -829,21 +865,21 @@ private static ChunkWriter> intFromShort( private static ChunkWriter> intFromInt( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { final ArrowType.Int intType = (ArrowType.Int) arrowType; final int bitWidth = intType.getBitWidth(); switch (bitWidth) { case 8: - return new ByteChunkWriter<>(IntChunk::getEmptyChunk, + return new ByteChunkWriter<>(IntChunk::isNull, IntChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.byteCast(chunk.get(ii))); case 16: - return new ShortChunkWriter<>(IntChunk::getEmptyChunk, + return new ShortChunkWriter<>(IntChunk::isNull, IntChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.shortCast(chunk.get(ii))); case 32: - return IntChunkWriter.INSTANCE; + return IntChunkWriter.IDENTITY_INSTANCE; case 64: - return new LongChunkWriter<>(IntChunk::getEmptyChunk, + return new LongChunkWriter<>(IntChunk::isNull, IntChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.longCast(chunk.get(ii))); default: throw new IllegalArgumentException("Unexpected bit width: " + bitWidth); @@ -852,22 +888,22 @@ private static ChunkWriter> intFromInt( private static ChunkWriter> intFromLong( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { final ArrowType.Int intType = (ArrowType.Int) arrowType; final int bitWidth = intType.getBitWidth(); switch (bitWidth) { case 8: - return new ByteChunkWriter<>(LongChunk::getEmptyChunk, + return new ByteChunkWriter<>(LongChunk::isNull, LongChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.byteCast(chunk.get(ii))); case 16: - 
return new ShortChunkWriter<>(LongChunk::getEmptyChunk, + return new ShortChunkWriter<>(LongChunk::isNull, LongChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.shortCast(chunk.get(ii))); case 32: - return new IntChunkWriter<>(LongChunk::getEmptyChunk, + return new IntChunkWriter<>(LongChunk::isNull, LongChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.intCast(chunk.get(ii))); case 64: - return LongChunkWriter.INSTANCE; + return LongChunkWriter.IDENTITY_INSTANCE; default: throw new IllegalArgumentException("Unexpected bit width: " + bitWidth); } @@ -875,22 +911,22 @@ private static ChunkWriter> intFromLong( private static ChunkWriter> intFromObject( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { final ArrowType.Int intType = (ArrowType.Int) arrowType; final int bitWidth = intType.getBitWidth(); switch (bitWidth) { case 8: - return new ByteChunkWriter<>(ObjectChunk::getEmptyChunk, + return new ByteChunkWriter<>(ObjectChunk::isNull, ObjectChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.byteCast(chunk.get(ii))); case 16: - return new ShortChunkWriter<>(ObjectChunk::getEmptyChunk, + return new ShortChunkWriter<>(ObjectChunk::isNull, ObjectChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.shortCast(chunk.get(ii))); case 32: - return new IntChunkWriter<>(ObjectChunk::getEmptyChunk, + return new IntChunkWriter<>(ObjectChunk::isNull, ObjectChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.intCast(chunk.get(ii))); case 64: - return new LongChunkWriter<>(ObjectChunk::getEmptyChunk, + return new LongChunkWriter<>(ObjectChunk::isNull, ObjectChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.longCast(chunk.get(ii))); default: throw new IllegalArgumentException("Unexpected bit width: " + bitWidth); @@ -899,22 +935,27 @@ private static ChunkWriter> intFromObject( private static ChunkWriter> intFromChar( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { final ArrowType.Int intType = (ArrowType.Int) arrowType; final int bitWidth = intType.getBitWidth(); + final boolean unsigned = !intType.getIsSigned(); switch (bitWidth) { case 8: - return new ByteChunkWriter<>(CharChunk::getEmptyChunk, + return new ByteChunkWriter<>(CharChunk::isNull, CharChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.byteCast(chunk.get(ii))); case 16: - return new ShortChunkWriter<>(CharChunk::getEmptyChunk, - (chunk, ii) -> QueryLanguageFunctionUtils.shortCast(chunk.get(ii))); + if (unsigned) { + return CharChunkWriter.IDENTITY_INSTANCE; + } else { + return new ShortChunkWriter<>(CharChunk::isNull, CharChunk::getEmptyChunk, + (chunk, ii) -> QueryLanguageFunctionUtils.shortCast(chunk.get(ii))); + } case 32: - return new IntChunkWriter<>(CharChunk::getEmptyChunk, + return new IntChunkWriter<>(CharChunk::isNull, CharChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.intCast(chunk.get(ii))); case 64: - return new LongChunkWriter<>(CharChunk::getEmptyChunk, + return new LongChunkWriter<>(CharChunk::isNull, CharChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.longCast(chunk.get(ii))); default: throw new IllegalArgumentException("Unexpected bit width: " + bitWidth); @@ -923,22 +964,22 @@ private static ChunkWriter> intFromChar( private static ChunkWriter> intFromFloat( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { final ArrowType.Int intType = 
(ArrowType.Int) arrowType; final int bitWidth = intType.getBitWidth(); switch (bitWidth) { case 8: - return new ByteChunkWriter<>(FloatChunk::getEmptyChunk, + return new ByteChunkWriter<>(FloatChunk::isNull, FloatChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.byteCast(chunk.get(ii))); case 16: - return new ShortChunkWriter<>(FloatChunk::getEmptyChunk, + return new ShortChunkWriter<>(FloatChunk::isNull, FloatChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.shortCast(chunk.get(ii))); case 32: - return new IntChunkWriter<>(FloatChunk::getEmptyChunk, + return new IntChunkWriter<>(FloatChunk::isNull, FloatChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.intCast(chunk.get(ii))); case 64: - return new LongChunkWriter<>(FloatChunk::getEmptyChunk, + return new LongChunkWriter<>(FloatChunk::isNull, FloatChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.longCast(chunk.get(ii))); default: throw new IllegalArgumentException("Unexpected bit width: " + bitWidth); @@ -947,22 +988,22 @@ private static ChunkWriter> intFromFloat( private static ChunkWriter> intFromDouble( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { final ArrowType.Int intType = (ArrowType.Int) arrowType; final int bitWidth = intType.getBitWidth(); switch (bitWidth) { case 8: - return new ByteChunkWriter<>(DoubleChunk::getEmptyChunk, + return new ByteChunkWriter<>(DoubleChunk::isNull, DoubleChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.byteCast(chunk.get(ii))); case 16: - return new ShortChunkWriter<>(DoubleChunk::getEmptyChunk, + return new ShortChunkWriter<>(DoubleChunk::isNull, DoubleChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.shortCast(chunk.get(ii))); case 32: - return new IntChunkWriter<>(DoubleChunk::getEmptyChunk, + return new IntChunkWriter<>(DoubleChunk::isNull, DoubleChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.intCast(chunk.get(ii))); case 64: - return new LongChunkWriter<>(DoubleChunk::getEmptyChunk, + return new LongChunkWriter<>(DoubleChunk::isNull, DoubleChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.longCast(chunk.get(ii))); default: throw new IllegalArgumentException("Unexpected bit width: " + bitWidth); @@ -971,16 +1012,16 @@ private static ChunkWriter> intFromDouble( private static ChunkWriter> boolFromBoolean( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { return new BooleanChunkWriter(); } private static ChunkWriter> fixedSizeBinaryFromByteArray( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { final ArrowType.FixedSizeBinary fixedSizeBinary = (ArrowType.FixedSizeBinary) arrowType; final int elementWidth = fixedSizeBinary.getByteWidth(); - return new FixedWidthChunkWriter<>(ObjectChunk::getEmptyChunk, elementWidth, false, + return new FixedWidthChunkWriter<>(ObjectChunk::isNull, ObjectChunk::getEmptyChunk, elementWidth, false, (out, chunk, offset) -> { final byte[] data = chunk.get(offset); if (data.length != elementWidth) { @@ -994,20 +1035,21 @@ private static ChunkWriter> fixedSizeBinaryFromByteA private static ChunkWriter> dateFromInt( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { // see dateFromLocalDate's comment for more information on wire format final ArrowType.Date dateType = (ArrowType.Date) arrowType; switch (dateType.getUnit()) { case DAY: - return 
new IntChunkWriter<>(IntChunk::getEmptyChunk, + return new IntChunkWriter<>(IntChunk::isNull, IntChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.intCast(chunk.get(ii))); case MILLISECOND: - return new LongChunkWriter<>(IntChunk::getEmptyChunk, (chunk, ii) -> { + final long factor = Duration.ofDays(1).toMillis(); + return new LongChunkWriter<>(IntChunk::isNull, IntChunk::getEmptyChunk, (chunk, ii) -> { final long value = QueryLanguageFunctionUtils.longCast(chunk.get(ii)); return value == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG - : (value * ChunkWriter.MS_PER_DAY); + : (value * factor); }); default: throw new IllegalArgumentException("Unexpected date unit: " + dateType.getUnit()); @@ -1016,20 +1058,21 @@ private static ChunkWriter> dateFromInt( private static ChunkWriter> dateFromLong( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { // see dateFromLocalDate's comment for more information on wire format final ArrowType.Date dateType = (ArrowType.Date) arrowType; switch (dateType.getUnit()) { case DAY: - return new IntChunkWriter<>(LongChunk::getEmptyChunk, + return new IntChunkWriter<>(LongChunk::isNull, LongChunk::getEmptyChunk, (chunk, ii) -> QueryLanguageFunctionUtils.intCast(chunk.get(ii))); case MILLISECOND: - return new LongChunkWriter<>(LongChunk::getEmptyChunk, (chunk, ii) -> { + final long factor = Duration.ofDays(1).toMillis(); + return new LongChunkWriter<>(LongChunk::isNull, LongChunk::getEmptyChunk, (chunk, ii) -> { final long value = chunk.get(ii); return value == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG - : (value * ChunkWriter.MS_PER_DAY); + : (value * factor); }); default: throw new IllegalArgumentException("Unexpected date unit: " + dateType.getUnit()); @@ -1038,7 +1081,7 @@ private static ChunkWriter> dateFromLong( private static ChunkWriter> dateFromLocalDate( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { /* * Date is either a 32-bit or 64-bit signed integer type representing an elapsed time since UNIX epoch * (1970-01-01), stored in either of two units: @@ -1053,14 +1096,15 @@ private static ChunkWriter> dateFromLocalDate( final ArrowType.Date dateType = (ArrowType.Date) arrowType; switch (dateType.getUnit()) { case DAY: - return new IntChunkWriter<>(ObjectChunk::getEmptyChunk, (chunk, ii) -> { + return new IntChunkWriter<>(ObjectChunk::isNull, ObjectChunk::getEmptyChunk, (chunk, ii) -> { final LocalDate value = chunk.get(ii); return value == null ? QueryConstants.NULL_INT : (int) value.toEpochDay(); }); case MILLISECOND: - return new LongChunkWriter<>(ObjectChunk::getEmptyChunk, (chunk, ii) -> { + final long factor = Duration.ofDays(1).toMillis(); + return new LongChunkWriter<>(ObjectChunk::isNull, ObjectChunk::getEmptyChunk, (chunk, ii) -> { final LocalDate value = chunk.get(ii); - return value == null ? QueryConstants.NULL_LONG : value.toEpochDay() * ChunkWriter.MS_PER_DAY; + return value == null ? QueryConstants.NULL_LONG : value.toEpochDay() * factor; }); default: throw new IllegalArgumentException("Unexpected date unit: " + dateType.getUnit()); @@ -1069,7 +1113,7 @@ private static ChunkWriter> dateFromLocalDate( private static ChunkWriter> intervalFromDurationLong( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { // See intervalFromPeriod's comment for more information on wire format. 
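
[Reviewer note -- illustrative sketch, not part of this patch.] Arrow's DAY_TIME interval is two consecutive 32-bit integers, a day count followed by leftover milliseconds. The hunk below performs that split with locally derived factors; the sketch isolates the arithmetic (DataOutput is used only to keep it self-contained -- the real writer goes through Barrage's little-endian stream):

    import java.io.DataOutput;
    import java.io.IOException;
    import java.time.Duration;

    final class DayTimeIntervalSketch {
        static final long NS_PER_DAY = Duration.ofDays(1).toNanos();
        static final long NS_PER_MS = Duration.ofMillis(1).toNanos();

        // splits a nanosecond duration into Arrow's (days, millis) pair
        static void write(final DataOutput out, final long nanos) throws IOException {
            out.writeInt((int) (nanos / NS_PER_DAY));                // whole days
            out.writeInt((int) ((nanos % NS_PER_DAY) / NS_PER_MS));  // leftover millis
        }
    }
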
final ArrowType.Interval intervalType = (ArrowType.Interval) arrowType; @@ -1080,7 +1124,10 @@ private static ChunkWriter> intervalFromDurationLong( "Do not support %s interval from duration as long conversion", intervalType)); case DAY_TIME: - return new FixedWidthChunkWriter<>(LongChunk::getEmptyChunk, Integer.BYTES * 2, false, + final long nsPerDay = Duration.ofDays(1).toNanos(); + final long nsPerMs = Duration.ofMillis(1).toNanos(); + return new FixedWidthChunkWriter<>(LongChunk::isNull, LongChunk::getEmptyChunk, Integer.BYTES * 2, + false, (out, source, offset) -> { final long value = source.get(offset); if (value == QueryConstants.NULL_LONG) { @@ -1088,8 +1135,8 @@ private static ChunkWriter> intervalFromDurationLong( out.writeInt(0); } else { // days then millis - out.writeInt((int) (value / ChunkWriter.NS_PER_DAY)); - out.writeInt((int) ((value % ChunkWriter.NS_PER_DAY) / ChunkWriter.NS_PER_MS)); + out.writeInt((int) (value / nsPerDay)); + out.writeInt((int) ((value % nsPerDay) / nsPerMs)); } }); @@ -1100,7 +1147,7 @@ private static ChunkWriter> intervalFromDurationLong( private static ChunkWriter> intervalFromDuration( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { // See intervalFromPeriod's comment for more information on wire format. final ArrowType.Interval intervalType = (ArrowType.Interval) arrowType; @@ -1111,7 +1158,9 @@ private static ChunkWriter> intervalFromDuration( "Do not support %s interval from duration as long conversion", intervalType)); case DAY_TIME: - return new FixedWidthChunkWriter<>(ObjectChunk::getEmptyChunk, Integer.BYTES * 2, false, + final long nsPerMs = Duration.ofMillis(1).toNanos(); + return new FixedWidthChunkWriter<>(ObjectChunk::isNull, ObjectChunk::getEmptyChunk, Integer.BYTES * 2, + false, (out, source, offset) -> { final Duration value = source.get(offset); if (value == null) { @@ -1120,7 +1169,7 @@ private static ChunkWriter> intervalFromDuration( } else { // days then millis out.writeInt((int) value.toDays()); - out.writeInt((int) (value.getNano() / ChunkWriter.NS_PER_MS)); + out.writeInt((int) (value.getNano() / nsPerMs)); } }); @@ -1131,7 +1180,7 @@ private static ChunkWriter> intervalFromDuration( private static ChunkWriter> intervalFromPeriod( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { /* * A "calendar" interval which models types that don't necessarily have a precise duration without the context * of a base timestamp (e.g. days can differ in length during day light savings time transitions). All integers @@ -1157,12 +1206,13 @@ private static ChunkWriter> intervalFromPeriod( final ArrowType.Interval intervalType = (ArrowType.Interval) arrowType; switch (intervalType.getUnit()) { case YEAR_MONTH: - return new IntChunkWriter<>(ObjectChunk::getEmptyChunk, (chunk, ii) -> { + return new IntChunkWriter<>(ObjectChunk::isNull, ObjectChunk::getEmptyChunk, (chunk, ii) -> { final Period value = chunk.get(ii); return value == null ? 
QueryConstants.NULL_INT : value.getMonths() + value.getYears() * 12; }); case DAY_TIME: - return new FixedWidthChunkWriter<>(ObjectChunk::getEmptyChunk, Integer.BYTES * 2, false, + return new FixedWidthChunkWriter<>(ObjectChunk::isNull, ObjectChunk::getEmptyChunk, Integer.BYTES * 2, + false, (out, chunk, offset) -> { final Period value = chunk.get(offset); if (value == null) { @@ -1175,7 +1225,8 @@ private static ChunkWriter> intervalFromPeriod( } }); case MONTH_DAY_NANO: - return new FixedWidthChunkWriter<>(ObjectChunk::getEmptyChunk, Integer.BYTES * 2 + Long.BYTES, false, + return new FixedWidthChunkWriter<>(ObjectChunk::isNull, ObjectChunk::getEmptyChunk, + Integer.BYTES * 2 + Long.BYTES, false, (out, chunk, offset) -> { final Period value = chunk.get(offset); if (value == null) { @@ -1195,18 +1246,19 @@ private static ChunkWriter> intervalFromPeriod( private static ChunkWriter> intervalFromPeriodDuration( final ArrowType arrowType, - final ChunkReader.TypeInfo typeInfo) { + final BarrageTypeInfo typeInfo) { // See intervalToPeriod's comment for more information on wire format. final ArrowType.Interval intervalType = (ArrowType.Interval) arrowType; switch (intervalType.getUnit()) { case YEAR_MONTH: - return new IntChunkWriter<>(ObjectChunk::getEmptyChunk, (chunk, ii) -> { + return new IntChunkWriter<>(ObjectChunk::isNull, ObjectChunk::getEmptyChunk, (chunk, ii) -> { final Period value = chunk.get(ii).getPeriod(); return value == null ? QueryConstants.NULL_INT : value.getMonths() + value.getYears() * 12; }); case DAY_TIME: - return new FixedWidthChunkWriter<>(ObjectChunk::getEmptyChunk, Integer.BYTES * 2, false, + return new FixedWidthChunkWriter<>(ObjectChunk::isNull, ObjectChunk::getEmptyChunk, Integer.BYTES * 2, + false, (out, chunk, offset) -> { final PeriodDuration value = chunk.get(offset); if (value == null) { @@ -1219,7 +1271,8 @@ private static ChunkWriter> intervalFromPeri } }); case MONTH_DAY_NANO: - return new FixedWidthChunkWriter<>(ObjectChunk::getEmptyChunk, Integer.BYTES * 2 + Long.BYTES, false, + return new FixedWidthChunkWriter<>(ObjectChunk::isNull, ObjectChunk::getEmptyChunk, + Integer.BYTES * 2 + Long.BYTES, false, (out, chunk, offset) -> { final PeriodDuration value = chunk.get(offset); if (value == null) { diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java index 4ae5b478b6f..70af51ac0a7 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java @@ -1,6 +1,10 @@ // // Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending // +// ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY +// ****** Edit FloatChunkReader and run "./gradlew replicateBarrageUtils" to regenerate +// +// @formatter:off package io.deephaven.extensions.barrage.chunk; import io.deephaven.base.verify.Assert; @@ -8,7 +12,7 @@ import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.WritableLongChunk; import io.deephaven.chunk.attributes.Values; -import io.deephaven.extensions.barrage.util.Float16; +import io.deephaven.extensions.barrage.BarrageOptions;import io.deephaven.extensions.barrage.util.Float16; import io.deephaven.util.QueryConstants; import io.deephaven.util.datastructures.LongSizedDataStructure; import org.apache.arrow.flatbuf.Precision; @@ -38,13 +42,13 @@ public static , T 
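The DAY_TIME interval branches above encode each value as two little-endian ints, whole days followed by leftover milliseconds, with the divisors now derived from java.time.Duration instead of ChunkWriter constants. A minimal sketch of that split, assuming a raw nanosecond count as input; the class and method names here are illustrative, not part of the patch:

    import java.time.Duration;

    public final class DayTimeEncodingSketch {
        private static final long NS_PER_DAY = Duration.ofDays(1).toNanos();
        private static final long NS_PER_MS = Duration.ofMillis(1).toNanos();

        // Splits a nanosecond duration into the Arrow DAY_TIME pair: days, then leftover millis.
        static int[] toDayTime(final long totalNanos) {
            return new int[] {
                    (int) (totalNanos / NS_PER_DAY),
                    (int) ((totalNanos % NS_PER_DAY) / NS_PER_MS)};
        }

        public static void main(final String[] args) {
            final long nanos = NS_PER_DAY + 1_500_000L; // 1 day + 1.5 ms
            final int[] pair = toDayTime(nanos);
            System.out.println(pair[0] + " days, " + pair[1] + " ms"); // prints "1 days, 1 ms"
        }
    }

Sub-millisecond nanoseconds are truncated by the integer division, matching the writer's behavior on the wire.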
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java
index 4ae5b478b6f..70af51ac0a7 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java
@@ -1,6 +1,10 @@
 //
 // Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
 //
+// ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY
+// ****** Edit FloatChunkReader and run "./gradlew replicateBarrageUtils" to regenerate
+//
+// @formatter:off
 package io.deephaven.extensions.barrage.chunk;

 import io.deephaven.base.verify.Assert;
@@ -8,7 +12,7 @@
 import io.deephaven.chunk.WritableChunk;
 import io.deephaven.chunk.WritableLongChunk;
 import io.deephaven.chunk.attributes.Values;
-import io.deephaven.extensions.barrage.util.Float16;
+import io.deephaven.extensions.barrage.BarrageOptions;
+import io.deephaven.extensions.barrage.util.Float16;
 import io.deephaven.util.QueryConstants;
 import io.deephaven.util.datastructures.LongSizedDataStructure;
 import org.apache.arrow.flatbuf.Precision;
@@ -38,13 +42,13 @@ public static <WireChunkType extends WritableChunk<Values>, T extends ChunkReade
                 outOffset, wireTransform.get(wireValues, wireOffset)));
     }

-    private final short precisionFlatbufId;
-    private final ChunkReader.Options options;
+    private final short precisionFlatBufId;
+    private final BarrageOptions options;

     public DoubleChunkReader(
             final short precisionFlatbufId,
-            final ChunkReader.Options options) {
-        this.precisionFlatbufId = precisionFlatbufId;
+            final BarrageOptions options) {
+        this.precisionFlatBufId = precisionFlatbufId;
         this.options = options;
     }
@@ -91,9 +95,9 @@ public WritableDoubleChunk<Values> readChunk(
         Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead");

         if (options.useDeephavenNulls()) {
-            useDeephavenNulls(precisionFlatbufId, is, nodeInfo, chunk, outOffset);
+            useDeephavenNulls(precisionFlatBufId, is, nodeInfo, chunk, outOffset);
         } else {
-            useValidityBuffer(precisionFlatbufId, is, nodeInfo, chunk, outOffset, isValid);
+            useValidityBuffer(precisionFlatBufId, is, nodeInfo, chunk, outOffset, isValid);
         }

         final long overhangPayload = payloadBuffer - payloadRead;
@@ -106,27 +110,31 @@ public WritableDoubleChunk<Values> readChunk(
     }

     private static void useDeephavenNulls(
-            final short precisionFlatbufId,
+            final short precisionFlatBufId,
             final DataInput is,
             final ChunkWriter.FieldNodeInfo nodeInfo,
             final WritableDoubleChunk<Values> chunk,
             final int offset) throws IOException {
-        switch (precisionFlatbufId) {
+        switch (precisionFlatBufId) {
             case Precision.HALF:
                 throw new IllegalStateException("Cannot use Deephaven nulls with half-precision floats");
             case Precision.SINGLE:
                 for (int ii = 0; ii < nodeInfo.numElements; ++ii) {
+                    // region PrecisionSingleDhNulls
                     final float v = is.readFloat();
-                    chunk.set(offset + ii, v == QueryConstants.NULL_FLOAT ? QueryConstants.NULL_DOUBLE : v);
+                    chunk.set(offset + ii, doubleCast(v));
+                    // endregion PrecisionSingleDhNulls
                 }
                 break;
             case Precision.DOUBLE:
                 for (int ii = 0; ii < nodeInfo.numElements; ++ii) {
+                    // region PrecisionDoubleDhNulls
                     chunk.set(offset + ii, is.readDouble());
+                    // endregion PrecisionDoubleDhNulls
                 }
                 break;
             default:
-                throw new IllegalStateException("Unsupported floating point precision: " + precisionFlatbufId);
+                throw new IllegalStateException("Unsupported floating point precision: " + precisionFlatBufId);
         }
     }
@@ -135,12 +143,14 @@ private interface DoubleSupplier {
         double next() throws IOException;
     }

+    // region FPCastHelper
     private static double doubleCast(float a) {
         return a == QueryConstants.NULL_FLOAT ? QueryConstants.NULL_DOUBLE : (double) a;
     }
+    // endregion FPCastHelper

     private static void useValidityBuffer(
-            final short precisionFlatbufId,
+            final short precisionFlatBufId,
             final DataInput is,
             final ChunkWriter.FieldNodeInfo nodeInfo,
             final WritableDoubleChunk<Values> chunk,
@@ -154,21 +164,25 @@ private static void useValidityBuffer(
         final int elementSize;
         final DoubleSupplier supplier;
-        switch (precisionFlatbufId) {
+        switch (precisionFlatBufId) {
             case Precision.HALF:
                 elementSize = Short.BYTES;
                 supplier = () -> Float16.toFloat(is.readShort());
                 break;
             case Precision.SINGLE:
+                // region PrecisionSingleValidityBuffer
                 elementSize = Float.BYTES;
                 supplier = () -> doubleCast(is.readFloat());
+                // endregion PrecisionSingleValidityBuffer
                 break;
             case Precision.DOUBLE:
                 elementSize = Double.BYTES;
+                // region PrecisionDoubleValidityBuffer
                 supplier = is::readDouble;
+                // endregion PrecisionDoubleValidityBuffer
                 break;
             default:
-                throw new IllegalStateException("Unsupported floating point precision: " + precisionFlatbufId);
+                throw new IllegalStateException("Unsupported floating point precision: " + precisionFlatBufId);
         }

         for (int vi = 0; vi < numValidityWords; ++vi) {
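The replicated doubleCast helper above exists because a plain (double) widening would turn the NULL_FLOAT sentinel into an ordinary (and wrong) value. A self-contained sketch of the same idea, using the io.deephaven.util.QueryConstants sentinels the patch already imports; the standalone class name is illustrative:

    import io.deephaven.util.QueryConstants;

    public final class NullPreservingCastSketch {
        // Widen a float to a double, mapping the float null sentinel onto the double one.
        static double doubleCast(final float a) {
            return a == QueryConstants.NULL_FLOAT ? QueryConstants.NULL_DOUBLE : (double) a;
        }

        public static void main(final String[] args) {
            System.out.println(doubleCast(1.5f));                      // 1.5
            System.out.println(doubleCast(QueryConstants.NULL_FLOAT)); // NULL_DOUBLE, not a widened -3.4028235E38
        }
    }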
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkWriter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkWriter.java
index c590011ac42..4c849042d29 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkWriter.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkWriter.java
@@ -12,7 +12,7 @@
 import io.deephaven.engine.rowset.RowSet;
 import com.google.common.io.LittleEndianDataOutputStream;
 import io.deephaven.UncheckedDeephavenException;
-import io.deephaven.util.datastructures.LongSizedDataStructure;
+import io.deephaven.extensions.barrage.BarrageOptions;
+import io.deephaven.util.datastructures.LongSizedDataStructure;
 import io.deephaven.chunk.DoubleChunk;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
@@ -21,38 +21,39 @@
 import java.io.OutputStream;
 import java.util.function.Supplier;

-public class DoubleChunkWriter<SourceChunkType extends Chunk<Values>> extends BaseChunkWriter<SourceChunkType> {
+public class DoubleChunkWriter<SOURCE_CHUNK_TYPE extends Chunk<Values>> extends BaseChunkWriter<SOURCE_CHUNK_TYPE> {
     private static final String DEBUG_NAME = "DoubleChunkWriter";
-    public static final DoubleChunkWriter<DoubleChunk<Values>> INSTANCE = new DoubleChunkWriter<>(
-            DoubleChunk::getEmptyChunk, DoubleChunk::get);
+    public static final DoubleChunkWriter<DoubleChunk<Values>> IDENTITY_INSTANCE = new DoubleChunkWriter<>(
+            DoubleChunk::isNull, DoubleChunk::getEmptyChunk, DoubleChunk::get);

     @FunctionalInterface
     public interface ToDoubleTransformFunction<SourceChunkType extends Chunk<Values>> {
         double get(SourceChunkType sourceValues, int offset);
     }

-    private final ToDoubleTransformFunction<SourceChunkType> transform;
+    private final ToDoubleTransformFunction<SOURCE_CHUNK_TYPE> transform;

     public DoubleChunkWriter(
-            @NotNull final Supplier<SourceChunkType> emptyChunkSupplier,
-            @Nullable final ToDoubleTransformFunction<SourceChunkType> transform) {
-        super(emptyChunkSupplier, Double.BYTES, true);
+            @NotNull final IsRowNullProvider<SOURCE_CHUNK_TYPE> isRowNullProvider,
+            @NotNull final Supplier<SOURCE_CHUNK_TYPE> emptyChunkSupplier,
+            @Nullable final ToDoubleTransformFunction<SOURCE_CHUNK_TYPE> transform) {
+        super(isRowNullProvider, emptyChunkSupplier, Double.BYTES, true);
         this.transform = transform;
     }

     @Override
     public DrainableColumn getInputStream(
-            @NotNull final Context<SourceChunkType> context,
+            @NotNull final Context<SOURCE_CHUNK_TYPE> context,
             @Nullable final RowSet subset,
-            @NotNull final ChunkReader.Options options) throws IOException {
+            @NotNull final BarrageOptions options) throws IOException {
         return new DoubleChunkInputStream(context, subset, options);
     }

-    private class DoubleChunkInputStream extends BaseChunkInputStream<Context<SourceChunkType>> {
+    private class DoubleChunkInputStream extends BaseChunkInputStream<Context<SOURCE_CHUNK_TYPE>> {
         private DoubleChunkInputStream(
-                @NotNull final Context<SourceChunkType> context,
+                @NotNull final Context<SOURCE_CHUNK_TYPE> context,
                 @Nullable final RowSet subset,
-                @NotNull final ChunkReader.Options options) {
+                @NotNull final BarrageOptions options) {
             super(context, subset, options);
         }
@@ -66,8 +67,7 @@ public void visitBuffers(final BufferListener listener) {
             // validity
             listener.noteLogicalBuffer(sendValidityBuffer() ? getValidityMapSerializationSizeFor(subset.intSize()) : 0);
             // payload
-            long length = elementSize * subset.size();
-            listener.noteLogicalBuffer(padBufferSize(length));
+            listener.noteLogicalBuffer(padBufferSize(elementSize * subset.size()));
         }

         @Override
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FixedWidthChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FixedWidthChunkReader.java
index be811192228..5214f5b59d7 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FixedWidthChunkReader.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FixedWidthChunkReader.java
@@ -7,6 +7,7 @@
 import io.deephaven.chunk.WritableLongChunk;
 import io.deephaven.chunk.WritableObjectChunk;
 import io.deephaven.chunk.attributes.Values;
+import io.deephaven.extensions.barrage.BarrageOptions;
 import io.deephaven.util.datastructures.LongSizedDataStructure;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
@@ -26,13 +27,13 @@ public interface TypeConversion {

     private final boolean useDeephavenNulls;
     private final int elementSize;
-    private final ChunkReader.Options options;
+    private final BarrageOptions options;
     private final TypeConversion<T> conversion;

     public FixedWidthChunkReader(
             final int elementSize,
             final boolean dhNullable,
-            final ChunkReader.Options options,
+            final BarrageOptions options,
             final TypeConversion<T> conversion) {
         this.elementSize = elementSize;
         this.options = options;
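FixedWidthChunkReader drives a per-element TypeConversion over a stream of fixed-size values; the BarrageOptions swap above changes how nulls are negotiated, not this loop shape. A framework-free sketch of that read loop, with hypothetical names (Conversion, readAll) rather than the real Barrage types:

    import java.io.ByteArrayInputStream;
    import java.io.DataInput;
    import java.io.DataInputStream;
    import java.io.IOException;

    public final class FixedWidthLoopSketch {
        // Hypothetical per-element conversion, mirroring the reader's TypeConversion shape.
        interface Conversion<T> {
            T apply(DataInput in) throws IOException;
        }

        // Reads `count` fixed-width elements, converting each one as it is consumed.
        static <T> Object[] readAll(final DataInput in, final int count, final Conversion<T> conversion)
                throws IOException {
            final Object[] out = new Object[count];
            for (int ii = 0; ii < count; ++ii) {
                out[ii] = conversion.apply(in);
            }
            return out;
        }

        public static void main(final String[] args) throws IOException {
            final byte[] payload = {0, 0, 0, 1, 0, 0, 0, 2}; // two big-endian ints
            final DataInput in = new DataInputStream(new ByteArrayInputStream(payload));
            for (final Object v : readAll(in, 2, DataInput::readInt)) {
                System.out.println(v); // 1, then 2
            }
        }
    }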
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FixedWidthChunkWriter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FixedWidthChunkWriter.java
index 0301c516a2d..d159dc7f559 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FixedWidthChunkWriter.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FixedWidthChunkWriter.java
@@ -8,6 +8,7 @@
 import io.deephaven.chunk.Chunk;
 import io.deephaven.chunk.attributes.Values;
 import io.deephaven.engine.rowset.RowSet;
+import io.deephaven.extensions.barrage.BarrageOptions;
 import io.deephaven.util.datastructures.LongSizedDataStructure;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
@@ -17,7 +18,7 @@
 import java.io.OutputStream;
 import java.util.function.Supplier;

-public class FixedWidthChunkWriter<SourceChunkType extends Chunk<Values>> extends BaseChunkWriter<SourceChunkType> {
+public class FixedWidthChunkWriter<SOURCE_CHUNK_TYPE extends Chunk<Values>> extends BaseChunkWriter<SOURCE_CHUNK_TYPE> {
     private static final String DEBUG_NAME = "FixedWidthChunkWriter";

     @FunctionalInterface
@@ -25,30 +26,31 @@ public interface Appender<SourceChunkType extends Chunk<Values>> {
         void append(@NotNull DataOutput os, @NotNull SourceChunkType sourceValues, int offset) throws IOException;
     }

-    private final Appender<SourceChunkType> appendItem;
+    private final Appender<SOURCE_CHUNK_TYPE> appendItem;

     public FixedWidthChunkWriter(
-            @NotNull final Supplier<SourceChunkType> emptyChunkSupplier,
+            @NotNull final IsRowNullProvider<SOURCE_CHUNK_TYPE> isRowNullProvider,
+            @NotNull final Supplier<SOURCE_CHUNK_TYPE> emptyChunkSupplier,
             final int elementSize,
             final boolean dhNullable,
-            final Appender<SourceChunkType> appendItem) {
-        super(emptyChunkSupplier, elementSize, dhNullable);
+            final Appender<SOURCE_CHUNK_TYPE> appendItem) {
+        super(isRowNullProvider, emptyChunkSupplier, elementSize, dhNullable);
         this.appendItem = appendItem;
     }

     @Override
     public DrainableColumn getInputStream(
-            @NotNull final Context<SourceChunkType> context,
+            @NotNull final Context<SOURCE_CHUNK_TYPE> context,
             @Nullable final RowSet subset,
-            @NotNull final ChunkReader.Options options) throws IOException {
+            @NotNull final BarrageOptions options) throws IOException {
         return new FixedWidthChunkInputStream(context, subset, options);
     }

-    private class FixedWidthChunkInputStream extends BaseChunkInputStream<Context<SourceChunkType>> {
+    private class FixedWidthChunkInputStream extends BaseChunkInputStream<Context<SOURCE_CHUNK_TYPE>> {
         private FixedWidthChunkInputStream(
-                @NotNull final Context<SourceChunkType> context,
+                @NotNull final Context<SOURCE_CHUNK_TYPE> context,
                 @Nullable final RowSet subset,
-                @NotNull final ChunkReader.Options options) {
+                @NotNull final BarrageOptions options) {
             super(context, subset, options);
         }
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkReader.java
index a30b96fee24..48286dab83f 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkReader.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkReader.java
@@ -8,6 +8,7 @@
 import io.deephaven.chunk.WritableChunk;
 import io.deephaven.chunk.WritableLongChunk;
 import io.deephaven.chunk.attributes.Values;
+import io.deephaven.extensions.barrage.BarrageOptions;
 import io.deephaven.extensions.barrage.util.Float16;
 import io.deephaven.util.QueryConstants;
 import io.deephaven.util.datastructures.LongSizedDataStructure;
@@ -39,11 +40,11 @@
     }

     private final short precisionFlatBufId;
-    private final ChunkReader.Options options;
+    private final BarrageOptions options;

     public FloatChunkReader(
             final short precisionFlatbufId,
-            final ChunkReader.Options options) {
+            final BarrageOptions options) {
         this.precisionFlatBufId = precisionFlatbufId;
         this.options = options;
     }
@@ -116,13 +117,17 @@ private static void useDeephavenNulls(
                 throw new IllegalStateException("Cannot use Deephaven nulls with half-precision floats");
             case Precision.SINGLE:
                 for (int ii = 0; ii < nodeInfo.numElements; ++ii) {
+                    // region PrecisionSingleDhNulls
                     chunk.set(offset + ii, is.readFloat());
+                    // endregion PrecisionSingleDhNulls
                 }
                 break;
             case Precision.DOUBLE:
                 for (int ii = 0; ii < nodeInfo.numElements; ++ii) {
+                    // region PrecisionDoubleDhNulls
                     final double v = is.readDouble();
-                    chunk.set(offset + ii, v == QueryConstants.NULL_DOUBLE ? QueryConstants.NULL_FLOAT : (float) v);
+                    chunk.set(offset + ii, floatCast(v));
+                    // endregion PrecisionDoubleDhNulls
                 }
                 break;
             default:
@@ -135,9 +140,11 @@ private interface FloatSupplier {
         float next() throws IOException;
     }

+    // region FPCastHelper
     private static float floatCast(double a) {
         return a == QueryConstants.NULL_DOUBLE ? QueryConstants.NULL_FLOAT : (float) a;
     }
+    // endregion FPCastHelper

     private static void useValidityBuffer(
             final short precisionFlatBufId,
@@ -160,12 +167,16 @@ private static void useValidityBuffer(
                 supplier = () -> Float16.toFloat(is.readShort());
                 break;
             case Precision.SINGLE:
+                // region PrecisionSingleValidityBuffer
                 elementSize = Float.BYTES;
                 supplier = is::readFloat;
+                // endregion PrecisionSingleValidityBuffer
                 break;
             case Precision.DOUBLE:
                 elementSize = Double.BYTES;
+                // region PrecisionDoubleValidityBuffer
                 supplier = () -> floatCast(is.readDouble());
+                // endregion PrecisionDoubleValidityBuffer
                 break;
             default:
                 throw new IllegalStateException("Unsupported floating point precision: " + precisionFlatBufId);
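Precision.HALF values travel as 16-bit shorts and are widened via Float16.toFloat before landing in a float chunk. On recent JDKs (20+) the standard library offers the same IEEE 754 binary16 conversion, which makes the wire format easy to poke at; this snippet is an aside for experimentation, not code from the patch:

    public final class HalfPrecisionSketch {
        public static void main(final String[] args) {
            // IEEE 754 binary16: 0x3C00 encodes 1.0, 0xC000 encodes -2.0.
            System.out.println(Float.float16ToFloat((short) 0x3C00)); // 1.0
            System.out.println(Float.float16ToFloat((short) 0xC000)); // -2.0
        }
    }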
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkWriter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkWriter.java
index 02b27b8b882..5bd066451ec 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkWriter.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkWriter.java
@@ -12,7 +12,7 @@
 import io.deephaven.engine.rowset.RowSet;
 import com.google.common.io.LittleEndianDataOutputStream;
 import io.deephaven.UncheckedDeephavenException;
-import io.deephaven.util.datastructures.LongSizedDataStructure;
+import io.deephaven.extensions.barrage.BarrageOptions;
+import io.deephaven.util.datastructures.LongSizedDataStructure;
 import io.deephaven.chunk.FloatChunk;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
@@ -21,38 +21,39 @@
 import java.io.OutputStream;
 import java.util.function.Supplier;

-public class FloatChunkWriter<SourceChunkType extends Chunk<Values>> extends BaseChunkWriter<SourceChunkType> {
+public class FloatChunkWriter<SOURCE_CHUNK_TYPE extends Chunk<Values>> extends BaseChunkWriter<SOURCE_CHUNK_TYPE> {
     private static final String DEBUG_NAME = "FloatChunkWriter";
-    public static final FloatChunkWriter<FloatChunk<Values>> INSTANCE = new FloatChunkWriter<>(
-            FloatChunk::getEmptyChunk, FloatChunk::get);
+    public static final FloatChunkWriter<FloatChunk<Values>> IDENTITY_INSTANCE = new FloatChunkWriter<>(
+            FloatChunk::isNull, FloatChunk::getEmptyChunk, FloatChunk::get);

     @FunctionalInterface
     public interface ToFloatTransformFunction<SourceChunkType extends Chunk<Values>> {
         float get(SourceChunkType sourceValues, int offset);
     }

-    private final ToFloatTransformFunction<SourceChunkType> transform;
+    private final ToFloatTransformFunction<SOURCE_CHUNK_TYPE> transform;

     public FloatChunkWriter(
-            @NotNull final Supplier<SourceChunkType> emptyChunkSupplier,
-            @Nullable final ToFloatTransformFunction<SourceChunkType> transform) {
-        super(emptyChunkSupplier, Float.BYTES, true);
+            @NotNull final IsRowNullProvider<SOURCE_CHUNK_TYPE> isRowNullProvider,
+            @NotNull final Supplier<SOURCE_CHUNK_TYPE> emptyChunkSupplier,
+            @Nullable final ToFloatTransformFunction<SOURCE_CHUNK_TYPE> transform) {
+        super(isRowNullProvider, emptyChunkSupplier, Float.BYTES, true);
         this.transform = transform;
     }

     @Override
     public DrainableColumn getInputStream(
-            @NotNull final Context<SourceChunkType> context,
+            @NotNull final Context<SOURCE_CHUNK_TYPE> context,
             @Nullable final RowSet subset,
-            @NotNull final ChunkReader.Options options) throws IOException {
+            @NotNull final BarrageOptions options) throws IOException {
         return new FloatChunkInputStream(context, subset, options);
     }

-    private class FloatChunkInputStream extends BaseChunkInputStream<Context<SourceChunkType>> {
+    private class FloatChunkInputStream extends BaseChunkInputStream<Context<SOURCE_CHUNK_TYPE>> {
         private FloatChunkInputStream(
-                @NotNull final Context<SourceChunkType> context,
+                @NotNull final Context<SOURCE_CHUNK_TYPE> context,
                 @Nullable final RowSet subset,
-                @NotNull final ChunkReader.Options options) {
+                @NotNull final BarrageOptions options) {
             super(context, subset, options);
         }
@@ -66,8 +67,7 @@ public void visitBuffers(final BufferListener listener) {
             // validity
             listener.noteLogicalBuffer(sendValidityBuffer() ? getValidityMapSerializationSizeFor(subset.intSize()) : 0);
             // payload
-            long length = elementSize * subset.size();
-            listener.noteLogicalBuffer(padBufferSize(length));
+            listener.noteLogicalBuffer(padBufferSize(elementSize * subset.size()));
         }

         @Override
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkReader.java
index 562bc6cd475..8ec029e0858 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkReader.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkReader.java
@@ -13,7 +13,7 @@
 import io.deephaven.chunk.WritableLongChunk;
 import io.deephaven.chunk.WritableObjectChunk;
 import io.deephaven.chunk.attributes.Values;
-import io.deephaven.util.datastructures.LongSizedDataStructure;
+import io.deephaven.extensions.barrage.BarrageOptions;
+import io.deephaven.util.datastructures.LongSizedDataStructure;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
@@ -29,13 +29,13 @@ public class IntChunkReader extends BaseChunkReader<WritableIntChunk<Values>> {
     private static final String DEBUG_NAME = "IntChunkReader";

     @FunctionalInterface
-    public interface ToIntTransformFunction<WireChunkType extends WritableChunk<Values>> {
-        int get(WireChunkType wireValues, int wireOffset);
+    public interface ToIntTransformFunction<WIRE_CHUNK_TYPE extends WritableChunk<Values>> {
+        int get(WIRE_CHUNK_TYPE wireValues, int wireOffset);
     }

-    public static <WireChunkType extends WritableChunk<Values>, T extends ChunkReader<WireChunkType>> ChunkReader<WritableIntChunk<Values>> transformTo(
+    public static <WIRE_CHUNK_TYPE extends WritableChunk<Values>, T extends ChunkReader<WIRE_CHUNK_TYPE>> ChunkReader<WritableIntChunk<Values>> transformTo(
             final T wireReader,
-            final ToIntTransformFunction<WireChunkType> wireTransform) {
+            final ToIntTransformFunction<WIRE_CHUNK_TYPE> wireTransform) {
         return new TransformingChunkReader<>(
                 wireReader,
                 WritableIntChunk::makeWritableChunk,
@@ -44,7 +44,7 @@ public static <WireChunkType extends WritableChunk<Values>, T extends ChunkReade
                 outOffset, wireTransform.get(wireValues, wireOffset)));
     }

-    private final ChunkReader.Options options;
+    private final BarrageOptions options;
     private final IntConversion conversion;

     @FunctionalInterface
@@ -54,11 +54,11 @@ public interface IntConversion {
         IntConversion IDENTITY = (int a) -> a;
     }

-    public IntChunkReader(ChunkReader.Options options) {
+    public IntChunkReader(BarrageOptions options) {
         this(options, IntConversion.IDENTITY);
     }

-    public IntChunkReader(ChunkReader.Options options, IntConversion conversion) {
+    public IntChunkReader(BarrageOptions options, IntConversion conversion) {
         this.options = options;
         this.conversion = conversion;
     }
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkWriter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkWriter.java
index 62bcbc864e0..e200591c265 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkWriter.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkWriter.java
@@ -12,7 +12,7 @@
 import io.deephaven.engine.rowset.RowSet;
 import com.google.common.io.LittleEndianDataOutputStream;
 import io.deephaven.UncheckedDeephavenException;
-import io.deephaven.util.datastructures.LongSizedDataStructure;
+import io.deephaven.extensions.barrage.BarrageOptions;
+import io.deephaven.util.datastructures.LongSizedDataStructure;
 import io.deephaven.chunk.IntChunk;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
@@ -21,38 +21,39 @@
 import java.io.OutputStream;
 import java.util.function.Supplier;

-public class IntChunkWriter<SourceChunkType extends Chunk<Values>> extends BaseChunkWriter<SourceChunkType> {
+public class IntChunkWriter<SOURCE_CHUNK_TYPE extends Chunk<Values>> extends BaseChunkWriter<SOURCE_CHUNK_TYPE> {
     private static final String DEBUG_NAME = "IntChunkWriter";
-    public static final IntChunkWriter<IntChunk<Values>> INSTANCE = new IntChunkWriter<>(
-            IntChunk::getEmptyChunk, IntChunk::get);
+    public static final IntChunkWriter<IntChunk<Values>> IDENTITY_INSTANCE = new IntChunkWriter<>(
+            IntChunk::isNull, IntChunk::getEmptyChunk, IntChunk::get);

     @FunctionalInterface
     public interface ToIntTransformFunction<SourceChunkType extends Chunk<Values>> {
         int get(SourceChunkType sourceValues, int offset);
     }

-    private final ToIntTransformFunction<SourceChunkType> transform;
+    private final ToIntTransformFunction<SOURCE_CHUNK_TYPE> transform;

     public IntChunkWriter(
-            @NotNull final Supplier<SourceChunkType> emptyChunkSupplier,
-            @Nullable final ToIntTransformFunction<SourceChunkType> transform) {
-        super(emptyChunkSupplier, Integer.BYTES, true);
+            @NotNull final IsRowNullProvider<SOURCE_CHUNK_TYPE> isRowNullProvider,
+            @NotNull final Supplier<SOURCE_CHUNK_TYPE> emptyChunkSupplier,
+            @Nullable final ToIntTransformFunction<SOURCE_CHUNK_TYPE> transform) {
+        super(isRowNullProvider, emptyChunkSupplier, Integer.BYTES, true);
         this.transform = transform;
     }

     @Override
     public DrainableColumn getInputStream(
-            @NotNull final Context<SourceChunkType> context,
+            @NotNull final Context<SOURCE_CHUNK_TYPE> context,
             @Nullable final RowSet subset,
-            @NotNull final ChunkReader.Options options) throws IOException {
+            @NotNull final BarrageOptions options) throws IOException {
         return new IntChunkInputStream(context, subset, options);
     }

-    private class IntChunkInputStream extends BaseChunkInputStream<Context<SourceChunkType>> {
+    private class IntChunkInputStream extends BaseChunkInputStream<Context<SOURCE_CHUNK_TYPE>> {
         private IntChunkInputStream(
-                @NotNull final Context<SourceChunkType> context,
+                @NotNull final Context<SOURCE_CHUNK_TYPE> context,
                 @Nullable final RowSet subset,
-                @NotNull final ChunkReader.Options options) {
+                @NotNull final BarrageOptions options) {
             super(context, subset, options);
         }
@@ -66,8 +67,7 @@ public void visitBuffers(final BufferListener listener) {
             // validity
             listener.noteLogicalBuffer(sendValidityBuffer() ? getValidityMapSerializationSizeFor(subset.intSize()) : 0);
             // payload
-            long length = elementSize * subset.size();
-            listener.noteLogicalBuffer(padBufferSize(length));
+            listener.noteLogicalBuffer(padBufferSize(elementSize * subset.size()));
         }

         @Override
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ListChunkWriter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ListChunkWriter.java
index 4bbba35be08..b29df300aad 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ListChunkWriter.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ListChunkWriter.java
@@ -12,6 +12,7 @@
 import io.deephaven.engine.rowset.RowSet;
 import io.deephaven.engine.rowset.RowSetBuilderSequential;
 import io.deephaven.engine.rowset.RowSetFactory;
+import io.deephaven.extensions.barrage.BarrageOptions;
 import io.deephaven.util.datastructures.LongSizedDataStructure;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
@@ -33,7 +34,7 @@ public ListChunkWriter(
             final int fixedSizeLength,
             final ExpansionKernel kernel,
             final ChunkWriter componentWriter) {
-        super(ObjectChunk::getEmptyChunk, 0, false);
+        super(ObjectChunk::isNull, ObjectChunk::getEmptyChunk, 0, false);
         this.mode = mode;
         this.fixedSizeLength = fixedSizeLength;
         this.kernel = kernel;
@@ -69,8 +70,8 @@ public Context(
         }

         @Override
-        public void close() {
-            super.close();
+        protected void onReferenceCountAtZero() {
+            super.onReferenceCountAtZero();
             offsets.close();
             innerContext.close();
         }
@@ -80,7 +81,7 @@ public void close() {
     public DrainableColumn getInputStream(
             @NotNull final ChunkWriter.Context context,
             @Nullable final RowSet subset,
-            @NotNull final ChunkReader.Options options) throws IOException {
+            @NotNull final BarrageOptions options) throws IOException {
         return new ListChunkInputStream((Context) context, subset, options);
     }

@@ -93,7 +94,7 @@ private class ListChunkInputStream extends BaseChunkInputStream {
         private ListChunkInputStream(
                 @NotNull final Context context,
                 @Nullable final RowSet mySubset,
-                @NotNull final ChunkReader.Options options) throws IOException {
+                @NotNull final BarrageOptions options) throws IOException {
             super(context, mySubset, options);

             if (subset == null || subset.size() == context.size()) {
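The ListChunkWriter.Context cleanup above moves from close() to onReferenceCountAtZero(), so the offsets chunk and inner context are released only when the last retainer lets go rather than on the first close call. A toy sketch of that reference-counting shape, with hypothetical names standing in for the real base class:

    import java.util.concurrent.atomic.AtomicInteger;

    public final class RefCountSketch {
        static class Resource {
            private final AtomicInteger refCount = new AtomicInteger(1);

            Resource retain() {
                refCount.incrementAndGet();
                return this;
            }

            void close() {
                if (refCount.decrementAndGet() == 0) {
                    onReferenceCountAtZero();
                }
            }

            // Runs exactly once, after every retainer has released its reference.
            protected void onReferenceCountAtZero() {
                System.out.println("released underlying buffers");
            }
        }

        public static void main(final String[] args) {
            final Resource r = new Resource();
            r.retain();  // e.g. an input stream keeps the context alive
            r.close();   // creator is done; one reference still outstanding
            r.close();   // last reference gone -> onReferenceCountAtZero fires
        }
    }

This avoids the double-free and premature-free hazards that a plain close() override invites when several streams share one context.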
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkReader.java
index decad1d77fe..beda3d71e3a 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkReader.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkReader.java
@@ -10,10 +10,9 @@
 import io.deephaven.base.verify.Assert;
 import io.deephaven.chunk.WritableLongChunk;
 import io.deephaven.chunk.WritableChunk;
-import io.deephaven.chunk.WritableLongChunk;
 import io.deephaven.chunk.WritableObjectChunk;
 import io.deephaven.chunk.attributes.Values;
-import io.deephaven.util.datastructures.LongSizedDataStructure;
+import io.deephaven.extensions.barrage.BarrageOptions;
+import io.deephaven.util.datastructures.LongSizedDataStructure;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
@@ -29,13 +28,13 @@ public class LongChunkReader extends BaseChunkReader<WritableLongChunk<Values>>
     private static final String DEBUG_NAME = "LongChunkReader";

     @FunctionalInterface
-    public interface ToLongTransformFunction<WireChunkType extends WritableChunk<Values>> {
-        long get(WireChunkType wireValues, int wireOffset);
+    public interface ToLongTransformFunction<WIRE_CHUNK_TYPE extends WritableChunk<Values>> {
+        long get(WIRE_CHUNK_TYPE wireValues, int wireOffset);
     }

-    public static <WireChunkType extends WritableChunk<Values>, T extends ChunkReader<WireChunkType>> ChunkReader<WritableLongChunk<Values>> transformTo(
+    public static <WIRE_CHUNK_TYPE extends WritableChunk<Values>, T extends ChunkReader<WIRE_CHUNK_TYPE>> ChunkReader<WritableLongChunk<Values>> transformTo(
             final T wireReader,
-            final ToLongTransformFunction<WireChunkType> wireTransform) {
+            final ToLongTransformFunction<WIRE_CHUNK_TYPE> wireTransform) {
         return new TransformingChunkReader<>(
                 wireReader,
                 WritableLongChunk::makeWritableChunk,
@@ -44,7 +43,7 @@ public static <WireChunkType extends WritableChunk<Values>, T extends ChunkReade
                 outOffset, wireTransform.get(wireValues, wireOffset)));
     }

-    private final ChunkReader.Options options;
+    private final BarrageOptions options;
     private final LongConversion conversion;

     @FunctionalInterface
@@ -54,11 +53,11 @@ public interface LongConversion {
         LongConversion IDENTITY = (long a) -> a;
     }

-    public LongChunkReader(ChunkReader.Options options) {
+    public LongChunkReader(BarrageOptions options) {
         this(options, LongConversion.IDENTITY);
     }

-    public LongChunkReader(ChunkReader.Options options, LongConversion conversion) {
+    public LongChunkReader(BarrageOptions options, LongConversion conversion) {
         this.options = options;
         this.conversion = conversion;
     }
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkWriter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkWriter.java
index b9574744fd9..3d3b884f722 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkWriter.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkWriter.java
@@ -12,7 +12,7 @@
 import io.deephaven.engine.rowset.RowSet;
 import com.google.common.io.LittleEndianDataOutputStream;
 import io.deephaven.UncheckedDeephavenException;
-import io.deephaven.util.datastructures.LongSizedDataStructure;
+import io.deephaven.extensions.barrage.BarrageOptions;
+import io.deephaven.util.datastructures.LongSizedDataStructure;
 import io.deephaven.chunk.LongChunk;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
@@ -21,38 +21,39 @@
 import java.io.OutputStream;
 import java.util.function.Supplier;

-public class LongChunkWriter<SourceChunkType extends Chunk<Values>> extends BaseChunkWriter<SourceChunkType> {
+public class LongChunkWriter<SOURCE_CHUNK_TYPE extends Chunk<Values>> extends BaseChunkWriter<SOURCE_CHUNK_TYPE> {
     private static final String DEBUG_NAME = "LongChunkWriter";
-    public static final LongChunkWriter<LongChunk<Values>> INSTANCE = new LongChunkWriter<>(
-            LongChunk::getEmptyChunk, LongChunk::get);
+    public static final LongChunkWriter<LongChunk<Values>> IDENTITY_INSTANCE = new LongChunkWriter<>(
+            LongChunk::isNull, LongChunk::getEmptyChunk, LongChunk::get);

     @FunctionalInterface
     public interface ToLongTransformFunction<SourceChunkType extends Chunk<Values>> {
         long get(SourceChunkType sourceValues, int offset);
     }

-    private final ToLongTransformFunction<SourceChunkType> transform;
+    private final ToLongTransformFunction<SOURCE_CHUNK_TYPE> transform;

     public LongChunkWriter(
-            @NotNull final Supplier<SourceChunkType> emptyChunkSupplier,
-            @Nullable final ToLongTransformFunction<SourceChunkType> transform) {
-        super(emptyChunkSupplier, Long.BYTES, true);
+            @NotNull final IsRowNullProvider<SOURCE_CHUNK_TYPE> isRowNullProvider,
+            @NotNull final Supplier<SOURCE_CHUNK_TYPE> emptyChunkSupplier,
+            @Nullable final ToLongTransformFunction<SOURCE_CHUNK_TYPE> transform) {
+        super(isRowNullProvider, emptyChunkSupplier, Long.BYTES, true);
         this.transform = transform;
     }

     @Override
     public DrainableColumn getInputStream(
-            @NotNull final Context<SourceChunkType> context,
+            @NotNull final Context<SOURCE_CHUNK_TYPE> context,
             @Nullable final RowSet subset,
-            @NotNull final ChunkReader.Options options) throws IOException {
+            @NotNull final BarrageOptions options) throws IOException {
         return new LongChunkInputStream(context, subset, options);
     }

-    private class LongChunkInputStream extends BaseChunkInputStream<Context<SourceChunkType>> {
+    private class LongChunkInputStream extends BaseChunkInputStream<Context<SOURCE_CHUNK_TYPE>> {
         private LongChunkInputStream(
-                @NotNull final Context<SourceChunkType> context,
+                @NotNull final Context<SOURCE_CHUNK_TYPE> context,
                 @Nullable final RowSet subset,
-                @NotNull final ChunkReader.Options options) {
+                @NotNull final BarrageOptions options) {
             super(context, subset, options);
         }
@@ -66,8 +67,7 @@ public void visitBuffers(final BufferListener listener) {
             // validity
             listener.noteLogicalBuffer(sendValidityBuffer() ? getValidityMapSerializationSizeFor(subset.intSize()) : 0);
             // payload
-            long length = elementSize * subset.size();
-            listener.noteLogicalBuffer(padBufferSize(length));
+            listener.noteLogicalBuffer(padBufferSize(elementSize * subset.size()));
         }

         @Override
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/NullChunkWriter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/NullChunkWriter.java
index 43a2c07869f..20d399d4125 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/NullChunkWriter.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/NullChunkWriter.java
@@ -6,6 +6,7 @@
 import io.deephaven.chunk.Chunk;
 import io.deephaven.chunk.attributes.Values;
 import io.deephaven.engine.rowset.RowSet;
+import io.deephaven.extensions.barrage.BarrageOptions;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
@@ -13,16 +14,17 @@
 import java.io.OutputStream;

 public class NullChunkWriter<SOURCE_CHUNK_TYPE extends Chunk<Values>> extends BaseChunkWriter<SOURCE_CHUNK_TYPE> {
+    public static final NullChunkWriter<Chunk<Values>> INSTANCE = new NullChunkWriter<>();

     public NullChunkWriter() {
-        super(() -> null, 0, true);
+        super((chunk, idx) -> true, () -> null, 0, true);
     }

     @Override
     public DrainableColumn getInputStream(
             @NotNull final Context chunk,
             @Nullable final RowSet subset,
-            @NotNull final ChunkReader.Options options) throws IOException {
+            @NotNull final BarrageOptions options) throws IOException {
         return new NullDrainableColumn();
     }
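Each writer above sizes its validity buffer via getValidityMapSerializationSizeFor(subset.intSize()); NullChunkWriter simply declares every row null with its (chunk, idx) -> true provider. The underlying arithmetic is a bitmap rounded up to whole 64-bit words; a sketch assuming that rounding (the real helper lives in the base input stream class, which this patch does not show):

    public final class ValiditySizeSketch {
        // Bytes needed for a validity bitmap over numRows rows, padded to whole longs.
        static int validityBytes(final int numRows) {
            final int numWords = (numRows + 63) / 64;
            return numWords * Long.BYTES;
        }

        public static void main(final String[] args) {
            System.out.println(validityBytes(1));   // 8
            System.out.println(validityBytes(64));  // 8
            System.out.println(validityBytes(65));  // 16
        }
    }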
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkReader.java
index b90ce6b6928..09160dfea1f 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkReader.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkReader.java
@@ -13,7 +13,7 @@
 import io.deephaven.chunk.WritableLongChunk;
 import io.deephaven.chunk.WritableObjectChunk;
 import io.deephaven.chunk.attributes.Values;
-import io.deephaven.util.datastructures.LongSizedDataStructure;
+import io.deephaven.extensions.barrage.BarrageOptions;
+import io.deephaven.util.datastructures.LongSizedDataStructure;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
@@ -29,13 +29,13 @@ public class ShortChunkReader extends BaseChunkReader<WritableShortChunk<Values>>
     private static final String DEBUG_NAME = "ShortChunkReader";

     @FunctionalInterface
-    public interface ToShortTransformFunction<WireChunkType extends WritableChunk<Values>> {
-        short get(WireChunkType wireValues, int wireOffset);
+    public interface ToShortTransformFunction<WIRE_CHUNK_TYPE extends WritableChunk<Values>> {
+        short get(WIRE_CHUNK_TYPE wireValues, int wireOffset);
     }

-    public static <WireChunkType extends WritableChunk<Values>, T extends ChunkReader<WireChunkType>> ChunkReader<WritableShortChunk<Values>> transformTo(
+    public static <WIRE_CHUNK_TYPE extends WritableChunk<Values>, T extends ChunkReader<WIRE_CHUNK_TYPE>> ChunkReader<WritableShortChunk<Values>> transformTo(
             final T wireReader,
-            final ToShortTransformFunction<WireChunkType> wireTransform) {
+            final ToShortTransformFunction<WIRE_CHUNK_TYPE> wireTransform) {
         return new TransformingChunkReader<>(
                 wireReader,
                 WritableShortChunk::makeWritableChunk,
@@ -44,7 +44,7 @@ public static <WireChunkType extends WritableChunk<Values>, T extends ChunkReade
                 outOffset, wireTransform.get(wireValues, wireOffset)));
     }

-    private final ChunkReader.Options options;
+    private final BarrageOptions options;
     private final ShortConversion conversion;

     @FunctionalInterface
@@ -54,11 +54,11 @@ public interface ShortConversion {
         ShortConversion IDENTITY = (short a) -> a;
     }

-    public ShortChunkReader(ChunkReader.Options options) {
+    public ShortChunkReader(BarrageOptions options) {
         this(options, ShortConversion.IDENTITY);
     }

-    public ShortChunkReader(ChunkReader.Options options, ShortConversion conversion) {
+    public ShortChunkReader(BarrageOptions options, ShortConversion conversion) {
         this.options = options;
         this.conversion = conversion;
     }
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkWriter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkWriter.java
index 23f0b5f3149..23a5aeef5f2 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkWriter.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkWriter.java
@@ -12,7 +12,7 @@
 import io.deephaven.engine.rowset.RowSet;
 import com.google.common.io.LittleEndianDataOutputStream;
 import io.deephaven.UncheckedDeephavenException;
-import io.deephaven.util.datastructures.LongSizedDataStructure;
+import io.deephaven.extensions.barrage.BarrageOptions;
+import io.deephaven.util.datastructures.LongSizedDataStructure;
 import io.deephaven.chunk.ShortChunk;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
@@ -21,38 +21,39 @@
 import java.io.OutputStream;
 import java.util.function.Supplier;

-public class ShortChunkWriter<SourceChunkType extends Chunk<Values>> extends BaseChunkWriter<SourceChunkType> {
+public class ShortChunkWriter<SOURCE_CHUNK_TYPE extends Chunk<Values>> extends BaseChunkWriter<SOURCE_CHUNK_TYPE> {
     private static final String DEBUG_NAME = "ShortChunkWriter";
-    public static final ShortChunkWriter<ShortChunk<Values>> INSTANCE = new ShortChunkWriter<>(
-            ShortChunk::getEmptyChunk, ShortChunk::get);
+    public static final ShortChunkWriter<ShortChunk<Values>> IDENTITY_INSTANCE = new ShortChunkWriter<>(
+            ShortChunk::isNull, ShortChunk::getEmptyChunk, ShortChunk::get);

     @FunctionalInterface
     public interface ToShortTransformFunction<SourceChunkType extends Chunk<Values>> {
         short get(SourceChunkType sourceValues, int offset);
     }

-    private final ToShortTransformFunction<SourceChunkType> transform;
+    private final ToShortTransformFunction<SOURCE_CHUNK_TYPE> transform;

     public ShortChunkWriter(
-            @NotNull final Supplier<SourceChunkType> emptyChunkSupplier,
-            @Nullable final ToShortTransformFunction<SourceChunkType> transform) {
-        super(emptyChunkSupplier, Short.BYTES, true);
+            @NotNull final IsRowNullProvider<SOURCE_CHUNK_TYPE> isRowNullProvider,
+            @NotNull final Supplier<SOURCE_CHUNK_TYPE> emptyChunkSupplier,
+            @Nullable final ToShortTransformFunction<SOURCE_CHUNK_TYPE> transform) {
+        super(isRowNullProvider, emptyChunkSupplier, Short.BYTES, true);
         this.transform = transform;
     }

     @Override
     public DrainableColumn getInputStream(
-            @NotNull final Context<SourceChunkType> context,
+            @NotNull final Context<SOURCE_CHUNK_TYPE> context,
             @Nullable final RowSet subset,
-            @NotNull final ChunkReader.Options options) throws IOException {
+            @NotNull final BarrageOptions options) throws IOException {
         return new ShortChunkInputStream(context, subset, options);
     }

-    private class ShortChunkInputStream extends BaseChunkInputStream<Context<SourceChunkType>> {
+    private class ShortChunkInputStream extends BaseChunkInputStream<Context<SOURCE_CHUNK_TYPE>> {
         private ShortChunkInputStream(
-                @NotNull final Context<SourceChunkType> context,
+                @NotNull final Context<SOURCE_CHUNK_TYPE> context,
                 @Nullable final RowSet subset,
-                @NotNull final ChunkReader.Options options) {
+                @NotNull final BarrageOptions options) {
             super(context, subset, options);
         }
@@ -66,8 +67,7 @@ public void visitBuffers(final BufferListener listener) {
             // validity
             listener.noteLogicalBuffer(sendValidityBuffer() ? getValidityMapSerializationSizeFor(subset.intSize()) : 0);
             // payload
-            long length = elementSize * subset.size();
-            listener.noteLogicalBuffer(padBufferSize(length));
+            listener.noteLogicalBuffer(padBufferSize(elementSize * subset.size()));
         }

         @Override
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarBinaryChunkWriter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarBinaryChunkWriter.java
index aa15f6b2493..d6084b0e289 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarBinaryChunkWriter.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarBinaryChunkWriter.java
@@ -9,6 +9,7 @@
 import io.deephaven.chunk.attributes.ChunkPositions;
 import io.deephaven.chunk.attributes.Values;
 import io.deephaven.chunk.util.pools.ChunkPoolConstants;
+import io.deephaven.extensions.barrage.BarrageOptions;
 import io.deephaven.util.SafeCloseable;
 import io.deephaven.util.datastructures.LongSizedDataStructure;
 import io.deephaven.engine.rowset.RowSet;
@@ -33,7 +34,7 @@ public interface Appender {

     public VarBinaryChunkWriter(
             final Appender<T> appendItem) {
-        super(ObjectChunk::getEmptyChunk, 0, false);
+        super(ObjectChunk::isNull, ObjectChunk::getEmptyChunk, 0, false);
         this.appendItem = appendItem;
     }

@@ -41,7 +42,7 @@ public VarBinaryChunkWriter(
     public DrainableColumn getInputStream(
             @NotNull final ChunkWriter.Context context,
             @Nullable final RowSet subset,
-            @NotNull final ChunkReader.Options options) throws IOException {
+            @NotNull final BarrageOptions options) throws IOException {
         return new ObjectChunkInputStream((Context) context, subset, options);
     }

@@ -67,18 +68,23 @@ public Context(
         }

         for (int ii = 0; ii < chunk.size(); ++ii) {
-            if (chunk.isNullAt(ii)) {
-                continue;
-            }
-            try {
-                appendItem.append(byteStorage, chunk.get(ii));
-            } catch (final IOException ioe) {
-                throw new UncheckedDeephavenException(
-                        "Unexpected exception while draining data to OutputStream: ", ioe);
+            if (!chunk.isNull(ii)) {
+                try {
+                    appendItem.append(byteStorage, chunk.get(ii));
+                } catch (final IOException ioe) {
+                    throw new UncheckedDeephavenException(
+                            "Unexpected exception while draining data to OutputStream: ", ioe);
+                }
             }
             byteStorage.offsets.set(ii + 1, byteStorage.size());
         }
     }
+
+        @Override
+        protected void onReferenceCountAtZero() {
+            super.onReferenceCountAtZero();
+            byteStorage.close();
+        }
     }

     private class ObjectChunkInputStream extends BaseChunkInputStream {
@@ -88,7 +94,7 @@ private class ObjectChunkInputStream extends BaseChunkInputStream {
         private ObjectChunkInputStream(
                 @NotNull final Context context,
                 @Nullable final RowSet subset,
-                @NotNull final ChunkReader.Options options) throws IOException {
+                @NotNull final BarrageOptions options) throws IOException {
             super(context, subset, options);
         }
@@ -154,11 +160,11 @@ public int drainTo(final OutputStream outputStream) throws IOException {
             }
             read = true;

-            long bytesWritten = 0;
+            final MutableLong bytesWritten = new MutableLong();
             final LittleEndianDataOutputStream dos = new LittleEndianDataOutputStream(outputStream);
             // write the validity buffer
-            bytesWritten += writeValidityBuffer(dos);
+            bytesWritten.add(writeValidityBuffer(dos));

             // write offsets array
             dos.writeInt(0);
@@ -173,17 +179,23 @@ public int drainTo(final OutputStream outputStream) throws IOException {
                     throw new UncheckedDeephavenException("couldn't drain data to OutputStream", e);
                 }
             });
-            bytesWritten += Integer.BYTES * (subset.size() + 1);
+            bytesWritten.add(Integer.BYTES * (subset.size() + 1));

             if ((subset.size() & 0x1) == 0) {
                 // then we must pad to align next buffer
                 dos.writeInt(0);
-                bytesWritten += Integer.BYTES;
+                bytesWritten.add(Integer.BYTES);
             }

-            bytesWritten += context.byteStorage.writePayload(dos, 0, subset.intSize() - 1);
-            bytesWritten += writePadBuffer(dos, bytesWritten);
-            return LongSizedDataStructure.intSize(DEBUG_NAME, bytesWritten);
+            subset.forAllRowKeyRanges((s, e) -> {
+                try {
+                    bytesWritten.add(context.byteStorage.writePayload(dos, (int) s, (int) e));
+                } catch (IOException ex) {
+                    throw new UncheckedDeephavenException("couldn't drain data to OutputStream", ex);
+                }
+            });
+            bytesWritten.add(writePadBuffer(dos, bytesWritten.get()));
+            return LongSizedDataStructure.intSize(DEBUG_NAME, bytesWritten.get());
         }
     }
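The drainTo rewrite above also swaps a local long for a MutableLong box because the byte count is now updated inside the forAllRowKeyRanges lambda, and Java lambdas cannot reassign captured locals. A minimal sketch of the pattern, using a one-element array as a library-free stand-in for the mutable box:

    import java.util.stream.LongStream;

    public final class MutableCounterSketch {
        public static void main(final String[] args) {
            final long[] bytesWritten = {0}; // stand-in for the MutableLong in the patch
            // A captured local `long` could not be reassigned here; the box can be mutated.
            LongStream.of(8, 16, 4).forEach(n -> bytesWritten[0] += n);
            System.out.println(bytesWritten[0]); // 28
        }
    }

Writing the payload per row-key range (rather than one 0..intSize()-1 span) is what makes subset serialization correct; the counter change is just the plumbing that allows it.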
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java
index 05dec0aca2f..c639d414095 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java
@@ -13,6 +13,7 @@
 import io.deephaven.engine.rowset.RowSetShiftData;
 import io.deephaven.engine.table.impl.util.BarrageMessage;
 import io.deephaven.extensions.barrage.BarrageSubscriptionOptions;
+import io.deephaven.extensions.barrage.BarrageTypeInfo;
 import io.deephaven.extensions.barrage.chunk.ChunkWriter;
 import io.deephaven.extensions.barrage.chunk.ChunkReader;
 import io.deephaven.extensions.barrage.chunk.DefaultChunkReaderFactory;
@@ -35,7 +36,6 @@
 import java.util.List;
 import java.util.PrimitiveIterator;

-import static io.deephaven.extensions.barrage.chunk.ChunkReader.typeInfo;
 import static io.deephaven.extensions.barrage.util.BarrageProtoUtil.DEFAULT_SER_OPTIONS;

 /**
@@ -158,7 +158,7 @@ protected void parseSchema(final Message message) {
         componentTypes = result.computeWireComponentTypes();
         for (int i = 0; i < schema.fieldsLength(); i++) {
             readers.add(DefaultChunkReaderFactory.INSTANCE.newReader(
-                    typeInfo(columnTypes[i], componentTypes[i], schema.fields(i)), options));
+                    BarrageTypeInfo.make(columnTypes[i], componentTypes[i], schema.fields(i)), options));
         }

         // retain reference until the resultTable can be sealed
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageMessageReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageMessageReader.java
index e5603e9ba76..88d3b635d90 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageMessageReader.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageMessageReader.java
@@ -5,7 +5,7 @@

 import io.deephaven.chunk.ChunkType;
 import io.deephaven.engine.table.impl.util.BarrageMessage;
-import io.deephaven.extensions.barrage.chunk.ChunkReader;
+import io.deephaven.extensions.barrage.BarrageOptions;

 import java.io.InputStream;

@@ -24,7 +24,7 @@
      * @param stream the input stream that holds the message to be parsed
      * @return a BarrageMessage filled out by the stream's payload
      */
-    BarrageMessage safelyParseFrom(final ChunkReader.Options options,
+    BarrageMessage safelyParseFrom(final BarrageOptions options,
             ChunkType[] columnChunkTypes,
             Class<?>[] columnTypes,
             Class<?>[] componentTypes,
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageMessageReaderImpl.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageMessageReaderImpl.java
index 713cd8d2607..4ea7be34256 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageMessageReaderImpl.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageMessageReaderImpl.java
@@ -19,6 +19,8 @@
 import io.deephaven.engine.rowset.RowSetShiftData;
 import io.deephaven.engine.table.impl.sources.ReinterpretUtils;
 import io.deephaven.engine.table.impl.util.*;
+import io.deephaven.extensions.barrage.BarrageOptions;
+import io.deephaven.extensions.barrage.BarrageTypeInfo;
 import io.deephaven.extensions.barrage.chunk.ChunkWriter;
 import io.deephaven.extensions.barrage.chunk.ChunkReader;
 import io.deephaven.extensions.barrage.chunk.DefaultChunkReaderFactory;
@@ -35,7 +37,6 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.nio.ByteBuffer;
-import java.time.Instant;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.BitSet;
@@ -44,8 +45,6 @@
 import java.util.PrimitiveIterator;
 import java.util.function.LongConsumer;

-import static io.deephaven.extensions.barrage.chunk.ChunkReader.typeInfo;
-
 public class BarrageMessageReaderImpl implements BarrageMessageReader {

     private static final Logger log = LoggerFactory.getLogger(BarrageMessageReaderImpl.class);
@@ -70,7 +69,7 @@ public BarrageMessageReaderImpl(final LongConsumer deserializeTmConsumer) {
     }

     @Override
-    public BarrageMessage safelyParseFrom(final ChunkReader.Options options,
+    public BarrageMessage safelyParseFrom(final BarrageOptions options,
             final ChunkType[] columnChunkTypes,
             final Class<?>[] columnTypes,
             final Class<?>[] componentTypes,
@@ -301,7 +300,7 @@ public BarrageMessage safelyParseFrom(final ChunkReader.Options options,
                         Field field = schema.fields(i);
                         final Class<?> columnType = ReinterpretUtils.maybeConvertToPrimitiveDataType(columnTypes[i]);
-                        readers.add(chunkReaderFactory.newReader(typeInfo(columnType, componentTypes[i], field), options));
+                        readers.add(chunkReaderFactory.newReader(
+                                BarrageTypeInfo.make(columnType, componentTypes[i], field), options));
                     }
                     return null;
                 }
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java
index 1f64d7f7c4b..3129b9511e0 100755
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java
@@ -30,9 +30,10 @@
 import io.deephaven.engine.table.impl.util.BarrageMessage;
 import io.deephaven.engine.updategraph.impl.PeriodicUpdateGraph;
 import io.deephaven.extensions.barrage.BarrageMessageWriter;
+import io.deephaven.extensions.barrage.BarrageOptions;
 import io.deephaven.extensions.barrage.BarragePerformanceLog;
 import io.deephaven.extensions.barrage.BarrageSnapshotOptions;
-import io.deephaven.extensions.barrage.chunk.ChunkReader;
+import io.deephaven.extensions.barrage.BarrageTypeInfo;
 import io.deephaven.extensions.barrage.chunk.ChunkWriter;
 import io.deephaven.extensions.barrage.chunk.DefaultChunkWriterFactory;
 import io.deephaven.extensions.barrage.chunk.vector.VectorExpansionKernel;
@@ -221,7 +222,7 @@ public static ByteString schemaBytes(@NotNull final ToIntFunction
             attributes, final boolean isFlat) {
@@ -782,7 +783,7 @@ public static void createAndSendStaticSnapshot(
         // noinspection unchecked
         final ChunkWriter<Chunk<Values>>[] chunkWriters = table.getDefinition().getColumns().stream()
-                .map(cd -> DefaultChunkWriterFactory.INSTANCE.newWriter(ChunkReader.typeInfo(
+                .map(cd -> DefaultChunkWriterFactory.INSTANCE.newWriter(BarrageTypeInfo.make(
                         ReinterpretUtils.maybeConvertToPrimitiveDataType(cd.getDataType()),
                         cd.getComponentType(),
                         flatbufFieldFor(cd, Map.of()))))
@@ -888,7 +889,7 @@ public static void createAndSendSnapshot(
         // noinspection unchecked
         final ChunkWriter<Chunk<Values>>[] chunkWriters = table.getDefinition().getColumns().stream()
-                .map(cd -> DefaultChunkWriterFactory.INSTANCE.newWriter(ChunkReader.typeInfo(
+                .map(cd -> DefaultChunkWriterFactory.INSTANCE.newWriter(BarrageTypeInfo.make(
                         ReinterpretUtils.maybeConvertToPrimitiveDataType(cd.getDataType()),
                         cd.getComponentType(),
                         flatbufFieldFor(cd, Map.of()))))
diff --git a/extensions/barrage/src/main/resources/io/deephaven/extensions/barrage/Barrage.gwt.xml b/extensions/barrage/src/main/resources/io/deephaven/extensions/barrage/Barrage.gwt.xml
index b1b73cfb03a..d2ba7c69b19 100644
--- a/extensions/barrage/src/main/resources/io/deephaven/extensions/barrage/Barrage.gwt.xml
+++ b/extensions/barrage/src/main/resources/io/deephaven/extensions/barrage/Barrage.gwt.xml
@@ -2,7 +2,7 @@
-    
+    
diff --git a/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java b/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java
index 82946b94547..f2ca898e18a 100644
--- a/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java
+++ b/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java
@@ -15,6 +15,7 @@
 import io.deephaven.engine.rowset.RowSet;
 import io.deephaven.engine.rowset.RowSetBuilderSequential;
 import io.deephaven.engine.rowset.RowSetFactory;
+import io.deephaven.extensions.barrage.BarrageOptions;
 import io.deephaven.extensions.barrage.BarrageSubscriptionOptions;
 import io.deephaven.chunk.ChunkType;
 import io.deephaven.chunk.WritableByteChunk;
@@ -26,6 +27,7 @@
 import io.deephaven.chunk.WritableLongChunk;
 import io.deephaven.chunk.WritableObjectChunk;
 import io.deephaven.chunk.WritableShortChunk;
+import io.deephaven.extensions.barrage.BarrageTypeInfo;
 import io.deephaven.extensions.barrage.util.BarrageUtil;
 import io.deephaven.extensions.barrage.util.ExposedByteArrayOutputStream;
 import io.deephaven.proto.flight.util.SchemaHelper;
@@ -56,8 +58,6 @@
 import java.util.function.IntFunction;
 import java.util.stream.LongStream;

-import static io.deephaven.extensions.barrage.chunk.ChunkReader.typeInfo;
-
 public class BarrageColumnRoundTripTest extends RefreshingTableTestCase {

     private static final BarrageSubscriptionOptions OPT_DEFAULT_DH_NULLS =
@@ -73,7 +73,7 @@ public class BarrageColumnRoundTripTest extends RefreshingTableTestCase {
     };

     private static WritableChunk<Values> readChunk(
-            final ChunkReader.Options options,
+            final BarrageOptions options,
             final Class<?> type,
             final Class<?> componentType,
             final Field field,
@@ -84,7 +84,7 @@ private static WritableChunk<Values> readChunk(
             final int offset,
             final int totalRows) throws IOException {
         return DefaultChunkReaderFactory.INSTANCE
-                .newReader(typeInfo(type, componentType, field), options)
+                .newReader(BarrageTypeInfo.make(type, componentType, field), options)
                 .readChunk(fieldNodeIter, bufferInfoIter, is, outChunk, offset, totalRows);
     }
@@ -661,6 +661,7 @@ public void assertExpected(
     private static void testRoundTripSerialization(
             final BarrageSubscriptionOptions options, final Class<T> type, final Consumer<WritableChunk<Values>> initData,
             final Validator validator) throws IOException {
+        final int NUM_ROWS = 8;
         final ChunkType chunkType;
         if (type == Boolean.class || type == boolean.class) {
             chunkType = ChunkType.Byte;
@@ -680,15 +681,15 @@ private static void testRoundTripSerialization(
         Schema schema = SchemaHelper.flatbufSchema(schemaBytes.asReadOnlyByteBuffer());
         Field field = schema.fields(0);

-        final WritableChunk<Values> srcData = chunkType.makeWritableChunk(4096);
+        final WritableChunk<Values> srcData = chunkType.makeWritableChunk(NUM_ROWS);
         initData.accept(srcData);

         // The writer owns data; it is allowed to close it prematurely if the data needs to be converted to primitive
-        final WritableChunk<Values> data = chunkType.makeWritableChunk(4096);
+        final WritableChunk<Values> data = chunkType.makeWritableChunk(NUM_ROWS);
         data.copyFromChunk(srcData, 0, 0, srcData.size());

         final ChunkWriter<Chunk<Values>> writer = DefaultChunkWriterFactory.INSTANCE
-                .newWriter(ChunkReader.typeInfo(type, type.getComponentType(), field));
+                .newWriter(BarrageTypeInfo.make(type, type.getComponentType(), field));
         try (SafeCloseable ignored = srcData;
                 final ChunkWriter.Context<Chunk<Values>> context = writer.makeContext(data, 0)) {
             // full sub logic
@@ -700,7 +701,13 @@ private static void testRoundTripSerialization(
                         .add(new ChunkWriter.FieldNodeInfo(numElements, nullCount)));
                 final LongStream.Builder bufferNodes = LongStream.builder();
                 column.visitBuffers(bufferNodes::add);
+                final int startSize = baos.size();
+                final int available = column.available();
                 column.drainTo(baos);
+                if (available != baos.size() - startSize) {
+                    throw new IllegalStateException("available=" + available + ", baos.size()=" + baos.size());
+                }
+
                 final DataInput dis =
                         new LittleEndianDataInputStream(new ByteArrayInputStream(baos.peekBuffer(), 0, baos.size()));
                 try (final WritableChunk<Values> rtData = readChunk(options, readType, readType.getComponentType(),
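The replicator changes that follow rely on the named // region ... // endregion markers added throughout the readers above: the float reader is copied to the double reader, then each region is swapped for a type-correct body. A toy sketch of region replacement over a list of lines; this replaceRegion is a simplified stand-in for the real ReplicationUtils helper, not its actual implementation:

    import java.util.ArrayList;
    import java.util.List;

    public final class RegionReplaceSketch {
        // Replaces the lines between "// region name" and "// endregion name" markers.
        static List<String> replaceRegion(final List<String> lines, final String name, final List<String> body) {
            final List<String> out = new ArrayList<>();
            boolean inRegion = false;
            for (final String line : lines) {
                if (line.trim().equals("// region " + name)) {
                    inRegion = true;
                    out.add(line);
                    out.addAll(body);
                } else if (line.trim().equals("// endregion " + name)) {
                    inRegion = false;
                    out.add(line);
                } else if (!inRegion) {
                    out.add(line);
                }
            }
            return out;
        }

        public static void main(final String[] args) {
            final List<String> src = List.of(
                    "// region FPCastHelper",
                    "old body",
                    "// endregion FPCastHelper");
            // prints [// region FPCastHelper, new body, // endregion FPCastHelper]
            System.out.println(replaceRegion(src, "FPCastHelper", List.of("new body")));
        }
    }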
         final File file = new File(path);
         List<String> lines = FileUtils.readLines(file, Charset.defaultCharset());
-        lines = removeImport(lines, "import io.deephaven.engine.primitive.function." + type + "Consumer;");
-        lines = addImport(lines, "import java.util.function." + type + "Consumer;");
+        lines = globalReplacements(lines,
+                "Float16.toDouble", "Float16.toFloat",
+                "doubleing point precision", "floating point precision",
+                "half-precision doubles", "half-precision floats");
+        lines = replaceRegion(lines, "PrecisionSingleDhNulls", List.of(
+                "                    final float v = is.readFloat();",
+                "                    chunk.set(offset + ii, doubleCast(v));"));
+        lines = replaceRegion(lines, "PrecisionDoubleDhNulls", List.of(
+                "                    chunk.set(offset + ii, is.readDouble());"));
+        lines = replaceRegion(lines, "PrecisionSingleValidityBuffer", List.of(
+                "                elementSize = Float.BYTES;",
+                "                supplier = () -> doubleCast(is.readFloat());"));
+        lines = replaceRegion(lines, "PrecisionDoubleValidityBuffer", List.of(
+                "                supplier = is::readDouble;"));
+        lines = replaceRegion(lines, "FPCastHelper", List.of(
+                "    private static double doubleCast(float a) {",
+                "        return a == QueryConstants.NULL_FLOAT ? QueryConstants.NULL_DOUBLE : (double) a;",
+                "    }"));
         FileUtils.writeLines(file, lines);
     }
 
-    private static void fixupChunkWriterGen(final @NotNull String path, final @NotNull String type)
+    private static void fixupVectorExpansionKernel(final @NotNull String path, final @NotNull String type)
             throws IOException {
         final File file = new File(path);
         List<String> lines = FileUtils.readLines(file, Charset.defaultCharset());
-        lines = removeImport(lines, "import io.deephaven.engine.primitive.function.To" + type + "Function;");
-        lines = addImport(lines, "import java.util.function.To" + type + "Function;");
+        lines = removeImport(lines, "import io.deephaven.engine.primitive.function." + type + "Consumer;");
+        lines = addImport(lines, "import java.util.function." + type + "Consumer;");
         FileUtils.writeLines(file, lines);
     }
 }
diff --git a/replication/static/src/main/java/io/deephaven/replicators/ReplicateSourcesAndChunks.java b/replication/static/src/main/java/io/deephaven/replicators/ReplicateSourcesAndChunks.java
index 9e19531062e..c0237781c89 100644
--- a/replication/static/src/main/java/io/deephaven/replicators/ReplicateSourcesAndChunks.java
+++ b/replication/static/src/main/java/io/deephaven/replicators/ReplicateSourcesAndChunks.java
@@ -597,6 +597,10 @@ private static void replicateBooleanChunks() throws IOException {
         classLines = ReplicationUtils.removeRegion(classLines, "CopyToBuffer");
         classLines = ReplicationUtils.removeRegion(classLines, "BinarySearchImports");
         classLines = ReplicationUtils.removeRegion(classLines, "BinarySearch");
+        classLines = ReplicationUtils.replaceRegion(classLines, "isNull", Arrays.asList(
+                "    public final boolean isNull(int index) {",
+                "        return false;",
+                "    }"));
         FileUtils.writeLines(classFile, classLines);
     }
diff --git a/server/src/main/java/io/deephaven/server/barrage/BarrageMessageProducer.java b/server/src/main/java/io/deephaven/server/barrage/BarrageMessageProducer.java
index 08f567486eb..72c78a98de8 100644
--- a/server/src/main/java/io/deephaven/server/barrage/BarrageMessageProducer.java
+++ b/server/src/main/java/io/deephaven/server/barrage/BarrageMessageProducer.java
@@ -36,7 +36,7 @@
 import io.deephaven.extensions.barrage.BarragePerformanceLog;
 import io.deephaven.extensions.barrage.BarrageSubscriptionOptions;
 import io.deephaven.extensions.barrage.BarrageSubscriptionPerformanceLogger;
-import io.deephaven.extensions.barrage.chunk.ChunkReader;
+import io.deephaven.extensions.barrage.BarrageTypeInfo;
 import io.deephaven.extensions.barrage.chunk.ChunkWriter;
 import io.deephaven.extensions.barrage.chunk.DefaultChunkWriterFactory;
 import io.deephaven.extensions.barrage.util.BarrageUtil;
@@ -52,7 +52,6 @@
 import io.deephaven.util.datastructures.LongSizedDataStructure;
 import io.grpc.StatusRuntimeException;
 import io.grpc.stub.StreamObserver;
-import org.apache.arrow.flatbuf.Message;
 import org.apache.arrow.flatbuf.Schema;
 import org.apache.commons.lang3.mutable.MutableInt;
 import org.jetbrains.annotations.NotNull;
@@ -363,7 +362,7 @@ public BarrageMessageProducer(
         parent.getColumnSourceMap().forEach((columnName, columnSource) -> {
             int ii = mi.getAndIncrement();
-            chunkWriters[ii] = DefaultChunkWriterFactory.INSTANCE.newWriter(ChunkReader.typeInfo(
+            chunkWriters[ii] = DefaultChunkWriterFactory.INSTANCE.newWriter(BarrageTypeInfo.make(
                     ReinterpretUtils.maybeConvertToPrimitiveDataType(columnSource.getType()),
                     columnSource.getComponentType(),
                     schema.fields(ii)));
diff --git a/server/src/main/java/io/deephaven/server/hierarchicaltable/HierarchicalTableViewSubscription.java b/server/src/main/java/io/deephaven/server/hierarchicaltable/HierarchicalTableViewSubscription.java
index 9aacbd68cd9..28ab6a1ee76 100644
--- a/server/src/main/java/io/deephaven/server/hierarchicaltable/HierarchicalTableViewSubscription.java
+++ b/server/src/main/java/io/deephaven/server/hierarchicaltable/HierarchicalTableViewSubscription.java
@@ -22,7 +22,6 @@
 import io.deephaven.engine.table.impl.sources.ReinterpretUtils;
 import io.deephaven.engine.table.impl.util.BarrageMessage;
 import io.deephaven.extensions.barrage.*;
-import io.deephaven.extensions.barrage.chunk.ChunkReader;
 import io.deephaven.extensions.barrage.chunk.ChunkWriter;
 import io.deephaven.extensions.barrage.chunk.DefaultChunkWriterFactory;
 import io.deephaven.extensions.barrage.util.BarrageUtil;
@@ -363,7 +362,7 @@ private static long buildAndSendSnapshot(
             }
             barrageMessage.addColumnData[ci] = addColumnData;
 
-            chunkWriters[ci] = DefaultChunkWriterFactory.INSTANCE.newWriter(ChunkReader.typeInfo(
+            chunkWriters[ci] = DefaultChunkWriterFactory.INSTANCE.newWriter(BarrageTypeInfo.make(
                     ReinterpretUtils.maybeConvertToPrimitiveDataType(columnDefinition.getDataType()),
                     columnDefinition.getComponentType(),
                     BarrageUtil.flatbufFieldFor(columnDefinition, Map.of())));
diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageMessageReader.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageMessageReader.java
index aeb335c3364..428c1b991be 100644
--- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageMessageReader.java
+++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageMessageReader.java
@@ -11,6 +11,8 @@
 import io.deephaven.chunk.ChunkType;
 import io.deephaven.chunk.WritableChunk;
 import io.deephaven.chunk.attributes.Values;
+import io.deephaven.extensions.barrage.BarrageOptions;
+import io.deephaven.extensions.barrage.BarrageTypeInfo;
 import io.deephaven.extensions.barrage.chunk.ChunkWriter;
 import io.deephaven.extensions.barrage.chunk.ChunkReader;
 import io.deephaven.extensions.barrage.util.FlatBufferIteratorAdapter;
@@ -56,7 +58,7 @@ public class WebBarrageMessageReader {
     private final List<ChunkReader<WritableChunk<Values>>> readers = new ArrayList<>();
 
     public WebBarrageMessage parseFrom(
-            final ChunkReader.Options options,
+            final BarrageOptions options,
             ChunkType[] columnChunkTypes,
             Class<?>[] columnTypes,
             Class<?>[] componentTypes,
@@ -155,7 +157,7 @@ public WebBarrageMessage parseFrom(
             for (int i = 0; i < schema.fieldsLength(); i++) {
                 Field field = schema.fields(i);
                 readers.add(chunkReaderFactory.newReader(
-                        ChunkReader.typeInfo(columnTypes[i], componentTypes[i], field), options));
+                        BarrageTypeInfo.make(columnTypes[i], componentTypes[i], field), options));
             }
             return null;
         }
diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java
index 55759d65ed7..0fedeb406cc 100644
--- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java
+++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java
@@ -12,6 +12,8 @@
 import io.deephaven.chunk.WritableLongChunk;
 import io.deephaven.chunk.WritableObjectChunk;
 import io.deephaven.chunk.attributes.Values;
+import io.deephaven.extensions.barrage.BarrageOptions;
+import io.deephaven.extensions.barrage.BarrageTypeInfo;
 import io.deephaven.extensions.barrage.chunk.BooleanChunkReader;
 import io.deephaven.extensions.barrage.chunk.ByteChunkReader;
 import io.deephaven.extensions.barrage.chunk.CharChunkReader;
@@ -64,8 +66,8 @@ public class WebChunkReaderFactory implements ChunkReader.Factory {
     @SuppressWarnings("unchecked")
     @Override
     public <T extends WritableChunk<Values>> ChunkReader<T> newReader(
-            @NotNull final ChunkReader.TypeInfo typeInfo,
-            @NotNull final ChunkReader.Options options) {
+            @NotNull final BarrageTypeInfo typeInfo,
+            @NotNull final BarrageOptions options) {
         switch (typeInfo.arrowField().typeType()) {
             case Type.Int: {
                 Int t = new Int();
@@ -263,7 +265,7 @@ public <T extends WritableChunk<Values>> ChunkReader<T> newReader(
                             outChunk, outOffset, totalRows);
                 }
 
-                final ChunkReader.TypeInfo componentTypeInfo = new ChunkReader.TypeInfo(
+                final BarrageTypeInfo componentTypeInfo = new BarrageTypeInfo(
                         typeInfo.componentType(),
                         typeInfo.componentType().getComponentType(),
                         typeInfo.arrowField().children(0));
diff --git a/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java b/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java
index 7eff8266775..8ea204f161b 100644
--- a/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java
+++ b/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java
@@ -9,6 +9,7 @@
 import elemental2.core.Uint8Array;
 import elemental2.promise.Promise;
 import io.deephaven.chunk.ChunkType;
+import io.deephaven.extensions.barrage.BarrageTypeInfo;
 import io.deephaven.javascript.proto.dhinternal.browserheaders.BrowserHeaders;
 import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.table_pb.ExportedTableCreationResponse;
 import io.deephaven.web.client.api.*;
@@ -255,8 +256,7 @@ public ChunkType[] chunkTypes() {
     /**
      * Returns the Java Class to represent each column in the table. This lets the client replace certain JVM-only
-     * classes with alternative implementations, but still use the simple
-     * {@link io.deephaven.extensions.barrage.chunk.ChunkReader.TypeInfo} wrapper.
+     * classes with alternative implementations, but still use the simple {@link BarrageTypeInfo} wrapper.
      */
     public Class<?>[] columnTypes() {
         return Arrays.stream(tableDef.getColumns())