From 4c08c7d5e6be64feb7bf4a60dcd9fe85210ecbfe Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Thu, 28 Dec 2023 15:44:55 -0600 Subject: [PATCH 001/219] Quick rewrite to drop js flatbuffers, using java impl for barrage/flight Doesn't actually work yet. Before: dh-internal.js: 1.55MB dh-core.js: 2.22MB After: dh-internal.js: 1.47MB dh-core.js: 2.24MB --- proto/raw-js-openapi/src/index.js | 28 +- web/client-api/client-api.gradle | 14 +- .../io/deephaven/web/DeephavenApi.gwt.xml | 3 + .../web/client/api/WorkerConnection.java | 96 ++-- .../client/api/barrage/WebBarrageUtils.java | 170 ++++--- .../web/client/api/parse/JsDataHandler.java | 61 +-- .../TableViewportSubscription.java | 49 +- .../web/client/api/tree/JsTreeTable.java | 57 ++- .../web/client/state/ClientTableState.java | 3 +- .../google/flatbuffers/FlatBuffers.gwt.xml | 6 + .../google/flatbuffers/ArrayReadWriteBuf.java | 252 ++++++++++ .../super/com/google/flatbuffers/Utf8Old.java | 8 + .../com/google/flatbuffers/Utf8Safe.java | 448 ++++++++++++++++++ .../flatbuf/BarrageFlatbufFormat.gwt.xml | 4 + .../arrow/flatbuf/FlightFlatbufFormat.gwt.xml | 4 + 15 files changed, 966 insertions(+), 237 deletions(-) create mode 100644 web/client-api/src/main/resources/com/google/flatbuffers/FlatBuffers.gwt.xml create mode 100644 web/client-api/src/main/resources/com/google/flatbuffers/super/com/google/flatbuffers/ArrayReadWriteBuf.java create mode 100644 web/client-api/src/main/resources/com/google/flatbuffers/super/com/google/flatbuffers/Utf8Old.java create mode 100644 web/client-api/src/main/resources/com/google/flatbuffers/super/com/google/flatbuffers/Utf8Safe.java create mode 100644 web/client-api/src/main/resources/io/deephaven/barrage/flatbuf/BarrageFlatbufFormat.gwt.xml create mode 100644 web/client-api/src/main/resources/org/apache/arrow/flatbuf/FlightFlatbufFormat.gwt.xml diff --git a/proto/raw-js-openapi/src/index.js b/proto/raw-js-openapi/src/index.js index 55ebdf643d1..d753c1346cc 100644 --- a/proto/raw-js-openapi/src/index.js +++ b/proto/raw-js-openapi/src/index.js @@ -28,11 +28,11 @@ var browserHeaders = require("browser-headers"); var grpcWeb = require("@improbable-eng/grpc-web");//usually .grpc var jspb = require("google-protobuf"); -var flatbuffers = require("flatbuffers").flatbuffers; -var barrage = require("@deephaven/barrage"); +// var flatbuffers = require("flatbuffers").flatbuffers; +// var barrage = require("@deephaven/barrage"); -var message = require('./arrow/flight/flatbuf/Message_generated'); -var schema = require('./arrow/flight/flatbuf/Schema_generated'); +// var message = require('./arrow/flight/flatbuf/Message_generated'); +// var schema = require('./arrow/flight/flatbuf/Schema_generated'); var io = { deephaven: { proto: { @@ -58,17 +58,17 @@ var io = { deephaven: { hierarchicaltable_pb: proto.io.deephaven.proto.backplane.grpc, hierarchicaltable_pb_service: hierarchicalTableService }, - barrage: { - "flatbuf": { - "Barrage_generated": barrage, - } - } + // barrage: { + // "flatbuf": { + // "Barrage_generated": barrage, + // } + // } }}; var arrow = { flight: { - flatbuf: { - Message_generated: message, - Schema_generated: schema, - }, + // flatbuf: { + // Message_generated: message, + // Schema_generated: schema, + // }, protocol: { Flight_pb: proto.arrow.flight.protocol, Flight_pb_service: flightService, @@ -80,7 +80,7 @@ var dhinternal = { browserHeaders, jspb, grpcWeb,//TODO need to expand this to the specific things we need - flatbuffers, + // flatbuffers, io, arrow }; diff --git a/web/client-api/client-api.gradle 
b/web/client-api/client-api.gradle index 2aba31ade74..2a28a71bc3f 100644 --- a/web/client-api/client-api.gradle +++ b/web/client-api/client-api.gradle @@ -18,15 +18,25 @@ configurations { typescriptDoclet testImplementation.extendsFrom junit } - +repositories { + mavenLocal() +} dependencies { implementation project(':web-shared-beans') implementation project(':web-client-backplane') + // FIXME: update supersource to match 1.12.0, it is currently 2.0 + implementation 'com.google.flatbuffers:flatbuffers-java:1.12.0' + implementation 'com.google.flatbuffers:flatbuffers-java:1.12.0:sources' + implementation 'org.apache.arrow:arrow-format:13.0.0' + implementation 'org.apache.arrow:arrow-format:13.0.0:sources' + implementation 'io.deephaven.barrage:barrage-format:0.6.0' + implementation 'io.deephaven.barrage:barrage-format:0.6.0:sources' + implementation 'com.vertispan.tsdefs:jsinterop-ts-defs-annotations:1.0.0-RC4' typescriptDoclet 'com.vertispan.tsdefs:jsinterop-ts-defs-doclet:1.0.0-RC4' - implementation 'com.vertispan.nio:gwt-nio:1.0-alpha-1' + implementation 'com.vertispan.nio:gwt-nio:0.1-SNAPSHOT' js project(path: ':proto:raw-js-openapi', configuration: 'js') diff --git a/web/client-api/src/main/java/io/deephaven/web/DeephavenApi.gwt.xml b/web/client-api/src/main/java/io/deephaven/web/DeephavenApi.gwt.xml index 4f6177621b8..307ea03bb6a 100644 --- a/web/client-api/src/main/java/io/deephaven/web/DeephavenApi.gwt.xml +++ b/web/client-api/src/main/java/io/deephaven/web/DeephavenApi.gwt.xml @@ -2,6 +2,9 @@ + + + diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java b/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java index f9fe987e1b6..b9d77372adf 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java @@ -3,6 +3,7 @@ // package io.deephaven.web.client.api; +import com.google.flatbuffers.FlatBufferBuilder; import com.vertispan.tsdefs.annotations.TsIgnore; import elemental2.core.JsArray; import elemental2.core.JsObject; @@ -12,30 +13,19 @@ import elemental2.dom.CustomEventInit; import elemental2.dom.DomGlobal; import elemental2.promise.Promise; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.message_generated.org.apache.arrow.flatbuf.FieldNode; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.message_generated.org.apache.arrow.flatbuf.Message; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.message_generated.org.apache.arrow.flatbuf.MessageHeader; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.message_generated.org.apache.arrow.flatbuf.RecordBatch; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Buffer; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Field; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.KeyValue; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.MetadataVersion; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Schema; +import io.deephaven.barrage.flatbuf.BarrageMessageType; +import io.deephaven.barrage.flatbuf.BarrageMessageWrapper; +import io.deephaven.barrage.flatbuf.BarrageSubscriptionOptions; 
+import io.deephaven.barrage.flatbuf.BarrageSubscriptionRequest; +import io.deephaven.barrage.flatbuf.BarrageUpdateMetadata; +import io.deephaven.barrage.flatbuf.ColumnConversionMode; import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.browserflight_pb_service.BrowserFlightServiceClient; import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb.FlightData; import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb.HandshakeRequest; import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb.HandshakeResponse; import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb_service.FlightServiceClient; import io.deephaven.javascript.proto.dhinternal.browserheaders.BrowserHeaders; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Long; import io.deephaven.javascript.proto.dhinternal.grpcweb.grpc.Code; -import io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf.BarrageMessageType; -import io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf.BarrageMessageWrapper; -import io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf.BarrageSubscriptionOptions; -import io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf.BarrageSubscriptionRequest; -import io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf.BarrageUpdateMetadata; -import io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf.ColumnConversionMode; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.application_pb.FieldInfo; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.application_pb.FieldsChangeUpdate; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.application_pb.ListFieldsRequest; @@ -104,6 +94,16 @@ import jsinterop.annotations.JsOptional; import jsinterop.base.Js; import jsinterop.base.JsPropertyMap; +import org.apache.arrow.flatbuf.Buffer; +import org.apache.arrow.flatbuf.Field; +import org.apache.arrow.flatbuf.FieldNode; +import org.apache.arrow.flatbuf.KeyValue; +import org.apache.arrow.flatbuf.Message; +import org.apache.arrow.flatbuf.MessageHeader; +import org.apache.arrow.flatbuf.MetadataVersion; +import org.apache.arrow.flatbuf.RecordBatch; +import org.apache.arrow.flatbuf.Schema; +import org.gwtproject.nio.TypedArrayHelper; import javax.annotation.Nullable; import java.nio.ByteBuffer; @@ -1074,12 +1074,12 @@ public Promise newTable(String[] columnNames, String[] types, Object[][ dataRef[0] = null; // make a schema that we can embed in the first DoPut message - Builder schema = new Builder(1024); + FlatBufferBuilder schema = new FlatBufferBuilder(1024); // while we're examining columns, build the copiers for data List columns = new ArrayList<>(); - double[] fields = new double[columnNames.length]; + int[] fields = new int[columnNames.length]; for (int i = 0; i < columnNames.length; i++) { String columnName = columnNames[i]; String columnType = types[i]; @@ -1087,9 +1087,9 @@ public Promise newTable(String[] columnNames, String[] types, Object[][ JsDataHandler writer = JsDataHandler.getHandler(columnType); 
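            // build the Arrow Field for this column: its name, the Arrow type from the handler,
            // and a "deephaven:type" metadata entry so the server can recover the original column type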
columns.add(writer); - double nameOffset = schema.createString(columnName); - double typeOffset = writer.writeType(schema); - double metadataOffset = Field.createCustomMetadataVector(schema, new double[] { + int nameOffset = schema.createString(columnName); + int typeOffset = writer.writeType(schema); + int metadataOffset = Field.createCustomMetadataVector(schema, new int[] { KeyValue.createKeyValue(schema, schema.createString("deephaven:type"), schema.createString(writer.deephavenType())) }); @@ -1104,7 +1104,7 @@ public Promise newTable(String[] columnNames, String[] types, Object[][ fields[i] = Field.endField(schema); } - double fieldsOffset = Schema.createFieldsVector(schema, fields); + int fieldsOffset = Schema.createFieldsVector(schema, fields); Schema.startSchema(schema); Schema.addFields(schema, fieldsOffset); @@ -1146,7 +1146,7 @@ public Promise newTable(String[] columnNames, String[] types, Object[][ FlightData bodyMessage = new FlightData(); bodyMessage.setAppMetadata(WebBarrageUtils.emptyMessage()); - Builder bodyData = new Builder(1024); + FlatBufferBuilder bodyData = new FlatBufferBuilder(1024); // iterate each column, building buffers and fieldnodes, as well as building the actual payload List buffers = new ArrayList<>(); @@ -1170,25 +1170,25 @@ public Promise newTable(String[] columnNames, String[] types, Object[][ for (int i = buffers.size() - 1; i >= 0; i--) { Uint8Array buffer = buffers.get(i); cumulativeOffset -= buffer.byteLength; - Buffer.createBuffer(bodyData, Long.create(cumulativeOffset, 0), Long.create(buffer.byteLength, 0)); + Buffer.createBuffer(bodyData, cumulativeOffset, buffer.byteLength); } assert cumulativeOffset == 0; - double buffersOffset = bodyData.endVector(); + int buffersOffset = bodyData.endVector(); RecordBatch.startNodesVector(bodyData, nodes.size()); for (int i = nodes.size() - 1; i >= 0; i--) { JsDataHandler.Node node = nodes.get(i); - FieldNode.createFieldNode(bodyData, Long.create(node.length(), 0), Long.create(node.nullCount(), 0)); + FieldNode.createFieldNode(bodyData, node.length(), node.nullCount()); } - double nodesOffset = bodyData.endVector(); + int nodesOffset = bodyData.endVector(); RecordBatch.startRecordBatch(bodyData); RecordBatch.addBuffers(bodyData, buffersOffset); RecordBatch.addNodes(bodyData, nodesOffset); - RecordBatch.addLength(bodyData, Long.create(data[0].length, 0)); + RecordBatch.addLength(bodyData, data[0].length); - double recordBatchOffset = RecordBatch.endRecordBatch(bodyData); + int recordBatchOffset = RecordBatch.endRecordBatch(bodyData); bodyMessage.setDataHeader(createMessage(bodyData, MessageHeader.RecordBatch, recordBatchOffset, length, 0)); bodyMessage.setDataBody(padAndConcat(buffers, length)); @@ -1209,11 +1209,11 @@ private Uint8Array padAndConcat(List buffers, int length) { return all; } - private static Uint8Array createMessage(Builder payload, int messageHeaderType, double messageHeaderOffset, - int bodyLength, double customMetadataOffset) { + private static Uint8Array createMessage(FlatBufferBuilder payload, byte messageHeaderType, int messageHeaderOffset, + int bodyLength, int customMetadataOffset) { payload.finish(Message.createMessage(payload, MetadataVersion.V5, messageHeaderType, messageHeaderOffset, - Long.create(bodyLength, 0), customMetadataOffset)); - return payload.asUint8Array(); + bodyLength, customMetadataOffset)); + return new Uint8Array(TypedArrayHelper.unwrap(payload.dataBuffer())); } public Promise mergeTables(JsTable[] tables, HasEventHandling failHandler) { @@ -1424,21 +1424,22 @@ 
private void flush() { state.setSubscribed(true); - Builder subscriptionReq = new Builder(1024); + FlatBufferBuilder subscriptionReq = new FlatBufferBuilder(1024); - double columnsOffset = BarrageSubscriptionRequest.createColumnsVector(subscriptionReq, - makeUint8ArrayFromBitset(includedColumns)); - double viewportOffset = 0; + int columnsOffset = BarrageSubscriptionRequest.createColumnsVector(subscriptionReq, + includedColumns.toByteArray()); + int viewportOffset = 0; if (isViewport) { viewportOffset = BarrageSubscriptionRequest.createViewportVector(subscriptionReq, serializeRanges( vps.stream().map(TableSubscriptionRequest::getRows).collect(Collectors.toSet()))); } // TODO #188 support minUpdateIntervalMs - double serializationOptionsOffset = BarrageSubscriptionOptions + int serializationOptionsOffset = BarrageSubscriptionOptions .createBarrageSubscriptionOptions(subscriptionReq, ColumnConversionMode.Stringify, true, 1000, - 0, 0); - double tableTicketOffset = - BarrageSubscriptionRequest.createTicketVector(subscriptionReq, state.getHandle().getTicket()); + 0, 0, false); + int tableTicketOffset = + BarrageSubscriptionRequest.createTicketVector(subscriptionReq, + TypedArrayHelper.wrap(state.getHandle().getTicket())); BarrageSubscriptionRequest.startBarrageSubscriptionRequest(subscriptionReq); BarrageSubscriptionRequest.addColumns(subscriptionReq, columnsOffset); BarrageSubscriptionRequest.addSubscriptionOptions(subscriptionReq, serializationOptionsOffset); @@ -1462,26 +1463,23 @@ private void flush() { public void apply(FlightData data) { ByteBuffer body = typedArrayToLittleEndianByteBuffer(data.getDataBody_asU8()); Message headerMessage = Message - .getRootAsMessage(new io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer( - data.getDataHeader_asU8())); + .getRootAsMessage(TypedArrayHelper.wrap(data.getDataHeader_asU8())); if (body.limit() == 0 && headerMessage.headerType() != MessageHeader.RecordBatch) { // a subscription stream presently ignores schemas and other message types // TODO hang on to the schema to better handle the now-Utf8 columns return; } - RecordBatch header = headerMessage.header(new RecordBatch()); + RecordBatch header = (RecordBatch) headerMessage.header(new RecordBatch()); BarrageMessageWrapper barrageMessageWrapper = BarrageMessageWrapper.getRootAsBarrageMessageWrapper( - new io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer( - data.getAppMetadata_asU8())); + TypedArrayHelper.wrap(data.getAppMetadata_asU8())); if (barrageMessageWrapper.msgType() == BarrageMessageType.None) { // continue previous message, just read RecordBatch appendAndMaybeFlush(header, body); } else { assert barrageMessageWrapper.msgType() == BarrageMessageType.BarrageUpdateMetadata; BarrageUpdateMetadata barrageUpdate = BarrageUpdateMetadata.getRootAsBarrageUpdateMetadata( - new io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer( - new Uint8Array(barrageMessageWrapper.msgPayloadArray()))); + barrageMessageWrapper.msgPayloadAsByteBuffer()); startAndMaybeFlush(barrageUpdate.isSnapshot(), header, body, barrageUpdate, isViewport, columnTypes); } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java index a68e9b3d843..27e910a47c0 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java @@ -3,26 +3,26 
@@
// package io.deephaven.web.client.api.barrage;
+import com.google.flatbuffers.FlatBufferBuilder;
 import elemental2.core.*;
-import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.message_generated.org.apache.arrow.flatbuf.FieldNode;
-import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.message_generated.org.apache.arrow.flatbuf.Message;
-import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.message_generated.org.apache.arrow.flatbuf.MessageHeader;
-import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.message_generated.org.apache.arrow.flatbuf.RecordBatch;
-import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Buffer;
-import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Field;
-import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.KeyValue;
-import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Schema;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder;
-import io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf.BarrageMessageType;
-import io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf.BarrageMessageWrapper;
-import io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf.BarrageModColumnMetadata;
-import io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf.BarrageUpdateMetadata;
+import io.deephaven.barrage.flatbuf.BarrageMessageType;
+import io.deephaven.barrage.flatbuf.BarrageMessageWrapper;
+import io.deephaven.barrage.flatbuf.BarrageModColumnMetadata;
+import io.deephaven.barrage.flatbuf.BarrageUpdateMetadata;
 import io.deephaven.web.client.api.barrage.def.ColumnDefinition;
 import io.deephaven.web.client.api.barrage.def.InitialTableDefinition;
 import io.deephaven.web.client.api.barrage.def.TableAttributesDefinition;
 import io.deephaven.web.shared.data.*;
 import io.deephaven.web.shared.data.columns.*;
 import jsinterop.base.Js;
+import org.apache.arrow.flatbuf.Buffer;
+import org.apache.arrow.flatbuf.Field;
+import org.apache.arrow.flatbuf.FieldNode;
+import org.apache.arrow.flatbuf.KeyValue;
+import org.apache.arrow.flatbuf.Message;
+import org.apache.arrow.flatbuf.MessageHeader;
+import org.apache.arrow.flatbuf.RecordBatch;
+import org.apache.arrow.flatbuf.Schema;
 import org.gwtproject.nio.TypedArrayHelper;

 import java.math.BigDecimal;
@@ -37,6 +37,7 @@
 import java.util.Map;
 import java.util.Set;
 import java.util.function.DoubleFunction;
+import java.util.function.IntFunction;
 import java.util.stream.IntStream;

 /**
@@ -45,22 +46,21 @@ public class WebBarrageUtils {
     private static final int MAGIC = 0x6E687064;

-    public static Uint8Array wrapMessage(Builder innerBuilder, int messageType) {
-        Builder outerBuilder = new Builder(1024);
-        // This deprecation is incorrect, tsickle didn't understand that only one overload is deprecated
-        // noinspection deprecation
-        double messageOffset = BarrageMessageWrapper.createMsgPayloadVector(outerBuilder, innerBuilder.asUint8Array());
-        double offset =
+    public static Uint8Array wrapMessage(FlatBufferBuilder innerBuilder, byte messageType) {
+        // TODO this doesn't look right, probably we can
append the message rather than copying? + FlatBufferBuilder outerBuilder = new FlatBufferBuilder(1024); + int messageOffset = BarrageMessageWrapper.createMsgPayloadVector(outerBuilder, innerBuilder.dataBuffer()); + int offset = BarrageMessageWrapper.createBarrageMessageWrapper(outerBuilder, MAGIC, messageType, messageOffset); outerBuilder.finish(offset); - return outerBuilder.asUint8Array(); + return new Uint8Array(TypedArrayHelper.unwrap(outerBuilder.dataBuffer().slice())); } public static Uint8Array emptyMessage() { - Builder builder = new Builder(1024); - double offset = BarrageMessageWrapper.createBarrageMessageWrapper(builder, MAGIC, BarrageMessageType.None, 0); + FlatBufferBuilder builder = new FlatBufferBuilder(1024); + int offset = BarrageMessageWrapper.createBarrageMessageWrapper(builder, MAGIC, BarrageMessageType.None, 0); builder.finish(offset); - return builder.asUint8Array(); + return new Uint8Array(TypedArrayHelper.unwrap(builder.dataBuffer())); } public static InitialTableDefinition readTableDefinition(Schema schema) { @@ -83,7 +83,7 @@ public static ColumnDefinition[] readColumnDefinitions(Schema schema) { Field f = schema.fields(i); Map fieldMetadata = keyValuePairs("deephaven:", f.customMetadataLength(), f::customMetadata); - cols[i].setName(f.name().asString()); + cols[i].setName(f.name()); cols[i].setColumnIndex(i); cols[i].setType(fieldMetadata.get("type")); cols[i].setIsSortable("true".equals(fieldMetadata.get("isSortable"))); @@ -129,24 +129,23 @@ public static Schema readSchemaMessage(Uint8Array flightSchemaMessage) { // - IPC_CONTINUATION_TOKEN (4-byte int of -1) // - message size (4-byte int) // - a Message wrapping the schema - io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer bb = - new io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer(flightSchemaMessage); - bb.setPosition(bb.position() + 8); + ByteBuffer bb = TypedArrayHelper.wrap(flightSchemaMessage); + bb.position(bb.position() + 8); Message headerMessage = Message.getRootAsMessage(bb); assert headerMessage.headerType() == MessageHeader.Schema; - return headerMessage.header(new Schema()); + return (Schema) headerMessage.header(new Schema()); } public static Map keyValuePairs(String filterPrefix, double count, - DoubleFunction accessor) { + IntFunction accessor) { Map map = new HashMap<>(); for (int i = 0; i < count; i++) { KeyValue pair = accessor.apply(i); - String key = pair.key().asString(); + String key = pair.key(); if (key.startsWith(filterPrefix)) { key = key.substring(filterPrefix.length()); - String oldValue = map.put(key, pair.value().asString()); + String oldValue = map.put(key, pair.value()); assert oldValue == null : key + " had " + oldValue + ", replaced with " + pair.value(); } } @@ -198,10 +197,10 @@ public static Uint8Array makeUint8ArrayFromBitset(BitSet bitset) { return array; } - public static Uint8Array serializeRanges(Set rangeSets) { + public static ByteBuffer serializeRanges(Set rangeSets) { final RangeSet s; if (rangeSets.size() == 0) { - return new Uint8Array(0); + return ByteBuffer.allocate(0); } else if (rangeSets.size() == 1) { s = rangeSets.iterator().next(); } else { @@ -212,20 +211,17 @@ public static Uint8Array serializeRanges(Set rangeSets) { } ByteBuffer payload = CompressedRangeSetReader.writeRange(s); - ArrayBufferView buffer = TypedArrayHelper.unwrap(payload); - return new Uint8Array(buffer); + return payload; } public static ByteBuffer typedArrayToLittleEndianByteBuffer(Uint8Array data) { - ArrayBuffer slicedBuffer = data.slice().buffer; - 
ByteBuffer bb = TypedArrayHelper.wrap(slicedBuffer); + ByteBuffer bb = TypedArrayHelper.wrap(data); bb.order(ByteOrder.LITTLE_ENDIAN); return bb; } public static ByteBuffer typedArrayToLittleEndianByteBuffer(Int8Array data) { - ArrayBuffer slicedBuffer = data.slice().buffer; - ByteBuffer bb = TypedArrayHelper.wrap(slicedBuffer); + ByteBuffer bb = TypedArrayHelper.wrap(data); bb.order(ByteOrder.LITTLE_ENDIAN); return bb; } @@ -236,15 +232,15 @@ public static TableSnapshot createSnapshot(RecordBatch header, ByteBuffer body, final RangeSet includedAdditions; if (barrageUpdate == null) { - includedAdditions = added = RangeSet.ofRange(0, (long) (header.length().toFloat64() - 1)); + includedAdditions = added = RangeSet.ofRange(0, (long) (header.length() - 1)); } else { added = new CompressedRangeSetReader() - .read(typedArrayToLittleEndianByteBuffer(barrageUpdate.addedRowsArray())); + .read(barrageUpdate.addedRowsAsByteBuffer()); - Int8Array addedRowsIncluded = barrageUpdate.addedRowsIncludedArray(); + ByteBuffer addedRowsIncluded = barrageUpdate.addedRowsIncludedAsByteBuffer(); if (isViewport && addedRowsIncluded != null) { includedAdditions = new CompressedRangeSetReader() - .read(typedArrayToLittleEndianByteBuffer(addedRowsIncluded)); + .read(addedRowsIncluded); } else { // if this isn't a viewport, then a second index isn't sent, because all rows are included includedAdditions = added; @@ -282,19 +278,19 @@ public DeltaUpdatesBuilder(BarrageUpdateMetadata barrageUpdate, boolean isViewpo this.columnTypes = columnTypes; deltaUpdates.setAdded(new CompressedRangeSetReader() - .read(typedArrayToLittleEndianByteBuffer(barrageUpdate.addedRowsArray()))); + .read(barrageUpdate.addedRowsAsByteBuffer())); deltaUpdates.setRemoved(new CompressedRangeSetReader() - .read(typedArrayToLittleEndianByteBuffer(barrageUpdate.removedRowsArray()))); + .read(barrageUpdate.removedRowsAsByteBuffer())); deltaUpdates.setShiftedRanges( - new ShiftedRangeReader().read(typedArrayToLittleEndianByteBuffer(barrageUpdate.shiftDataArray()))); + new ShiftedRangeReader().read(barrageUpdate.shiftDataAsByteBuffer())); RangeSet includedAdditions; - Int8Array addedRowsIncluded = barrageUpdate.addedRowsIncludedArray(); + ByteBuffer addedRowsIncluded = barrageUpdate.addedRowsIncludedAsByteBuffer(); if (isViewport && addedRowsIncluded != null) { includedAdditions = new CompressedRangeSetReader() - .read(typedArrayToLittleEndianByteBuffer(addedRowsIncluded)); + .read(addedRowsIncluded); } else { // if this isn't a viewport, then a second index isn't sent, because all rows are included includedAdditions = deltaUpdates.getAdded(); @@ -307,7 +303,7 @@ public DeltaUpdatesBuilder(BarrageUpdateMetadata barrageUpdate, boolean isViewpo for (int columnIndex = 0; columnIndex < columnTypes.length; ++columnIndex) { BarrageModColumnMetadata columnMetadata = barrageUpdate.modColumnNodes(columnIndex); RangeSet modifiedRows = new CompressedRangeSetReader() - .read(typedArrayToLittleEndianByteBuffer(columnMetadata.modifiedRowsArray())); + .read(columnMetadata.modifiedRowsAsByteBuffer()); numModRowsRemaining = Math.max(numModRowsRemaining, modifiedRows.size()); } } @@ -334,13 +330,13 @@ private void handleAddBatch(RecordBatch recordBatch, ByteBuffer body) { DeltaUpdates.ColumnAdditions[] addedColumnData = new DeltaUpdates.ColumnAdditions[columnTypes.length]; for (int columnIndex = 0; columnIndex < columnTypes.length; ++columnIndex) { assert nodes.hasNext() && buffers.hasNext(); - ColumnData columnData = readArrowBuffer(body, nodes, buffers, (int) 
nodes.peek().length().toFloat64(), + ColumnData columnData = readArrowBuffer(body, nodes, buffers, (int) nodes.peek().length(), columnTypes[columnIndex]); addedColumnData[columnIndex] = new DeltaUpdates.ColumnAdditions(columnIndex, columnData); } deltaUpdates.setSerializedAdditions(addedColumnData); - numAddRowsRemaining -= (long) recordBatch.length().toFloat64(); + numAddRowsRemaining -= (long) recordBatch.length(); } private void handleModBatch(RecordBatch recordBatch, ByteBuffer body) { @@ -356,15 +352,15 @@ private void handleModBatch(RecordBatch recordBatch, ByteBuffer body) { BarrageModColumnMetadata columnMetadata = barrageUpdate.modColumnNodes(columnIndex); RangeSet modifiedRows = new CompressedRangeSetReader() - .read(typedArrayToLittleEndianByteBuffer(columnMetadata.modifiedRowsArray())); + .read(columnMetadata.modifiedRowsAsByteBuffer()); - ColumnData columnData = readArrowBuffer(body, nodes, buffers, (int) nodes.peek().length().toFloat64(), + ColumnData columnData = readArrowBuffer(body, nodes, buffers, (int) nodes.peek().length(), columnTypes[columnIndex]); modifiedColumnData[columnIndex] = new DeltaUpdates.ColumnModifications(columnIndex, modifiedRows, columnData); } deltaUpdates.setSerializedModifications(modifiedColumnData); - numModRowsRemaining -= (long) recordBatch.length().toFloat64(); + numModRowsRemaining -= (long) recordBatch.length(); } public DeltaUpdates build() { @@ -377,29 +373,29 @@ private static ColumnData readArrowBuffer(ByteBuffer data, Iter nodes // explicit cast to be clear that we're rounding down BitSet valid = readValidityBufferAsBitset(data, size, buffers.next()); FieldNode thisNode = nodes.next(); - boolean hasNulls = thisNode.nullCount().toFloat64() != 0; - size = Math.min(size, (int) thisNode.length().toFloat64()); + boolean hasNulls = thisNode.nullCount() != 0; + size = Math.min(size, (int) thisNode.length()); Buffer positions = buffers.next(); switch (columnType) { // for simple well-supported typedarray types, wrap and return case "int": - assert positions.length().toFloat64() >= size * 4; + assert positions.length() >= size * 4; Int32Array intArray = new Int32Array(TypedArrayHelper.unwrap(data).buffer, - (int) positions.offset().toFloat64(), size); + (int) positions.offset(), size); return new IntArrayColumnData(Js.uncheckedCast(intArray)); case "short": - assert positions.length().toFloat64() >= size * 2; + assert positions.length() >= size * 2; Int16Array shortArray = new Int16Array(TypedArrayHelper.unwrap(data).buffer, - (int) positions.offset().toFloat64(), size); + (int) positions.offset(), size); return new ShortArrayColumnData(Js.uncheckedCast(shortArray)); case "boolean": case "java.lang.Boolean": // noinspection IntegerDivisionInFloatingPointContext - assert positions.length().toFloat64() >= ((size + 63) / 64); + assert positions.length() >= ((size + 63) / 64); // booleans are stored as a bitset, but internally we represent booleans as bytes - data.position((int) positions.offset().toFloat64()); - BitSet wireValues = readBitSetWithLength(data, (int) (positions.length().toFloat64())); + data.position((int) positions.offset()); + BitSet wireValues = readBitSetWithLength(data, (int) (positions.length())); Boolean[] boolArray = new Boolean[size]; for (int i = 0; i < size; ++i) { if (!hasNulls || valid.get(i)) { @@ -408,41 +404,41 @@ private static ColumnData readArrowBuffer(ByteBuffer data, Iter nodes } return new BooleanArrayColumnData(boolArray); case "byte": - assert positions.length().toFloat64() >= size; + assert positions.length() >= 
size; Int8Array byteArray = - new Int8Array(TypedArrayHelper.unwrap(data).buffer, (int) positions.offset().toFloat64(), size); + new Int8Array(TypedArrayHelper.unwrap(data).buffer, (int) positions.offset(), size); return new ByteArrayColumnData(Js.uncheckedCast(byteArray)); case "double": - assert positions.length().toFloat64() >= size * 8; + assert positions.length() >= size * 8; Float64Array doubleArray = new Float64Array(TypedArrayHelper.unwrap(data).buffer, - (int) positions.offset().toFloat64(), size); + (int) positions.offset(), size); return new DoubleArrayColumnData(Js.uncheckedCast(doubleArray)); case "float": - assert positions.length().toFloat64() >= size * 4; + assert positions.length() >= size * 4; Float32Array floatArray = new Float32Array(TypedArrayHelper.unwrap(data).buffer, - (int) positions.offset().toFloat64(), size); + (int) positions.offset(), size); return new FloatArrayColumnData(Js.uncheckedCast(floatArray)); case "char": - assert positions.length().toFloat64() >= size * 2; + assert positions.length() >= size * 2; Uint16Array charArray = new Uint16Array(TypedArrayHelper.unwrap(data).buffer, - (int) positions.offset().toFloat64(), size); + (int) positions.offset(), size); return new CharArrayColumnData(Js.uncheckedCast(charArray)); // longs are a special case despite being java primitives case "long": case "java.time.Instant": case "java.time.ZonedDateTime": - assert positions.length().toFloat64() >= size * 8; + assert positions.length() >= size * 8; long[] longArray = new long[size]; - data.position((int) positions.offset().toFloat64()); + data.position((int) positions.offset()); for (int i = 0; i < size; i++) { longArray[i] = data.getLong(); } return new LongArrayColumnData(longArray); // all other types are read out in some custom way case "java.time.LocalTime":// LocalDateArrayColumnData - assert positions.length().toFloat64() >= size * 6; - data.position((int) positions.offset().toFloat64()); + assert positions.length() >= size * 6; + data.position((int) positions.offset()); LocalDate[] localDateArray = new LocalDate[size]; for (int i = 0; i < size; i++) { int year = data.getInt(); @@ -452,10 +448,10 @@ private static ColumnData readArrowBuffer(ByteBuffer data, Iter nodes } return new LocalDateArrayColumnData(localDateArray); case "java.time.LocalDate":// LocalTimeArrayColumnData - assert positions.length().toFloat64() == size * 7; + assert positions.length() == size * 7; LocalTime[] localTimeArray = new LocalTime[size]; - data.position((int) positions.offset().toFloat64()); + data.position((int) positions.offset()); for (int i = 0; i < size; i++) { int nano = data.getInt(); byte hour = data.get(); @@ -471,8 +467,8 @@ private static ColumnData readArrowBuffer(ByteBuffer data, Iter nodes if (columnType.endsWith("[]")) { FieldNode arrayNode = nodes.next(); - int innerSize = (int) arrayNode.length().toFloat64(); - boolean innerHasNulls = arrayNode.nullCount().toFloat64() != 0; + int innerSize = (int) arrayNode.length(); + boolean innerHasNulls = arrayNode.nullCount() != 0; // array type, also read the inner valid buffer and inner offset buffer BitSet innerValid = readValidityBufferAsBitset(data, innerSize, buffers.next()); @@ -501,7 +497,7 @@ private static ColumnData readArrowBuffer(ByteBuffer data, Iter nodes } // might be cheaper to do views on the underlying bb (which will be copied anyway // into the String) - data.position((int) (payload.offset().toFloat64()) + innerOffsets.get(inner)); + data.position((int) (payload.offset()) + innerOffsets.get(inner)); int 
stringSize = innerOffsets.get(inner + 1) - innerOffsets.get(inner); byte[] stringBytes = new byte[stringSize]; data.get(stringBytes); @@ -529,7 +525,7 @@ private static ColumnData readArrowBuffer(ByteBuffer data, Iter nodes } int ioff = offsets.get(i); int len = offsets.get(i + 1) - ioff; - data.position((int) (payload.offset().toFloat64()) + ioff); + data.position((int) (payload.offset()) + ioff); if (buf.length < len) { buf = new byte[len]; } @@ -548,7 +544,7 @@ private static ColumnData readArrowBuffer(ByteBuffer data, Iter nodes } int ioff = offsets.get(i); int len = offsets.get(i + 1) - ioff; - data.position((int) (payload.offset().toFloat64()) + ioff); + data.position((int) (payload.offset()) + ioff); int scale = data.getInt(); len -= 4; if (buf == null || buf.length != len) { @@ -570,7 +566,7 @@ private static ColumnData readArrowBuffer(ByteBuffer data, Iter nodes if (buf == null || buf.length != len) { buf = new byte[len]; } - data.position((int) (payload.offset().toFloat64()) + ioff); + data.position((int) (payload.offset()) + ioff); bigIntArray[i] = readBigInt(data, buf); } return new BigIntegerArrayColumnData(bigIntArray); @@ -595,13 +591,13 @@ private static BigInteger readBigInt(ByteBuffer data, byte[] buf) { } private static BitSet readValidityBufferAsBitset(ByteBuffer data, int size, Buffer buffer) { - if (size == 0 || buffer.length().toFloat64() == 0) { + if (size == 0 || buffer.length() == 0) { // these buffers are optional (and empty) if the column is empty, or if it has primitives and we've allowed // DH nulls return new BitSet(0); } - data.position((int) buffer.offset().toFloat64()); - BitSet valid = readBitSetWithLength(data, (int) (buffer.length().toFloat64())); + data.position((int) buffer.offset()); + BitSet valid = readBitSetWithLength(data, (int) (buffer.length())); return valid; } @@ -617,7 +613,7 @@ private static IntBuffer readOffsets(ByteBuffer data, int size, Buffer buffer) { IntBuffer emptyOffsets = IntBuffer.allocate(1); return emptyOffsets; } - data.position((int) buffer.offset().toFloat64()); + data.position((int) buffer.offset()); IntBuffer offsets = data.slice().asIntBuffer(); offsets.limit(size + 1); return offsets; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/parse/JsDataHandler.java b/web/client-api/src/main/java/io/deephaven/web/client/api/parse/JsDataHandler.java index d3edf0e03f4..1c6c9f0766a 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/parse/JsDataHandler.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/parse/JsDataHandler.java @@ -3,6 +3,7 @@ // package io.deephaven.web.client.api.parse; +import com.google.flatbuffers.FlatBufferBuilder; import com.google.gwt.i18n.client.TimeZone; import elemental2.core.ArrayBuffer; import elemental2.core.Float32Array; @@ -14,14 +15,6 @@ import elemental2.core.TypedArray; import elemental2.core.Uint16Array; import elemental2.core.Uint8Array; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Binary; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.FixedSizeBinary; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.FloatingPoint; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Int; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Precision; 
-import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Type; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Utf8; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder; import io.deephaven.web.client.api.LongWrapper; import io.deephaven.web.client.api.i18n.JsDateTimeFormat; import io.deephaven.web.client.api.i18n.JsTimeZone; @@ -29,6 +22,13 @@ import io.deephaven.web.shared.fu.JsFunction; import jsinterop.base.Js; import jsinterop.base.JsArrayLike; +import org.apache.arrow.flatbuf.Binary; +import org.apache.arrow.flatbuf.FixedSizeBinary; +import org.apache.arrow.flatbuf.FloatingPoint; +import org.apache.arrow.flatbuf.Int; +import org.apache.arrow.flatbuf.Precision; +import org.apache.arrow.flatbuf.Type; +import org.apache.arrow.flatbuf.Utf8; import org.gwtproject.nio.TypedArrayHelper; import java.nio.ByteBuffer; @@ -101,8 +101,9 @@ public Uint8Array build() { } @Override - public double writeType(Builder builder) { - return Utf8.createUtf8(builder); + public int writeType(FlatBufferBuilder builder) { + Utf8.startUtf8(builder); + return Utf8.endUtf8(builder); } @Override @@ -201,7 +202,7 @@ private long parseDateString(String str, ParseContext context) { } @Override - public double writeType(Builder builder) { + public int writeType(FlatBufferBuilder builder) { return Int.createInt(builder, 64, true); } @@ -249,7 +250,7 @@ public void write(Object[] data, ParseContext context, JsConsumer addNode, }, INTEGER(Type.Int, "int") { @Override - public double writeType(Builder builder) { + public int writeType(FlatBufferBuilder builder) { return Int.createInt(builder, 32, true); } @@ -261,7 +262,7 @@ public void write(Object[] data, ParseContext context, JsConsumer addNode, }, SHORT(Type.Int, "short") { @Override - public double writeType(Builder builder) { + public int writeType(FlatBufferBuilder builder) { return Int.createInt(builder, 16, true); } @@ -273,7 +274,7 @@ public void write(Object[] data, ParseContext context, JsConsumer addNode, }, LONG(Type.Int, "long") { @Override - public double writeType(Builder builder) { + public int writeType(FlatBufferBuilder builder) { return Int.createInt(builder, 64, true); } @@ -319,7 +320,7 @@ public void write(Object[] data, ParseContext context, JsConsumer addNode, }, BYTE(Type.Int, "byte") { @Override - public double writeType(Builder builder) { + public int writeType(FlatBufferBuilder builder) { return Int.createInt(builder, 8, true); } @@ -331,7 +332,7 @@ public void write(Object[] data, ParseContext context, JsConsumer addNode, }, CHAR(Type.Int, "char") { @Override - public double writeType(Builder builder) { + public int writeType(FlatBufferBuilder builder) { return Int.createInt(builder, 16, false); } @@ -343,7 +344,7 @@ public void write(Object[] data, ParseContext context, JsConsumer addNode, }, FLOAT(Type.FloatingPoint, "float") { @Override - public double writeType(Builder builder) { + public int writeType(FlatBufferBuilder builder) { return FloatingPoint.createFloatingPoint(builder, Precision.SINGLE); } @@ -355,7 +356,7 @@ public void write(Object[] data, ParseContext context, JsConsumer addNode, }, DOUBLE(Type.FloatingPoint, "double") { @Override - public double writeType(Builder builder) { + public int writeType(FlatBufferBuilder builder) { return FloatingPoint.createFloatingPoint(builder, Precision.DOUBLE); } @@ -368,7 +369,7 @@ public void write(Object[] data, ParseContext context, JsConsumer addNode, 
}, BOOLEAN(Type.Bool, "boolean", "bool", "java.lang.Boolean") { @Override - public double writeType(Builder builder) { + public int writeType(FlatBufferBuilder builder) { return Int.createInt(builder, 8, true); } @@ -440,25 +441,27 @@ public void write(Object[] data, ParseContext context, JsConsumer addNode, }, BIG_DECIMAL(Type.Binary, "java.util.BigDecimal") { @Override - public double writeType(Builder builder) { - return Binary.createBinary(builder); + public int writeType(FlatBufferBuilder builder) { + Binary.startBinary(builder); + return Binary.endBinary(builder); } }, BIG_INTEGER(Type.Binary, "java.util.BigInteger") { @Override - public double writeType(Builder builder) { - return Binary.createBinary(builder); + public int writeType(FlatBufferBuilder builder) { + Binary.startBinary(builder); + return Binary.endBinary(builder); } }, LOCAL_DATE(Type.FixedSizeBinary, "java.time.LocalDate", "localdate") { @Override - public double writeType(Builder builder) { + public int writeType(FlatBufferBuilder builder) { return FixedSizeBinary.createFixedSizeBinary(builder, 6); } }, LOCAL_TIME(Type.FixedSizeBinary, "java.time.LocalTime", "localtime") { @Override - public double writeType(Builder builder) { + public int writeType(FlatBufferBuilder builder) { return FixedSizeBinary.createFixedSizeBinary(builder, 7); } }, @@ -540,10 +543,10 @@ private static class HandlersHolder { private static final int SEPARATOR_INDEX = DEFAULT_DATE_TIME_PATTERN.indexOf('T'); - private final int arrowTypeType; + private final byte arrowTypeType; private final String deephavenType; - JsDataHandler(int arrowTypeType, String... typeNames) { + JsDataHandler(byte arrowTypeType, String... typeNames) { this.arrowTypeType = arrowTypeType; assert typeNames.length > 0 : "Must have at least one name"; this.deephavenType = typeNames[0]; @@ -553,7 +556,7 @@ private static class HandlersHolder { } } - public int typeType() { + public byte typeType() { return arrowTypeType; } @@ -561,7 +564,7 @@ public String deephavenType() { return deephavenType; } - public abstract double writeType(Builder builder); + public abstract int writeType(FlatBufferBuilder builder); public void write(Object[] data, ParseContext context, JsConsumer addNode, JsConsumer addBuffer) { throw new UnsupportedOperationException("Can't parse " + name()); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java index d567fbd3094..cd2acacb189 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java @@ -3,6 +3,7 @@ // package io.deephaven.web.client.api.subscription; +import com.google.flatbuffers.FlatBufferBuilder; import com.vertispan.tsdefs.annotations.TsInterface; import com.vertispan.tsdefs.annotations.TsName; import elemental2.core.Uint8Array; @@ -11,18 +12,15 @@ import elemental2.dom.DomGlobal; import elemental2.promise.IThenable; import elemental2.promise.Promise; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.message_generated.org.apache.arrow.flatbuf.Message; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.message_generated.org.apache.arrow.flatbuf.MessageHeader; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.message_generated.org.apache.arrow.flatbuf.RecordBatch; 
+import io.deephaven.barrage.flatbuf.BarrageMessageType; +import io.deephaven.barrage.flatbuf.BarrageMessageWrapper; +import io.deephaven.barrage.flatbuf.BarrageSnapshotOptions; +import io.deephaven.barrage.flatbuf.BarrageSnapshotRequest; +import io.deephaven.barrage.flatbuf.BarrageSubscriptionRequest; +import io.deephaven.barrage.flatbuf.BarrageUpdateMetadata; +import io.deephaven.barrage.flatbuf.ColumnConversionMode; import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb.FlightData; import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer; -import io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf.BarrageMessageType; -import io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf.BarrageMessageWrapper; -import io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf.BarrageSnapshotOptions; -import io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf.BarrageSnapshotRequest; -import io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf.BarrageUpdateMetadata; -import io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf.ColumnConversionMode; import io.deephaven.web.client.api.Callbacks; import io.deephaven.web.client.api.Column; import io.deephaven.web.client.api.HasEventHandling; @@ -41,6 +39,10 @@ import jsinterop.annotations.JsNullable; import jsinterop.annotations.JsOptional; import jsinterop.base.Js; +import org.apache.arrow.flatbuf.Message; +import org.apache.arrow.flatbuf.MessageHeader; +import org.apache.arrow.flatbuf.RecordBatch; +import org.gwtproject.nio.TypedArrayHelper; import java.util.Arrays; import java.util.BitSet; @@ -320,15 +322,16 @@ public Promise snapshot(JsRangeSet rows, Column[] columns) { c::apply), new FlightData()); - Builder doGetRequest = new Builder(1024); - double columnsOffset = BarrageSnapshotRequest.createColumnsVector(doGetRequest, - makeUint8ArrayFromBitset(columnBitset)); - double viewportOffset = BarrageSnapshotRequest.createViewportVector(doGetRequest, serializeRanges( + FlatBufferBuilder doGetRequest = new FlatBufferBuilder(1024); + int columnsOffset = BarrageSnapshotRequest.createColumnsVector(doGetRequest, + columnBitset.toByteArray()); + int viewportOffset = BarrageSnapshotRequest.createViewportVector(doGetRequest, serializeRanges( Collections.singleton(rows.getRange()))); - double serializationOptionsOffset = BarrageSnapshotOptions + int serializationOptionsOffset = BarrageSnapshotOptions .createBarrageSnapshotOptions(doGetRequest, ColumnConversionMode.Stringify, true, 0, 0); - double tableTicketOffset = - BarrageSnapshotRequest.createTicketVector(doGetRequest, state.getHandle().getTicket()); + int tableTicketOffset = + BarrageSnapshotRequest.createTicketVector(doGetRequest, + TypedArrayHelper.wrap(state.getHandle().getTicket())); BarrageSnapshotRequest.startBarrageSnapshotRequest(doGetRequest); BarrageSnapshotRequest.addTicket(doGetRequest, tableTicketOffset); BarrageSnapshotRequest.addColumns(doGetRequest, columnsOffset); @@ -343,24 +346,22 @@ public Promise snapshot(JsRangeSet rows, Column[] columns) { stream.end(); stream.onData(flightData -> { - Message message = 
Message.getRootAsMessage(new ByteBuffer(flightData.getDataHeader_asU8())); + Message message = Message.getRootAsMessage(TypedArrayHelper.wrap(flightData.getDataHeader_asU8())); if (message.headerType() == MessageHeader.Schema) { // ignore for now, we'll handle this later return; } assert message.headerType() == MessageHeader.RecordBatch; - RecordBatch header = message.header(new RecordBatch()); + RecordBatch header = (RecordBatch) message.header(new RecordBatch()); Uint8Array appMetadataBytes = flightData.getAppMetadata_asU8(); BarrageUpdateMetadata update = null; if (appMetadataBytes.length != 0) { BarrageMessageWrapper barrageMessageWrapper = - BarrageMessageWrapper.getRootAsBarrageMessageWrapper( - new io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer( - appMetadataBytes)); + BarrageMessageWrapper + .getRootAsBarrageMessageWrapper(TypedArrayHelper.wrap(appMetadataBytes)); update = BarrageUpdateMetadata.getRootAsBarrageUpdateMetadata( - new ByteBuffer( - new Uint8Array(barrageMessageWrapper.msgPayloadArray()))); + barrageMessageWrapper.msgPayloadAsByteBuffer()); } TableSnapshot snapshot = WebBarrageUtils.createSnapshot(header, WebBarrageUtils.typedArrayToLittleEndianByteBuffer(flightData.getDataBody_asU8()), update, diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java index 79a28fb2e0d..7e53c04708d 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java @@ -3,6 +3,7 @@ // package io.deephaven.web.client.api.tree; +import com.google.flatbuffers.FlatBufferBuilder; import com.vertispan.tsdefs.annotations.TsInterface; import com.vertispan.tsdefs.annotations.TsName; import com.vertispan.tsdefs.annotations.TsUnion; @@ -14,19 +15,13 @@ import elemental2.dom.DomGlobal; import elemental2.promise.IThenable; import elemental2.promise.Promise; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.message_generated.org.apache.arrow.flatbuf.Message; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.message_generated.org.apache.arrow.flatbuf.MessageHeader; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.message_generated.org.apache.arrow.flatbuf.RecordBatch; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Schema; +import io.deephaven.barrage.flatbuf.BarrageMessageType; +import io.deephaven.barrage.flatbuf.BarrageMessageWrapper; +import io.deephaven.barrage.flatbuf.BarrageSubscriptionOptions; +import io.deephaven.barrage.flatbuf.BarrageSubscriptionRequest; +import io.deephaven.barrage.flatbuf.BarrageUpdateMetadata; +import io.deephaven.barrage.flatbuf.ColumnConversionMode; import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb.FlightData; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer; -import io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf.BarrageMessageType; -import io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf.BarrageMessageWrapper; -import 
io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf.BarrageSubscriptionOptions; -import io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf.BarrageSubscriptionRequest; -import io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf.BarrageUpdateMetadata; -import io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf.ColumnConversionMode; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.hierarchicaltable_pb.HierarchicalTableApplyRequest; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.hierarchicaltable_pb.HierarchicalTableDescriptor; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.hierarchicaltable_pb.HierarchicalTableSourceExportRequest; @@ -62,6 +57,11 @@ import jsinterop.annotations.JsType; import jsinterop.base.Any; import jsinterop.base.Js; +import org.apache.arrow.flatbuf.Message; +import org.apache.arrow.flatbuf.MessageHeader; +import org.apache.arrow.flatbuf.RecordBatch; +import org.apache.arrow.flatbuf.Schema; +import org.gwtproject.nio.TypedArrayHelper; import java.util.*; import java.util.function.Function; @@ -668,19 +668,17 @@ private void replaceSubscription(RebuildStep step) { new FlightData()); FlightData subscriptionRequestWrapper = new FlightData(); - Builder doGetRequest = new Builder(1024); - double columnsOffset = BarrageSubscriptionRequest.createColumnsVector(doGetRequest, - makeUint8ArrayFromBitset(columnsBitset)); - double viewportOffset = BarrageSubscriptionRequest.createViewportVector(doGetRequest, - serializeRanges( - Collections.singleton( - range))); - double serializationOptionsOffset = BarrageSubscriptionOptions + FlatBufferBuilder doGetRequest = new FlatBufferBuilder(1024); + int columnsOffset = BarrageSubscriptionRequest.createColumnsVector(doGetRequest, + columnsBitset.toByteArray()); + int viewportOffset = BarrageSubscriptionRequest.createViewportVector(doGetRequest, + serializeRanges(Collections.singleton(range))); + int serializationOptionsOffset = BarrageSubscriptionOptions .createBarrageSubscriptionOptions(doGetRequest, ColumnConversionMode.Stringify, true, - updateInterval, 0, 0); - double tableTicketOffset = + updateInterval, 0, 0, false); + int tableTicketOffset = BarrageSubscriptionRequest.createTicketVector(doGetRequest, - viewTicket.ticket().getTicket_asU8()); + TypedArrayHelper.wrap(viewTicket.ticket().getTicket_asU8())); BarrageSubscriptionRequest.startBarrageSubscriptionRequest(doGetRequest); BarrageSubscriptionRequest.addTicket(doGetRequest, tableTicketOffset); BarrageSubscriptionRequest.addColumns(doGetRequest, columnsOffset); @@ -704,24 +702,23 @@ private void replaceSubscription(RebuildStep step) { this.stream = null; }); doExchange.onData(flightData -> { - Message message = Message.getRootAsMessage(new ByteBuffer(flightData.getDataHeader_asU8())); + Message message = + Message.getRootAsMessage(TypedArrayHelper.wrap(flightData.getDataHeader_asU8())); if (message.headerType() == MessageHeader.Schema) { // ignore for now, we'll handle this later return; } assert message.headerType() == MessageHeader.RecordBatch; - RecordBatch header = message.header(new RecordBatch()); + RecordBatch header = (RecordBatch) message.header(new RecordBatch()); Uint8Array appMetadataBytes = flightData.getAppMetadata_asU8(); BarrageUpdateMetadata update = null; 
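                    // app_metadata, when present, wraps a BarrageMessageWrapper whose payload is the
                    // BarrageUpdateMetadata describing the rows in this batch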
if (appMetadataBytes.length != 0) {
                        BarrageMessageWrapper barrageMessageWrapper =
-                                BarrageMessageWrapper.getRootAsBarrageMessageWrapper(
-                                        new ByteBuffer(
-                                                appMetadataBytes));
+                                BarrageMessageWrapper
+                                        .getRootAsBarrageMessageWrapper(TypedArrayHelper.wrap(appMetadataBytes));
                         update = BarrageUpdateMetadata.getRootAsBarrageUpdateMetadata(
-                                new ByteBuffer(
-                                        new Uint8Array(barrageMessageWrapper.msgPayloadArray())));
+                                barrageMessageWrapper.msgPayloadAsByteBuffer());
                    }
                    TableSnapshot snapshot = WebBarrageUtils.createSnapshot(header,
                            WebBarrageUtils.typedArrayToLittleEndianByteBuffer(flightData.getDataBody_asU8()),
diff --git a/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java b/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java
index 355e3f8e6f2..0afcb0e0b53 100644
--- a/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java
+++ b/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java
@@ -8,14 +8,12 @@
 import elemental2.core.JsSet;
 import elemental2.core.Uint8Array;
 import elemental2.promise.Promise;
-import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Schema;
 import io.deephaven.javascript.proto.dhinternal.browserheaders.BrowserHeaders;
 import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.table_pb.ExportedTableCreationResponse;
 import io.deephaven.web.client.api.*;
 import io.deephaven.web.client.api.barrage.WebBarrageUtils;
 import io.deephaven.web.client.api.barrage.def.ColumnDefinition;
 import io.deephaven.web.client.api.barrage.def.InitialTableDefinition;
-import io.deephaven.web.client.api.barrage.def.TableAttributesDefinition;
 import io.deephaven.web.client.api.batch.TableConfig;
 import io.deephaven.web.client.api.filter.FilterCondition;
 import io.deephaven.web.client.api.lifecycle.HasLifecycle;
@@ -26,6 +24,7 @@
 import io.deephaven.web.shared.data.*;
 import io.deephaven.web.shared.fu.*;
 import jsinterop.base.Js;
+import org.apache.arrow.flatbuf.Schema;

 import java.util.*;
 import java.util.function.BinaryOperator;
diff --git a/web/client-api/src/main/resources/com/google/flatbuffers/FlatBuffers.gwt.xml b/web/client-api/src/main/resources/com/google/flatbuffers/FlatBuffers.gwt.xml
new file mode 100644
index 00000000000..34f7a7661ba
--- /dev/null
+++ b/web/client-api/src/main/resources/com/google/flatbuffers/FlatBuffers.gwt.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/web/client-api/src/main/resources/com/google/flatbuffers/super/com/google/flatbuffers/ArrayReadWriteBuf.java b/web/client-api/src/main/resources/com/google/flatbuffers/super/com/google/flatbuffers/ArrayReadWriteBuf.java
new file mode 100644
index 00000000000..5e46eedb823
--- /dev/null
+++ b/web/client-api/src/main/resources/com/google/flatbuffers/super/com/google/flatbuffers/ArrayReadWriteBuf.java
@@ -0,0 +1,252 @@
+package com.google.flatbuffers;
+
+import java.nio.Numbers;
+import java.util.Arrays;
+
+/**
+ * Implements {@code ReadWriteBuf} using an array of bytes
+ * as backing storage. Using an array of bytes is
+ * usually faster than {@code ByteBuffer}.
+ *
+ * This class is not thread-safe, meaning that
+ * it must operate on a single thread.
Operating from + * multiple threads leads to undefined behavior + */ +public class ArrayReadWriteBuf implements ReadWriteBuf { + + private byte[] buffer; + private int writePos; + + public ArrayReadWriteBuf() { + this(10); + } + + public ArrayReadWriteBuf(int initialCapacity) { + this(new byte[initialCapacity]); + } + + public ArrayReadWriteBuf(byte[] buffer) { + this.buffer = buffer; + this.writePos = 0; + } + + public ArrayReadWriteBuf(byte[] buffer, int startPos) { + this.buffer = buffer; + this.writePos = startPos; + } + + public void clear() { + this.writePos = 0; + } + + @Override + public boolean getBoolean(int index) { + return buffer[index] != 0; + } + + @Override + public byte get(int index) { + return buffer[index]; + } + + @Override + public short getShort(int index) { + return (short) ((buffer[index+ 1] << 8) | (buffer[index] & 0xff)); + } + + @Override + public int getInt(int index) { + return (((buffer[index + 3]) << 24) | + ((buffer[index + 2] & 0xff) << 16) | + ((buffer[index + 1] & 0xff) << 8) | + ((buffer[index] & 0xff))); + } + + @Override + public long getLong(int index) { + return ((((long) buffer[index++] & 0xff)) | + (((long) buffer[index++] & 0xff) << 8) | + (((long) buffer[index++] & 0xff) << 16) | + (((long) buffer[index++] & 0xff) << 24) | + (((long) buffer[index++] & 0xff) << 32) | + (((long) buffer[index++] & 0xff) << 40) | + (((long) buffer[index++] & 0xff) << 48) | + (((long) buffer[index]) << 56)); + } + + @Override + public float getFloat(int index) { + return Float.intBitsToFloat(getInt(index)); + } + + @Override + public double getDouble(int index) { + return Double.longBitsToDouble(getLong(index)); + } + + @Override + public String getString(int start, int size) { + return Utf8Safe.decodeUtf8Array(buffer, start, size); + } + + @Override + public byte[] data() { + return buffer; + } + + + @Override + public void putBoolean(boolean value) { + setBoolean(writePos, value); + writePos++; + } + + @Override + public void put(byte[] value, int start, int length) { + set(writePos, value, start, length); + writePos+=length; + } + + @Override + public void put(byte value) { + set(writePos, value); + writePos++; + } + + @Override + public void putShort(short value) { + setShort(writePos, value); + writePos +=2; + } + + @Override + public void putInt(int value) { + setInt(writePos, value); + writePos +=4; + } + + @Override + public void putLong(long value) { + setLong(writePos, value); + writePos +=8; + } + + @Override + public void putFloat(float value) { + setFloat(writePos, value); + writePos +=4; + } + + @Override + public void putDouble(double value) { + setDouble(writePos, value); + writePos +=8; + } + + @Override + public void setBoolean(int index, boolean value) { + set(index, value ?
(byte)1 : (byte)0); + } + + @Override + public void set(int index, byte value) { + requestCapacity(index + 1); + buffer[index] = value; + } + + @Override + public void set(int index, byte[] toCopy, int start, int length) { + requestCapacity(index + (length - start)); + System.arraycopy(toCopy, start, buffer, index, length); + } + + @Override + public void setShort(int index, short value) { + requestCapacity(index + 2); + + buffer[index++] = (byte) ((value) & 0xff); + buffer[index ] = (byte) ((value >> 8) & 0xff); + } + + @Override + public void setInt(int index, int value) { + requestCapacity(index + 4); + + buffer[index++] = (byte) ((value) & 0xff); + buffer[index++] = (byte) ((value >> 8) & 0xff); + buffer[index++] = (byte) ((value >> 16) & 0xff); + buffer[index ] = (byte) ((value >> 24) & 0xff); + } + + @Override + public void setLong(int index, long value) { + requestCapacity(index + 8); + + int i = (int) value; + buffer[index++] = (byte) ((i) & 0xff); + buffer[index++] = (byte) ((i >> 8) & 0xff); + buffer[index++] = (byte) ((i >> 16) & 0xff); + buffer[index++] = (byte) ((i >> 24) & 0xff); + i = (int) (value >> 32); + buffer[index++] = (byte) ((i) & 0xff); + buffer[index++] = (byte) ((i >> 8) & 0xff); + buffer[index++] = (byte) ((i >> 16) & 0xff); + buffer[index ] = (byte) ((i >> 24) & 0xff); + } + + @Override + public void setFloat(int index, float value) { + requestCapacity(index + 4); + + int iValue = java.nio.Numbers.floatToIntBits(value); + buffer[index++] = (byte) ((iValue) & 0xff); + buffer[index++] = (byte) ((iValue >> 8) & 0xff); + buffer[index++] = (byte) ((iValue >> 16) & 0xff); + buffer[index ] = (byte) ((iValue >> 24) & 0xff); + } + + @Override + public void setDouble(int index, double value) { + requestCapacity(index + 8); + + long lValue = java.nio.Numbers.doubleToRawLongBits(value); + int i = (int) lValue; + buffer[index++] = (byte) ((i) & 0xff); + buffer[index++] = (byte) ((i >> 8) & 0xff); + buffer[index++] = (byte) ((i >> 16) & 0xff); + buffer[index++] = (byte) ((i >> 24) & 0xff); + i = (int) (lValue >> 32); + buffer[index++] = (byte) ((i) & 0xff); + buffer[index++] = (byte) ((i >> 8) & 0xff); + buffer[index++] = (byte) ((i >> 16) & 0xff); + buffer[index ] = (byte) ((i >> 24) & 0xff); + } + + @Override + public int limit() { + return writePos; + } + + @Override + public int writePosition() { + return writePos; + } + + @Override + public boolean requestCapacity(int capacity) { + if (capacity < 0) { + throw new IllegalArgumentException("Capacity may not be negative (likely a previous int overflow)"); + } + if (buffer.length >= capacity) { + return true; + } + // implemented in the same growing fashion as ArrayList + int oldCapacity = buffer.length; + int newCapacity = oldCapacity + (oldCapacity >> 1); + if (newCapacity < capacity) { // Note: this also catches newCapacity int overflow + newCapacity = capacity; + } + buffer = Arrays.copyOf(buffer, newCapacity); + return true; + } +} diff --git a/web/client-api/src/main/resources/com/google/flatbuffers/super/com/google/flatbuffers/Utf8Old.java b/web/client-api/src/main/resources/com/google/flatbuffers/super/com/google/flatbuffers/Utf8Old.java new file mode 100644 index 00000000000..615b615dd38 --- /dev/null +++ b/web/client-api/src/main/resources/com/google/flatbuffers/super/com/google/flatbuffers/Utf8Old.java @@ -0,0 +1,8 @@ +package com.google.flatbuffers; + +/** + * Empty implementation since this isn't supported in GWT. 
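+ * The JVM implementation relies on JRE charset encoder/decoder machinery that GWT + * does not emulate, so the pure-Java {@code Utf8Safe} implementation is used instead.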
+ */ +public abstract class Utf8Old extends Utf8 { + +} \ No newline at end of file diff --git a/web/client-api/src/main/resources/com/google/flatbuffers/super/com/google/flatbuffers/Utf8Safe.java b/web/client-api/src/main/resources/com/google/flatbuffers/super/com/google/flatbuffers/Utf8Safe.java new file mode 100644 index 00000000000..00625e3150f --- /dev/null +++ b/web/client-api/src/main/resources/com/google/flatbuffers/super/com/google/flatbuffers/Utf8Safe.java @@ -0,0 +1,448 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package com.google.flatbuffers; + +import java.nio.ByteBuffer; +import static java.lang.Character.MAX_SURROGATE; +import static java.lang.Character.MIN_SUPPLEMENTARY_CODE_POINT; +import static java.lang.Character.MIN_SURROGATE; +import static java.lang.Character.isSurrogatePair; +import static java.lang.Character.toCodePoint; + +/** + * A set of low-level, high-performance static utility methods related + * to the UTF-8 character encoding. This class has no dependencies + * outside of the core JDK libraries. + * + *

<p>There are several variants of UTF-8. The one implemented by + * this class is the restricted definition of UTF-8 introduced in + * Unicode 3.1, which mandates the rejection of "overlong" byte + * sequences as well as rejection of 3-byte surrogate codepoint byte + * sequences. Note that the UTF-8 decoder included in Oracle's JDK + * has been modified to also reject "overlong" byte sequences, but (as + * of 2011) still accepts 3-byte surrogate codepoint byte sequences. + * + * <p>The byte sequences considered valid by this class are exactly + * those that can be roundtrip converted to Strings and back to bytes + * using the UTF-8 charset, without loss: <pre>{@code + * Arrays.equals(bytes, new String(bytes, Internal.UTF_8).getBytes(Internal.UTF_8)) + * }</pre> + * + * <p>See the Unicode Standard,<br> + * Table 3-6. UTF-8 Bit Distribution,
+ * Table 3-7. Well Formed UTF-8 Byte Sequences. + */ +final public class Utf8Safe extends Utf8 { + + /** + * Returns the number of bytes in the UTF-8-encoded form of {@code sequence}. For a string, + * this method is equivalent to {@code string.getBytes(UTF_8).length}, but is more efficient in + * both time and space. + * + * @throws IllegalArgumentException if {@code sequence} contains ill-formed UTF-16 (unpaired + * surrogates) + */ + private static int computeEncodedLength(CharSequence sequence) { + // Warning to maintainers: this implementation is highly optimized. + int utf16Length = sequence.length(); + int utf8Length = utf16Length; + int i = 0; + + // This loop optimizes for pure ASCII. + while (i < utf16Length && sequence.charAt(i) < 0x80) { + i++; + } + + // This loop optimizes for chars less than 0x800. + for (; i < utf16Length; i++) { + char c = sequence.charAt(i); + if (c < 0x800) { + utf8Length += ((0x7f - c) >>> 31); // branch free! + } else { + utf8Length += encodedLengthGeneral(sequence, i); + break; + } + } + + if (utf8Length < utf16Length) { + // Necessary and sufficient condition for overflow because of maximum 3x expansion + throw new IllegalArgumentException("UTF-8 length does not fit in int: " + + (utf8Length + (1L << 32))); + } + return utf8Length; + } + + private static int encodedLengthGeneral(CharSequence sequence, int start) { + int utf16Length = sequence.length(); + int utf8Length = 0; + for (int i = start; i < utf16Length; i++) { + char c = sequence.charAt(i); + if (c < 0x800) { + utf8Length += (0x7f - c) >>> 31; // branch free! + } else { + utf8Length += 2; + // jdk7+: if (Character.isSurrogate(c)) { + if (Character.MIN_SURROGATE <= c && c <= Character.MAX_SURROGATE) { + // Check that we have a well-formed surrogate pair. + int cp = Character.codePointAt(sequence, i); + if (cp < MIN_SUPPLEMENTARY_CODE_POINT) { + throw new Utf8Safe.UnpairedSurrogateException(i, utf16Length); + } + i++; + } + } + } + return utf8Length; + } + + public static String decodeUtf8Array(byte[] bytes, int index, int size) { + // Bitwise OR combines the sign bits so any negative value fails the check. + if ((index | size | bytes.length - index - size) < 0) { + throw new ArrayIndexOutOfBoundsException("buffer length=" + bytes.length + ", index=" + index + ", size=" + size); + } + + int offset = index; + final int limit = offset + size; + + // The longest possible resulting String is the same as the number of input bytes, when it is + // all ASCII. For other cases, this over-allocates and we will truncate in the end. + char[] resultArr = new char[size]; + int resultPos = 0; + + // Optimize for 100% ASCII (Hotspot loves small simple top-level loops like this). + // This simple loop stops when we encounter a byte >= 0x80 (i.e. non-ASCII). + while (offset < limit) { + byte b = bytes[offset]; + if (!DecodeUtil.isOneByte(b)) { + break; + } + offset++; + DecodeUtil.handleOneByte(b, resultArr, resultPos++); + } + + while (offset < limit) { + byte byte1 = bytes[offset++]; + if (DecodeUtil.isOneByte(byte1)) { + DecodeUtil.handleOneByte(byte1, resultArr, resultPos++); + // It's common for there to be multiple ASCII characters in a run mixed in, so add an + // extra optimized loop to take care of these runs. 
+ while (offset < limit) { + byte b = bytes[offset]; + if (!DecodeUtil.isOneByte(b)) { + break; + } + offset++; + DecodeUtil.handleOneByte(b, resultArr, resultPos++); + } + } else if (DecodeUtil.isTwoBytes(byte1)) { + if (offset >= limit) { + throw new IllegalArgumentException("Invalid UTF-8"); + } + DecodeUtil.handleTwoBytes(byte1, /* byte2 */ bytes[offset++], resultArr, resultPos++); + } else if (DecodeUtil.isThreeBytes(byte1)) { + if (offset >= limit - 1) { + throw new IllegalArgumentException("Invalid UTF-8"); + } + DecodeUtil.handleThreeBytes( + byte1, + /* byte2 */ bytes[offset++], + /* byte3 */ bytes[offset++], + resultArr, + resultPos++); + } else { + if (offset >= limit - 2) { + throw new IllegalArgumentException("Invalid UTF-8"); + } + DecodeUtil.handleFourBytes( + byte1, + /* byte2 */ bytes[offset++], + /* byte3 */ bytes[offset++], + /* byte4 */ bytes[offset++], + resultArr, + resultPos++); + // 4-byte case requires two chars. + resultPos++; + } + } + + return new String(resultArr, 0, resultPos); + } + + public static String decodeUtf8Buffer(ByteBuffer buffer, int offset, + int length) { + // Bitwise OR combines the sign bits so any negative value fails the check. + if ((offset | length | buffer.limit() - offset - length) < 0) { + throw new ArrayIndexOutOfBoundsException("buffer limit=" + buffer.limit() + ", index=" + offset + ", limit=" + length); + } + + final int limit = offset + length; + + // The longest possible resulting String is the same as the number of input bytes, when it is + // all ASCII. For other cases, this over-allocates and we will truncate in the end. + char[] resultArr = new char[length]; + int resultPos = 0; + + // Optimize for 100% ASCII (Hotspot loves small simple top-level loops like this). + // This simple loop stops when we encounter a byte >= 0x80 (i.e. non-ASCII). + while (offset < limit) { + byte b = buffer.get(offset); + if (!DecodeUtil.isOneByte(b)) { + break; + } + offset++; + DecodeUtil.handleOneByte(b, resultArr, resultPos++); + } + + while (offset < limit) { + byte byte1 = buffer.get(offset++); + if (DecodeUtil.isOneByte(byte1)) { + DecodeUtil.handleOneByte(byte1, resultArr, resultPos++); + // It's common for there to be multiple ASCII characters in a run mixed in, so add an + // extra optimized loop to take care of these runs. + while (offset < limit) { + byte b = buffer.get(offset); + if (!DecodeUtil.isOneByte(b)) { + break; + } + offset++; + DecodeUtil.handleOneByte(b, resultArr, resultPos++); + } + } else if (DecodeUtil.isTwoBytes(byte1)) { + if (offset >= limit) { + throw new IllegalArgumentException("Invalid UTF-8"); + } + DecodeUtil.handleTwoBytes( + byte1, /* byte2 */ buffer.get(offset++), resultArr, resultPos++); + } else if (DecodeUtil.isThreeBytes(byte1)) { + if (offset >= limit - 1) { + throw new IllegalArgumentException("Invalid UTF-8"); + } + DecodeUtil.handleThreeBytes( + byte1, + /* byte2 */ buffer.get(offset++), + /* byte3 */ buffer.get(offset++), + resultArr, + resultPos++); + } else { + if (offset >= limit - 2) { + throw new IllegalArgumentException("Invalid UTF-8"); + } + DecodeUtil.handleFourBytes( + byte1, + /* byte2 */ buffer.get(offset++), + /* byte3 */ buffer.get(offset++), + /* byte4 */ buffer.get(offset++), + resultArr, + resultPos++); + // 4-byte case requires two chars. 
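+ // (handleFourBytes stored a surrogate pair, so skip past the low surrogate slot as well)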
+ resultPos++; + } + } + + return new String(resultArr, 0, resultPos); + } + + @Override + public int encodedLength(CharSequence in) { + return computeEncodedLength(in); + } + + /** + * Decodes the given UTF-8 portion of the {@link ByteBuffer} into a {@link String}. + * + * @throws IllegalArgumentException if the input is not valid UTF-8. + */ + @Override + public String decodeUtf8(ByteBuffer buffer, int offset, int length) + throws IllegalArgumentException { + if (buffer.hasArray()) { + return decodeUtf8Array(buffer.array(), buffer.arrayOffset() + offset, length); + } else { + return decodeUtf8Buffer(buffer, offset, length); + } + } + + + private static void encodeUtf8Buffer(CharSequence in, ByteBuffer out) { + final int inLength = in.length(); + int outIx = out.position(); + int inIx = 0; + + // Since ByteBuffer.putXXX() already checks boundaries for us, no need to explicitly check + // access. Assume the buffer is big enough and let it handle the out of bounds exception + // if it occurs. + try { + // Designed to take advantage of + // https://wikis.oracle.com/display/HotSpotInternals/RangeCheckElimination + for (char c; inIx < inLength && (c = in.charAt(inIx)) < 0x80; ++inIx) { + out.put(outIx + inIx, (byte) c); + } + if (inIx == inLength) { + // Successfully encoded the entire string. + out.position(outIx + inIx); + return; + } + + outIx += inIx; + for (char c; inIx < inLength; ++inIx, ++outIx) { + c = in.charAt(inIx); + if (c < 0x80) { + // One byte (0xxx xxxx) + out.put(outIx, (byte) c); + } else if (c < 0x800) { + // Two bytes (110x xxxx 10xx xxxx) + + // Benchmarks show put performs better than putShort here (for HotSpot). + out.put(outIx++, (byte) (0xC0 | (c >>> 6))); + out.put(outIx, (byte) (0x80 | (0x3F & c))); + } else if (c < MIN_SURROGATE || MAX_SURROGATE < c) { + // Three bytes (1110 xxxx 10xx xxxx 10xx xxxx) + // Maximum single-char code point is 0xFFFF, 16 bits. + + // Benchmarks show put performs better than putShort here (for HotSpot). + out.put(outIx++, (byte) (0xE0 | (c >>> 12))); + out.put(outIx++, (byte) (0x80 | (0x3F & (c >>> 6)))); + out.put(outIx, (byte) (0x80 | (0x3F & c))); + } else { + // Four bytes (1111 xxxx 10xx xxxx 10xx xxxx 10xx xxxx) + + // Minimum code point represented by a surrogate pair is 0x10000, 17 bits, four UTF-8 + // bytes + final char low; + if (inIx + 1 == inLength || !isSurrogatePair(c, (low = in.charAt(++inIx)))) { + throw new UnpairedSurrogateException(inIx, inLength); + } + // TODO(nathanmittler): Consider using putInt() to improve performance. + int codePoint = toCodePoint(c, low); + out.put(outIx++, (byte) ((0xF << 4) | (codePoint >>> 18))); + out.put(outIx++, (byte) (0x80 | (0x3F & (codePoint >>> 12)))); + out.put(outIx++, (byte) (0x80 | (0x3F & (codePoint >>> 6)))); + out.put(outIx, (byte) (0x80 | (0x3F & codePoint))); + } + } + + // Successfully encoded the entire string. + out.position(outIx); + } catch (IndexOutOfBoundsException e) { + // TODO(nathanmittler): Consider making the API throw IndexOutOfBoundsException instead. + + // If we failed in the outer ASCII loop, outIx will not have been updated. In this case, + // use inIx to determine the bad write index. 
+ int badWriteIndex = out.position() + Math.max(inIx, outIx - out.position() + 1); + throw new ArrayIndexOutOfBoundsException( + "Failed writing " + in.charAt(inIx) + " at index " + badWriteIndex); + } + } + + private static int encodeUtf8Array(CharSequence in, byte[] out, + int offset, int length) { + int utf16Length = in.length(); + int j = offset; + int i = 0; + int limit = offset + length; + // Designed to take advantage of + // https://wikis.oracle.com/display/HotSpotInternals/RangeCheckElimination + for (char c; i < utf16Length && i + j < limit && (c = in.charAt(i)) < 0x80; i++) { + out[j + i] = (byte) c; + } + if (i == utf16Length) { + return j + utf16Length; + } + j += i; + for (char c; i < utf16Length; i++) { + c = in.charAt(i); + if (c < 0x80 && j < limit) { + out[j++] = (byte) c; + } else if (c < 0x800 && j <= limit - 2) { // 11 bits, two UTF-8 bytes + out[j++] = (byte) ((0xF << 6) | (c >>> 6)); + out[j++] = (byte) (0x80 | (0x3F & c)); + } else if ((c < Character.MIN_SURROGATE || Character.MAX_SURROGATE < c) && j <= limit - 3) { + // Maximum single-char code point is 0xFFFF, 16 bits, three UTF-8 bytes + out[j++] = (byte) ((0xF << 5) | (c >>> 12)); + out[j++] = (byte) (0x80 | (0x3F & (c >>> 6))); + out[j++] = (byte) (0x80 | (0x3F & c)); + } else if (j <= limit - 4) { + // Minimum code point represented by a surrogate pair is 0x10000, 17 bits, + // four UTF-8 bytes + final char low; + if (i + 1 == in.length() + || !Character.isSurrogatePair(c, (low = in.charAt(++i)))) { + throw new UnpairedSurrogateException((i - 1), utf16Length); + } + int codePoint = Character.toCodePoint(c, low); + out[j++] = (byte) ((0xF << 4) | (codePoint >>> 18)); + out[j++] = (byte) (0x80 | (0x3F & (codePoint >>> 12))); + out[j++] = (byte) (0x80 | (0x3F & (codePoint >>> 6))); + out[j++] = (byte) (0x80 | (0x3F & codePoint)); + } else { + // If we are surrogates and we're not a surrogate pair, always throw an + // UnpairedSurrogateException instead of an ArrayOutOfBoundsException. + if ((Character.MIN_SURROGATE <= c && c <= Character.MAX_SURROGATE) + && (i + 1 == in.length() + || !Character.isSurrogatePair(c, in.charAt(i + 1)))) { + throw new UnpairedSurrogateException(i, utf16Length); + } + throw new ArrayIndexOutOfBoundsException("Failed writing " + c + " at index " + j); + } + } + return j; + } + + /** + * Encodes the given characters to the target {@link ByteBuffer} using UTF-8 encoding. + * + *
<p>
Selects an optimal algorithm based on the type of {@link ByteBuffer} (i.e. heap or direct) + * and the capabilities of the platform. + * + * @param in the source string to be encoded + * @param out the target buffer to receive the encoded string. + */ + @Override + public void encodeUtf8(CharSequence in, ByteBuffer out) { + if (out.hasArray()) { + int start = out.arrayOffset(); + int end = encodeUtf8Array(in, out.array(), start + out.position(), + out.remaining()); + out.position(end - start); + } else { + encodeUtf8Buffer(in, out); + } + } + + // These UTF-8 handling methods are copied from Guava's Utf8Unsafe class with + // a modification to throw a local exception. This exception can be caught + // to fallback to more lenient behavior. + static class UnpairedSurrogateException extends IllegalArgumentException { + UnpairedSurrogateException(int index, int length) { + super("Unpaired surrogate at index " + index + " of " + length); + } + } +} diff --git a/web/client-api/src/main/resources/io/deephaven/barrage/flatbuf/BarrageFlatbufFormat.gwt.xml b/web/client-api/src/main/resources/io/deephaven/barrage/flatbuf/BarrageFlatbufFormat.gwt.xml new file mode 100644 index 00000000000..98ceaadd944 --- /dev/null +++ b/web/client-api/src/main/resources/io/deephaven/barrage/flatbuf/BarrageFlatbufFormat.gwt.xml @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/web/client-api/src/main/resources/org/apache/arrow/flatbuf/FlightFlatbufFormat.gwt.xml b/web/client-api/src/main/resources/org/apache/arrow/flatbuf/FlightFlatbufFormat.gwt.xml new file mode 100644 index 00000000000..98ceaadd944 --- /dev/null +++ b/web/client-api/src/main/resources/org/apache/arrow/flatbuf/FlightFlatbufFormat.gwt.xml @@ -0,0 +1,4 @@ + + + + \ No newline at end of file From 9a7813975f7d46c1824a48c8ea6e04ac3695f003 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 29 Dec 2023 15:40:16 -0600 Subject: [PATCH 002/219] Be more explicit about copying data and managing alignment Also removes more flatbuffer code that we don't need now that we're using the java artifacts Compiles and runs correctly, though tests are still a little light. --- .../raw-js-openapi/flight_format/Message.fbs | 138 - proto/raw-js-openapi/flight_format/README.md | 3 - proto/raw-js-openapi/flight_format/Schema.fbs | 430 --- proto/raw-js-openapi/package.json | 3 - .../arrow/flight/flatbuf/Message_generated.ts | 741 ----- .../arrow/flight/flatbuf/Schema_generated.ts | 2807 ----------------- proto/raw-js-openapi/src/index.js | 15 - .../web/client/api/WorkerConnection.java | 12 +- .../client/api/barrage/WebBarrageUtils.java | 99 +- .../TableViewportSubscription.java | 25 +- .../web/client/api/tree/JsTreeTable.java | 10 +- .../google/flatbuffers/ArrayReadWriteBuf.java | 6 +- 12 files changed, 71 insertions(+), 4218 deletions(-) delete mode 100644 proto/raw-js-openapi/flight_format/Message.fbs delete mode 100644 proto/raw-js-openapi/flight_format/README.md delete mode 100644 proto/raw-js-openapi/flight_format/Schema.fbs delete mode 100644 proto/raw-js-openapi/src/arrow/flight/flatbuf/Message_generated.ts delete mode 100644 proto/raw-js-openapi/src/arrow/flight/flatbuf/Schema_generated.ts diff --git a/proto/raw-js-openapi/flight_format/Message.fbs b/proto/raw-js-openapi/flight_format/Message.fbs deleted file mode 100644 index b93c9e991e9..00000000000 --- a/proto/raw-js-openapi/flight_format/Message.fbs +++ /dev/null @@ -1,138 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. 
See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -include "Schema.fbs"; - -namespace org.apache.arrow.flatbuf; - -/// ---------------------------------------------------------------------- -/// Data structures for describing a table row batch (a collection of -/// equal-length Arrow arrays) - -/// Metadata about a field at some level of a nested type tree (but not -/// its children). -/// -/// For example, a List with values `[[1, 2, 3], null, [4], [5, 6], null]` -/// would have {length: 5, null_count: 2} for its List node, and {length: 6, -/// null_count: 0} for its Int16 node, as separate FieldNode structs -struct FieldNode { - /// The number of value slots in the Arrow array at this level of a nested - /// tree - length: long; - - /// The number of observed nulls. Fields with null_count == 0 may choose not - /// to write their physical validity bitmap out as a materialized buffer, - /// instead setting the length of the bitmap buffer to 0. - null_count: long; -} - -enum CompressionType:byte { - // LZ4 frame format, for portability, as provided by lz4frame.h or wrappers - // thereof. Not to be confused with "raw" (also called "block") format - // provided by lz4.h - LZ4_FRAME, - - // Zstandard - ZSTD -} - -/// Provided for forward compatibility in case we need to support different -/// strategies for compressing the IPC message body (like whole-body -/// compression rather than buffer-level) in the future -enum BodyCompressionMethod:byte { - /// Each constituent buffer is first compressed with the indicated - /// compressor, and then written with the uncompressed length in the first 8 - /// bytes as a 64-bit little-endian signed integer followed by the compressed - /// buffer bytes (and then padding as required by the protocol). The - /// uncompressed length may be set to -1 to indicate that the data that - /// follows is not compressed, which can be useful for cases where - /// compression does not yield appreciable savings. - BUFFER -} - -/// Optional compression for the memory buffers constituting IPC message -/// bodies. Intended for use with RecordBatch but could be used for other -/// message types -table BodyCompression { - /// Compressor library - codec: CompressionType = LZ4_FRAME; - - /// Indicates the way the record batch body was compressed - method: BodyCompressionMethod = BUFFER; -} - -/// A data header describing the shared memory layout of a "record" or "row" -/// batch. Some systems call this a "row batch" internally and others a "record -/// batch". -table RecordBatch { - /// number of records / rows. The arrays in the batch should all have this - /// length - length: long; - - /// Nodes correspond to the pre-ordered flattened logical schema - nodes: [FieldNode]; - - /// Buffers correspond to the pre-ordered flattened buffer tree - /// - /// The number of buffers appended to this list depends on the schema. 
For - /// example, most primitive arrays will have 2 buffers, 1 for the validity - /// bitmap and 1 for the values. For struct arrays, there will only be a - /// single buffer for the validity (nulls) bitmap - buffers: [Buffer]; - - /// Optional compression of the message body - compression: BodyCompression; -} - -/// For sending dictionary encoding information. Any Field can be -/// dictionary-encoded, but in this case none of its children may be -/// dictionary-encoded. -/// There is one vector / column per dictionary, but that vector / column -/// may be spread across multiple dictionary batches by using the isDelta -/// flag - -table DictionaryBatch { - id: long; - data: RecordBatch; - - /// If isDelta is true the values in the dictionary are to be appended to a - /// dictionary with the indicated id. If isDelta is false this dictionary - /// should replace the existing dictionary. - isDelta: bool = false; -} - -/// ---------------------------------------------------------------------- -/// The root Message type - -/// This union enables us to easily send different message types without -/// redundant storage, and in the future we can easily add new message types. -/// -/// Arrow implementations do not need to implement all of the message types, -/// which may include experimental metadata types. For maximum compatibility, -/// it is best to send data using RecordBatch -union MessageHeader { - Schema, DictionaryBatch, RecordBatch -} - -table Message { - version: org.apache.arrow.flatbuf.MetadataVersion; - header: MessageHeader; - bodyLength: long; - custom_metadata: [ KeyValue ]; -} - -root_type Message; diff --git a/proto/raw-js-openapi/flight_format/README.md b/proto/raw-js-openapi/flight_format/README.md deleted file mode 100644 index 52d45ad2b7e..00000000000 --- a/proto/raw-js-openapi/flight_format/README.md +++ /dev/null @@ -1,3 +0,0 @@ -Currently we are generating flatbuffer files manually. See [deephaven-core/#1052](https://github.com/deephaven/deephaven-core/issues/1052) to track the work to automate this. - -${FLATC} --ts --no-fb-import --no-ts-reexport -o src/arrow/flight/flatbuf/ flight_format/\*.fbs diff --git a/proto/raw-js-openapi/flight_format/Schema.fbs b/proto/raw-js-openapi/flight_format/Schema.fbs deleted file mode 100644 index 2d447d30791..00000000000 --- a/proto/raw-js-openapi/flight_format/Schema.fbs +++ /dev/null @@ -1,430 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -/// Logical types, vector layouts, and schemas - -namespace org.apache.arrow.flatbuf; - -enum MetadataVersion:short { - /// 0.1.0 (October 2016). - V1, - - /// 0.2.0 (February 2017). Non-backwards compatible with V1. - V2, - - /// 0.3.0 -> 0.7.1 (May - December 2017). Non-backwards compatible with V2. 
- V3, - - /// >= 0.8.0 (December 2017). Non-backwards compatible with V3. - V4, - - /// >= 1.0.0 (July 2020. Backwards compatible with V4 (V5 readers can read V4 - /// metadata and IPC messages). Implementations are recommended to provide a - /// V4 compatibility mode with V5 format changes disabled. - /// - /// Incompatible changes between V4 and V5: - /// - Union buffer layout has changed. In V5, Unions don't have a validity - /// bitmap buffer. - V5, -} - -/// Represents Arrow Features that might not have full support -/// within implementations. This is intended to be used in -/// two scenarios: -/// 1. A mechanism for readers of Arrow Streams -/// and files to understand that the stream or file makes -/// use of a feature that isn't supported or unknown to -/// the implementation (and therefore can meet the Arrow -/// forward compatibility guarantees). -/// 2. A means of negotiating between a client and server -/// what features a stream is allowed to use. The enums -/// values here are intented to represent higher level -/// features, additional details maybe negotiated -/// with key-value pairs specific to the protocol. -/// -/// Enums added to this list should be assigned power-of-two values -/// to facilitate exchanging and comparing bitmaps for supported -/// features. -enum Feature : long { - /// Needed to make flatbuffers happy. - UNUSED = 0, - /// The stream makes use of multiple full dictionaries with the - /// same ID and assumes clients implement dictionary replacement - /// correctly. - DICTIONARY_REPLACEMENT = 1, - /// The stream makes use of compressed bodies as described - /// in Message.fbs. - COMPRESSED_BODY = 2 -} - -/// These are stored in the flatbuffer in the Type union below - -table Null { -} - -/// A Struct_ in the flatbuffer metadata is the same as an Arrow Struct -/// (according to the physical memory layout). We used Struct_ here as -/// Struct is a reserved word in Flatbuffers -table Struct_ { -} - -table List { -} - -/// Same as List, but with 64-bit offsets, allowing to represent -/// extremely large data values. -table LargeList { -} - -table FixedSizeList { - /// Number of list items per value - listSize: int; -} - -/// A Map is a logical nested type that is represented as -/// -/// List> -/// -/// In this layout, the keys and values are each respectively contiguous. We do -/// not constrain the key and value types, so the application is responsible -/// for ensuring that the keys are hashable and unique. Whether the keys are sorted -/// may be set in the metadata for this field. -/// -/// In a field with Map type, the field has a child Struct field, which then -/// has two children: key type and the second the value type. The names of the -/// child fields may be respectively "entries", "key", and "value", but this is -/// not enforced. -/// -/// Map -/// ```text -/// - child[0] entries: Struct -/// - child[0] key: K -/// - child[1] value: V -/// ``` -/// Neither the "entries" field nor the "key" field may be nullable. -/// -/// The metadata is structured so that Arrow systems without special handling -/// for Map can make Map an alias for List. The "layout" attribute for the Map -/// field must have the same contents as a List. 
-table Map { - /// Set to true if the keys within each value are sorted - keysSorted: bool; -} - -enum UnionMode:short { Sparse, Dense } - -/// A union is a complex type with children in Field -/// By default ids in the type vector refer to the offsets in the children -/// optionally typeIds provides an indirection between the child offset and the type id -/// for each child `typeIds[offset]` is the id used in the type vector -table Union { - mode: UnionMode; - typeIds: [ int ]; // optional, describes typeid of each child. -} - -table Int { - bitWidth: int; // restricted to 8, 16, 32, and 64 in v1 - is_signed: bool; -} - -enum Precision:short {HALF, SINGLE, DOUBLE} - -table FloatingPoint { - precision: Precision; -} - -/// Unicode with UTF-8 encoding -table Utf8 { -} - -/// Opaque binary data -table Binary { -} - -/// Same as Utf8, but with 64-bit offsets, allowing to represent -/// extremely large data values. -table LargeUtf8 { -} - -/// Same as Binary, but with 64-bit offsets, allowing to represent -/// extremely large data values. -table LargeBinary { -} - -table FixedSizeBinary { - /// Number of bytes per value - byteWidth: int; -} - -table Bool { -} - -/// Exact decimal value represented as an integer value in two's -/// complement. Currently only 128-bit (16-byte) and 256-bit (32-byte) integers -/// are used. The representation uses the endianness indicated -/// in the Schema. -table Decimal { - /// Total number of decimal digits - precision: int; - - /// Number of digits after the decimal point "." - scale: int; - - /// Number of bits per value. The only accepted widths are 128 and 256. - /// We use bitWidth for consistency with Int::bitWidth. - bitWidth: int = 128; -} - -enum DateUnit: short { - DAY, - MILLISECOND -} - -/// Date is either a 32-bit or 64-bit type representing elapsed time since UNIX -/// epoch (1970-01-01), stored in either of two units: -/// -/// * Milliseconds (64 bits) indicating UNIX time elapsed since the epoch (no -/// leap seconds), where the values are evenly divisible by 86400000 -/// * Days (32 bits) since the UNIX epoch -table Date { - unit: DateUnit = MILLISECOND; -} - -enum TimeUnit: short { SECOND, MILLISECOND, MICROSECOND, NANOSECOND } - -/// Time type. The physical storage type depends on the unit -/// - SECOND and MILLISECOND: 32 bits -/// - MICROSECOND and NANOSECOND: 64 bits -table Time { - unit: TimeUnit = MILLISECOND; - bitWidth: int = 32; -} - -/// Time elapsed from the Unix epoch, 00:00:00.000 on 1 January 1970, excluding -/// leap seconds, as a 64-bit integer. Note that UNIX time does not include -/// leap seconds. -/// -/// Date & time libraries often have multiple different data types for temporal -/// data. In order to ease interoperability between different implementations the -/// Arrow project has some recommendations for encoding these types into a Timestamp -/// column. -/// -/// An "instant" represents a single moment in time that has no meaningful time zone -/// or the time zone is unknown. A column of instants can also contain values from -/// multiple time zones. To encode an instant set the timezone string to "UTC". -/// -/// A "zoned date-time" represents a single moment in time that has a meaningful -/// reference time zone. To encode a zoned date-time as a Timestamp set the timezone -/// string to the name of the timezone. There is some ambiguity between an instant -/// and a zoned date-time with the UTC time zone. Both of these are stored the same. -/// Typically, this distinction does not matter. 
If it does, then an application should -/// use custom metadata or an extension type to distinguish between the two cases. -/// -/// An "offset date-time" represents a single moment in time combined with a meaningful -/// offset from UTC. To encode an offset date-time as a Timestamp set the timezone string -/// to the numeric time zone offset string (e.g. "+03:00"). -/// -/// A "local date-time" does not represent a single moment in time. It represents a wall -/// clock time combined with a date. Because of daylight savings time there may multiple -/// instants that correspond to a single local date-time in any given time zone. A -/// local date-time is often stored as a struct or a Date32/Time64 pair. However, it can -/// also be encoded into a Timestamp column. To do so the value should be the the time -/// elapsed from the Unix epoch so that a wall clock in UTC would display the desired time. -/// The timezone string should be set to null or the empty string. -table Timestamp { - unit: TimeUnit; - - /// The time zone is a string indicating the name of a time zone, one of: - /// - /// * As used in the Olson time zone database (the "tz database" or - /// "tzdata"), such as "America/New_York" - /// * An absolute time zone offset of the form +XX:XX or -XX:XX, such as +07:30 - /// - /// Whether a timezone string is present indicates different semantics about - /// the data: - /// - /// * If the time zone is null or an empty string, the data is a local date-time - /// and does not represent a single moment in time. Instead it represents a wall clock - /// time and care should be taken to avoid interpreting it semantically as an instant. - /// - /// * If the time zone is set to a valid value, values can be displayed as - /// "localized" to that time zone, even though the underlying 64-bit - /// integers are identical to the same data stored in UTC. Converting - /// between time zones is a metadata-only operation and does not change the - /// underlying values - timezone: string; -} - -enum IntervalUnit: short { YEAR_MONTH, DAY_TIME} -// A "calendar" interval which models types that don't necessarily -// have a precise duration without the context of a base timestamp (e.g. -// days can differ in length during day light savings time transitions). -// YEAR_MONTH - Indicates the number of elapsed whole months, stored as -// 4-byte integers. -// DAY_TIME - Indicates the number of elapsed days and milliseconds, -// stored as 2 contiguous 32-bit integers (8-bytes in total). Support -// of this IntervalUnit is not required for full arrow compatibility. -table Interval { - unit: IntervalUnit; -} - -// An absolute length of time unrelated to any calendar artifacts. -// -// For the purposes of Arrow Implementations, adding this value to a Timestamp -// ("t1") naively (i.e. simply summing the two number) is acceptable even -// though in some cases the resulting Timestamp (t2) would not account for -// leap-seconds during the elapsed time between "t1" and "t2". Similarly, -// representing the difference between two Unix timestamp is acceptable, but -// would yield a value that is possibly a few seconds off from the true elapsed -// time. -// -// The resolution defaults to millisecond, but can be any of the other -// supported TimeUnit values as with Timestamp and Time types. This type is -// always represented as an 8-byte integer. 
-table Duration { - unit: TimeUnit = MILLISECOND; -} - -/// ---------------------------------------------------------------------- -/// Top-level Type value, enabling extensible type-specific metadata. We can -/// add new logical types to Type without breaking backwards compatibility - -union Type { - Null, - Int, - FloatingPoint, - Binary, - Utf8, - Bool, - Decimal, - Date, - Time, - Timestamp, - Interval, - List, - Struct_, - Union, - FixedSizeBinary, - FixedSizeList, - Map, - Duration, - LargeBinary, - LargeUtf8, - LargeList, -} - -/// ---------------------------------------------------------------------- -/// user defined key value pairs to add custom metadata to arrow -/// key namespacing is the responsibility of the user - -table KeyValue { - key: string; - value: string; -} - -/// ---------------------------------------------------------------------- -/// Dictionary encoding metadata -/// Maintained for forwards compatibility, in the future -/// Dictionaries might be explicit maps between integers and values -/// allowing for non-contiguous index values -enum DictionaryKind : short { DenseArray } -table DictionaryEncoding { - /// The known dictionary id in the application where this data is used. In - /// the file or streaming formats, the dictionary ids are found in the - /// DictionaryBatch messages - id: long; - - /// The dictionary indices are constrained to be non-negative integers. If - /// this field is null, the indices must be signed int32. To maximize - /// cross-language compatibility and performance, implementations are - /// recommended to prefer signed integer types over unsigned integer types - /// and to avoid uint64 indices unless they are required by an application. - indexType: Int; - - /// By default, dictionaries are not ordered, or the order does not have - /// semantic meaning. In some statistical, applications, dictionary-encoding - /// is used to represent ordered categorical data, and we provide a way to - /// preserve that metadata here - isOrdered: bool; - - dictionaryKind: DictionaryKind; -} - -/// ---------------------------------------------------------------------- -/// A field represents a named column in a record / row batch or child of a -/// nested type. - -table Field { - /// Name is not required, in i.e. a List - name: string; - - /// Whether or not this field can contain nulls. Should be true in general. - nullable: bool; - - /// This is the type of the decoded value if the field is dictionary encoded. - type: Type; - - /// Present only if the field is dictionary encoded. - dictionary: DictionaryEncoding; - - /// children apply only to nested data types like Struct, List and Union. For - /// primitive types children will have length 0. - children: [ Field ]; - - /// User-defined metadata - custom_metadata: [ KeyValue ]; -} - -/// ---------------------------------------------------------------------- -/// Endianness of the platform producing the data - -enum Endianness:short { Little, Big } - -/// ---------------------------------------------------------------------- -/// A Buffer represents a single contiguous memory segment -struct Buffer { - /// The relative offset into the shared memory page where the bytes for this - /// buffer starts - offset: long; - - /// The absolute length (in bytes) of the memory buffer. The memory is found - /// from offset (inclusive) to offset + length (non-inclusive). 
When building - /// messages using the encapsulated IPC message, padding bytes may be written - /// after a buffer, but such padding bytes do not need to be accounted for in - /// the size here. - length: long; -} - -/// ---------------------------------------------------------------------- -/// A Schema describes the columns in a row batch - -table Schema { - - /// endianness of the buffer - /// it is Little Endian by default - /// if endianness doesn't match the underlying system then the vectors need to be converted - endianness: Endianness=Little; - - fields: [Field]; - // User-defined metadata - custom_metadata: [ KeyValue ]; - - /// Features used in the stream/file. - features : [ Feature ]; -} - -root_type Schema; diff --git a/proto/raw-js-openapi/package.json b/proto/raw-js-openapi/package.json index 455c3d67837..fdb83f4ec86 100644 --- a/proto/raw-js-openapi/package.json +++ b/proto/raw-js-openapi/package.json @@ -1,9 +1,6 @@ { "dependencies": { - "@deephaven/barrage": "0.5.0", "@improbable-eng/grpc-web": "^0.14.0", - "apache-arrow": "7.0.0", - "flatbuffers": "1.12.0", "google-protobuf": "^3.20.1" }, "devDependencies": { diff --git a/proto/raw-js-openapi/src/arrow/flight/flatbuf/Message_generated.ts b/proto/raw-js-openapi/src/arrow/flight/flatbuf/Message_generated.ts deleted file mode 100644 index 8d86b6aa04f..00000000000 --- a/proto/raw-js-openapi/src/arrow/flight/flatbuf/Message_generated.ts +++ /dev/null @@ -1,741 +0,0 @@ -// automatically generated by the FlatBuffers compiler, do not modify - -import * as NS17716817176095924048 from "./Schema_generated"; -/** - * @enum {number} - */ -export namespace org.apache.arrow.flatbuf{ -export enum CompressionType{ - LZ4_FRAME= 0, - ZSTD= 1 -}; -} - -/** - * Provided for forward compatibility in case we need to support different - * strategies for compressing the IPC message body (like whole-body - * compression rather than buffer-level) in the future - * - * @enum {number} - */ -export namespace org.apache.arrow.flatbuf{ -export enum BodyCompressionMethod{ - /** - * Each constituent buffer is first compressed with the indicated - * compressor, and then written with the uncompressed length in the first 8 - * bytes as a 64-bit little-endian signed integer followed by the compressed - * buffer bytes (and then padding as required by the protocol). The - * uncompressed length may be set to -1 to indicate that the data that - * follows is not compressed, which can be useful for cases where - * compression does not yield appreciable savings. - */ - BUFFER= 0 -}; -} - -/** - * ---------------------------------------------------------------------- - * The root Message type - * This union enables us to easily send different message types without - * redundant storage, and in the future we can easily add new message types. - * - * Arrow implementations do not need to implement all of the message types, - * which may include experimental metadata types. 
For maximum compatibility, - * it is best to send data using RecordBatch - * - * @enum {number} - */ -export namespace org.apache.arrow.flatbuf{ -export enum MessageHeader{ - NONE= 0, - Schema= 1, - DictionaryBatch= 2, - RecordBatch= 3 -}; - -export function unionToMessageHeader( - type: MessageHeader, - accessor: (obj:NS17716817176095924048.org.apache.arrow.flatbuf.Schema|org.apache.arrow.flatbuf.DictionaryBatch|org.apache.arrow.flatbuf.RecordBatch) => NS17716817176095924048.org.apache.arrow.flatbuf.Schema|org.apache.arrow.flatbuf.DictionaryBatch|org.apache.arrow.flatbuf.RecordBatch|null -): NS17716817176095924048.org.apache.arrow.flatbuf.Schema|org.apache.arrow.flatbuf.DictionaryBatch|org.apache.arrow.flatbuf.RecordBatch|null { - switch(org.apache.arrow.flatbuf.MessageHeader[type]) { - case 'NONE': return null; - case 'Schema': return accessor(new NS17716817176095924048.org.apache.arrow.flatbuf.Schema())! as NS17716817176095924048.org.apache.arrow.flatbuf.Schema; - case 'DictionaryBatch': return accessor(new org.apache.arrow.flatbuf.DictionaryBatch())! as org.apache.arrow.flatbuf.DictionaryBatch; - case 'RecordBatch': return accessor(new org.apache.arrow.flatbuf.RecordBatch())! as org.apache.arrow.flatbuf.RecordBatch; - default: return null; - } -} - -export function unionListToMessageHeader( - type: MessageHeader, - accessor: (index: number, obj:NS17716817176095924048.org.apache.arrow.flatbuf.Schema|org.apache.arrow.flatbuf.DictionaryBatch|org.apache.arrow.flatbuf.RecordBatch) => NS17716817176095924048.org.apache.arrow.flatbuf.Schema|org.apache.arrow.flatbuf.DictionaryBatch|org.apache.arrow.flatbuf.RecordBatch|null, - index: number -): NS17716817176095924048.org.apache.arrow.flatbuf.Schema|org.apache.arrow.flatbuf.DictionaryBatch|org.apache.arrow.flatbuf.RecordBatch|null { - switch(org.apache.arrow.flatbuf.MessageHeader[type]) { - case 'NONE': return null; - case 'Schema': return accessor(index, new NS17716817176095924048.org.apache.arrow.flatbuf.Schema())! as NS17716817176095924048.org.apache.arrow.flatbuf.Schema; - case 'DictionaryBatch': return accessor(index, new org.apache.arrow.flatbuf.DictionaryBatch())! as org.apache.arrow.flatbuf.DictionaryBatch; - case 'RecordBatch': return accessor(index, new org.apache.arrow.flatbuf.RecordBatch())! as org.apache.arrow.flatbuf.RecordBatch; - default: return null; - } -} -} - -/** - * ---------------------------------------------------------------------- - * Data structures for describing a table row batch (a collection of - * equal-length Arrow arrays) - * Metadata about a field at some level of a nested type tree (but not - * its children). - * - * For example, a List with values `[[1, 2, 3], null, [4], [5, 6], null]` - * would have {length: 5, null_count: 2} for its List node, and {length: 6, - * null_count: 0} for its Int16 node, as separate FieldNode structs - * - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class FieldNode { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns FieldNode - */ -__init(i:number, bb:flatbuffers.ByteBuffer):FieldNode { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * The number of value slots in the Arrow array at this level of a nested - * tree - * - * @returns flatbuffers.Long - */ -length():flatbuffers.Long { - return this.bb!.readInt64(this.bb_pos); -}; - -/** - * The number of observed nulls. 
Fields with null_count == 0 may choose not - * to write their physical validity bitmap out as a materialized buffer, - * instead setting the length of the bitmap buffer to 0. - * - * @returns flatbuffers.Long - */ -nullCount():flatbuffers.Long { - return this.bb!.readInt64(this.bb_pos + 8); -}; - -/** - * @returns number - */ -static sizeOf():number { - return 16; -} - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Long length - * @param flatbuffers.Long null_count - * @returns flatbuffers.Offset - */ -static createFieldNode(builder:flatbuffers.Builder, length: flatbuffers.Long, null_count: flatbuffers.Long):flatbuffers.Offset { - builder.prep(8, 16); - builder.writeInt64(null_count); - builder.writeInt64(length); - return builder.offset(); -}; - -} -} -/** - * Optional compression for the memory buffers constituting IPC message - * bodies. Intended for use with RecordBatch but could be used for other - * message types - * - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class BodyCompression { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns BodyCompression - */ -__init(i:number, bb:flatbuffers.ByteBuffer):BodyCompression { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param BodyCompression= obj - * @returns BodyCompression - */ -static getRootAsBodyCompression(bb:flatbuffers.ByteBuffer, obj?:BodyCompression):BodyCompression { - return (obj || new BodyCompression()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param BodyCompression= obj - * @returns BodyCompression - */ -static getSizePrefixedRootAsBodyCompression(bb:flatbuffers.ByteBuffer, obj?:BodyCompression):BodyCompression { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new BodyCompression()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * Compressor library - * - * @returns org.apache.arrow.flatbuf.CompressionType - */ -codec():org.apache.arrow.flatbuf.CompressionType { - var offset = this.bb!.__offset(this.bb_pos, 4); - return offset ? /** */ (this.bb!.readInt8(this.bb_pos + offset)) : org.apache.arrow.flatbuf.CompressionType.LZ4_FRAME; -}; - -/** - * Indicates the way the record batch body was compressed - * - * @returns org.apache.arrow.flatbuf.BodyCompressionMethod - */ -method():org.apache.arrow.flatbuf.BodyCompressionMethod { - var offset = this.bb!.__offset(this.bb_pos, 6); - return offset ? 
/** */ (this.bb!.readInt8(this.bb_pos + offset)) : org.apache.arrow.flatbuf.BodyCompressionMethod.BUFFER; -}; - -/** - * @param flatbuffers.Builder builder - */ -static startBodyCompression(builder:flatbuffers.Builder) { - builder.startObject(2); -}; - -/** - * @param flatbuffers.Builder builder - * @param org.apache.arrow.flatbuf.CompressionType codec - */ -static addCodec(builder:flatbuffers.Builder, codec:org.apache.arrow.flatbuf.CompressionType) { - builder.addFieldInt8(0, codec, org.apache.arrow.flatbuf.CompressionType.LZ4_FRAME); -}; - -/** - * @param flatbuffers.Builder builder - * @param org.apache.arrow.flatbuf.BodyCompressionMethod method - */ -static addMethod(builder:flatbuffers.Builder, method:org.apache.arrow.flatbuf.BodyCompressionMethod) { - builder.addFieldInt8(1, method, org.apache.arrow.flatbuf.BodyCompressionMethod.BUFFER); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endBodyCompression(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -static createBodyCompression(builder:flatbuffers.Builder, codec:org.apache.arrow.flatbuf.CompressionType, method:org.apache.arrow.flatbuf.BodyCompressionMethod):flatbuffers.Offset { - BodyCompression.startBodyCompression(builder); - BodyCompression.addCodec(builder, codec); - BodyCompression.addMethod(builder, method); - return BodyCompression.endBodyCompression(builder); -} -} -} -/** - * A data header describing the shared memory layout of a "record" or "row" - * batch. Some systems call this a "row batch" internally and others a "record - * batch". - * - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class RecordBatch { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns RecordBatch - */ -__init(i:number, bb:flatbuffers.ByteBuffer):RecordBatch { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param RecordBatch= obj - * @returns RecordBatch - */ -static getRootAsRecordBatch(bb:flatbuffers.ByteBuffer, obj?:RecordBatch):RecordBatch { - return (obj || new RecordBatch()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param RecordBatch= obj - * @returns RecordBatch - */ -static getSizePrefixedRootAsRecordBatch(bb:flatbuffers.ByteBuffer, obj?:RecordBatch):RecordBatch { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new RecordBatch()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * number of records / rows. The arrays in the batch should all have this - * length - * - * @returns flatbuffers.Long - */ -length():flatbuffers.Long { - var offset = this.bb!.__offset(this.bb_pos, 4); - return offset ? this.bb!.readInt64(this.bb_pos + offset) : this.bb!.createLong(0, 0); -}; - -/** - * Nodes correspond to the pre-ordered flattened logical schema - * - * @param number index - * @param org.apache.arrow.flatbuf.FieldNode= obj - * @returns org.apache.arrow.flatbuf.FieldNode - */ -nodes(index: number, obj?:org.apache.arrow.flatbuf.FieldNode):org.apache.arrow.flatbuf.FieldNode|null { - var offset = this.bb!.__offset(this.bb_pos, 6); - return offset ? (obj || new org.apache.arrow.flatbuf.FieldNode()).__init(this.bb!.__vector(this.bb_pos + offset) + index * 16, this.bb!) 
: null; -}; - -/** - * @returns number - */ -nodesLength():number { - var offset = this.bb!.__offset(this.bb_pos, 6); - return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0; -}; - -/** - * Buffers correspond to the pre-ordered flattened buffer tree - * - * The number of buffers appended to this list depends on the schema. For - * example, most primitive arrays will have 2 buffers, 1 for the validity - * bitmap and 1 for the values. For struct arrays, there will only be a - * single buffer for the validity (nulls) bitmap - * - * @param number index - * @param org.apache.arrow.flatbuf.Buffer= obj - * @returns org.apache.arrow.flatbuf.Buffer - */ -buffers(index: number, obj?:NS17716817176095924048.org.apache.arrow.flatbuf.Buffer):NS17716817176095924048.org.apache.arrow.flatbuf.Buffer|null { - var offset = this.bb!.__offset(this.bb_pos, 8); - return offset ? (obj || new NS17716817176095924048.org.apache.arrow.flatbuf.Buffer()).__init(this.bb!.__vector(this.bb_pos + offset) + index * 16, this.bb!) : null; -}; - -/** - * @returns number - */ -buffersLength():number { - var offset = this.bb!.__offset(this.bb_pos, 8); - return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0; -}; - -/** - * Optional compression of the message body - * - * @param org.apache.arrow.flatbuf.BodyCompression= obj - * @returns org.apache.arrow.flatbuf.BodyCompression|null - */ -compression(obj?:org.apache.arrow.flatbuf.BodyCompression):org.apache.arrow.flatbuf.BodyCompression|null { - var offset = this.bb!.__offset(this.bb_pos, 10); - return offset ? (obj || new org.apache.arrow.flatbuf.BodyCompression()).__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) : null; -}; - -/** - * @param flatbuffers.Builder builder - */ -static startRecordBatch(builder:flatbuffers.Builder) { - builder.startObject(4); -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Long length - */ -static addLength(builder:flatbuffers.Builder, length:flatbuffers.Long) { - builder.addFieldInt64(0, length, builder.createLong(0, 0)); -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Offset nodesOffset - */ -static addNodes(builder:flatbuffers.Builder, nodesOffset:flatbuffers.Offset) { - builder.addFieldOffset(1, nodesOffset, 0); -}; - -/** - * @param flatbuffers.Builder builder - * @param number numElems - */ -static startNodesVector(builder:flatbuffers.Builder, numElems:number) { - builder.startVector(16, numElems, 8); -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Offset buffersOffset - */ -static addBuffers(builder:flatbuffers.Builder, buffersOffset:flatbuffers.Offset) { - builder.addFieldOffset(2, buffersOffset, 0); -}; - -/** - * @param flatbuffers.Builder builder - * @param number numElems - */ -static startBuffersVector(builder:flatbuffers.Builder, numElems:number) { - builder.startVector(16, numElems, 8); -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Offset compressionOffset - */ -static addCompression(builder:flatbuffers.Builder, compressionOffset:flatbuffers.Offset) { - builder.addFieldOffset(3, compressionOffset, 0); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endRecordBatch(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -} -} -/** - * For sending dictionary encoding information. Any Field can be - * dictionary-encoded, but in this case none of its children may be - * dictionary-encoded. 
- * There is one vector / column per dictionary, but that vector / column - * may be spread across multiple dictionary batches by using the isDelta - * flag - * - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class DictionaryBatch { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns DictionaryBatch - */ -__init(i:number, bb:flatbuffers.ByteBuffer):DictionaryBatch { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param DictionaryBatch= obj - * @returns DictionaryBatch - */ -static getRootAsDictionaryBatch(bb:flatbuffers.ByteBuffer, obj?:DictionaryBatch):DictionaryBatch { - return (obj || new DictionaryBatch()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param DictionaryBatch= obj - * @returns DictionaryBatch - */ -static getSizePrefixedRootAsDictionaryBatch(bb:flatbuffers.ByteBuffer, obj?:DictionaryBatch):DictionaryBatch { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new DictionaryBatch()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @returns flatbuffers.Long - */ -id():flatbuffers.Long { - var offset = this.bb!.__offset(this.bb_pos, 4); - return offset ? this.bb!.readInt64(this.bb_pos + offset) : this.bb!.createLong(0, 0); -}; - -/** - * @param org.apache.arrow.flatbuf.RecordBatch= obj - * @returns org.apache.arrow.flatbuf.RecordBatch|null - */ -data(obj?:org.apache.arrow.flatbuf.RecordBatch):org.apache.arrow.flatbuf.RecordBatch|null { - var offset = this.bb!.__offset(this.bb_pos, 6); - return offset ? (obj || new org.apache.arrow.flatbuf.RecordBatch()).__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) : null; -}; - -/** - * If isDelta is true the values in the dictionary are to be appended to a - * dictionary with the indicated id. If isDelta is false this dictionary - * should replace the existing dictionary. - * - * @returns boolean - */ -isDelta():boolean { - var offset = this.bb!.__offset(this.bb_pos, 8); - return offset ? 
!!this.bb!.readInt8(this.bb_pos + offset) : false; -}; - -/** - * @param flatbuffers.Builder builder - */ -static startDictionaryBatch(builder:flatbuffers.Builder) { - builder.startObject(3); -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Long id - */ -static addId(builder:flatbuffers.Builder, id:flatbuffers.Long) { - builder.addFieldInt64(0, id, builder.createLong(0, 0)); -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Offset dataOffset - */ -static addData(builder:flatbuffers.Builder, dataOffset:flatbuffers.Offset) { - builder.addFieldOffset(1, dataOffset, 0); -}; - -/** - * @param flatbuffers.Builder builder - * @param boolean isDelta - */ -static addIsDelta(builder:flatbuffers.Builder, isDelta:boolean) { - builder.addFieldInt8(2, +isDelta, +false); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endDictionaryBatch(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -} -} -/** - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class Message { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns Message - */ -__init(i:number, bb:flatbuffers.ByteBuffer):Message { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Message= obj - * @returns Message - */ -static getRootAsMessage(bb:flatbuffers.ByteBuffer, obj?:Message):Message { - return (obj || new Message()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Message= obj - * @returns Message - */ -static getSizePrefixedRootAsMessage(bb:flatbuffers.ByteBuffer, obj?:Message):Message { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new Message()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @returns org.apache.arrow.flatbuf.MetadataVersion - */ -version():NS17716817176095924048.org.apache.arrow.flatbuf.MetadataVersion { - var offset = this.bb!.__offset(this.bb_pos, 4); - return offset ? /** */ (this.bb!.readInt16(this.bb_pos + offset)) : NS17716817176095924048.org.apache.arrow.flatbuf.MetadataVersion.V1; -}; - -/** - * @returns org.apache.arrow.flatbuf.MessageHeader - */ -headerType():org.apache.arrow.flatbuf.MessageHeader { - var offset = this.bb!.__offset(this.bb_pos, 6); - return offset ? /** */ (this.bb!.readUint8(this.bb_pos + offset)) : org.apache.arrow.flatbuf.MessageHeader.NONE; -}; - -/** - * @param flatbuffers.Table obj - * @returns ?flatbuffers.Table - */ -header<T extends flatbuffers.Table>(obj:T):T|null { - var offset = this.bb!.__offset(this.bb_pos, 8); - return offset ? this.bb!.__union(obj, this.bb_pos + offset) : null; -}; - -/** - * @returns flatbuffers.Long - */ -bodyLength():flatbuffers.Long { - var offset = this.bb!.__offset(this.bb_pos, 10); - return offset ? this.bb!.readInt64(this.bb_pos + offset) : this.bb!.createLong(0, 0); -}; - -/** - * @param number index - * @param org.apache.arrow.flatbuf.KeyValue= obj - * @returns org.apache.arrow.flatbuf.KeyValue - */ -customMetadata(index: number, obj?:NS17716817176095924048.org.apache.arrow.flatbuf.KeyValue):NS17716817176095924048.org.apache.arrow.flatbuf.KeyValue|null { - var offset = this.bb!.__offset(this.bb_pos, 12); - return offset ?
(obj || new NS17716817176095924048.org.apache.arrow.flatbuf.KeyValue()).__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) : null; -}; - -/** - * @returns number - */ -customMetadataLength():number { - var offset = this.bb!.__offset(this.bb_pos, 12); - return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0; -}; - -/** - * @param flatbuffers.Builder builder - */ -static startMessage(builder:flatbuffers.Builder) { - builder.startObject(5); -}; - -/** - * @param flatbuffers.Builder builder - * @param org.apache.arrow.flatbuf.MetadataVersion version - */ -static addVersion(builder:flatbuffers.Builder, version:NS17716817176095924048.org.apache.arrow.flatbuf.MetadataVersion) { - builder.addFieldInt16(0, version, NS17716817176095924048.org.apache.arrow.flatbuf.MetadataVersion.V1); -}; - -/** - * @param flatbuffers.Builder builder - * @param org.apache.arrow.flatbuf.MessageHeader headerType - */ -static addHeaderType(builder:flatbuffers.Builder, headerType:org.apache.arrow.flatbuf.MessageHeader) { - builder.addFieldInt8(1, headerType, org.apache.arrow.flatbuf.MessageHeader.NONE); -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Offset headerOffset - */ -static addHeader(builder:flatbuffers.Builder, headerOffset:flatbuffers.Offset) { - builder.addFieldOffset(2, headerOffset, 0); -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Long bodyLength - */ -static addBodyLength(builder:flatbuffers.Builder, bodyLength:flatbuffers.Long) { - builder.addFieldInt64(3, bodyLength, builder.createLong(0, 0)); -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Offset customMetadataOffset - */ -static addCustomMetadata(builder:flatbuffers.Builder, customMetadataOffset:flatbuffers.Offset) { - builder.addFieldOffset(4, customMetadataOffset, 0); -}; - -/** - * @param flatbuffers.Builder builder - * @param Array.<flatbuffers.Offset>
data - * @returns flatbuffers.Offset - */ -static createCustomMetadataVector(builder:flatbuffers.Builder, data:flatbuffers.Offset[]):flatbuffers.Offset { - builder.startVector(4, data.length, 4); - for (var i = data.length - 1; i >= 0; i--) { - builder.addOffset(data[i]); - } - return builder.endVector(); -}; - -/** - * @param flatbuffers.Builder builder - * @param number numElems - */ -static startCustomMetadataVector(builder:flatbuffers.Builder, numElems:number) { - builder.startVector(4, numElems, 4); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endMessage(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Offset offset - */ -static finishMessageBuffer(builder:flatbuffers.Builder, offset:flatbuffers.Offset) { - builder.finish(offset); -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Offset offset - */ -static finishSizePrefixedMessageBuffer(builder:flatbuffers.Builder, offset:flatbuffers.Offset) { - builder.finish(offset, undefined, true); -}; - -static createMessage(builder:flatbuffers.Builder, version:NS17716817176095924048.org.apache.arrow.flatbuf.MetadataVersion, headerType:org.apache.arrow.flatbuf.MessageHeader, headerOffset:flatbuffers.Offset, bodyLength:flatbuffers.Long, customMetadataOffset:flatbuffers.Offset):flatbuffers.Offset { - Message.startMessage(builder); - Message.addVersion(builder, version); - Message.addHeaderType(builder, headerType); - Message.addHeader(builder, headerOffset); - Message.addBodyLength(builder, bodyLength); - Message.addCustomMetadata(builder, customMetadataOffset); - return Message.endMessage(builder); -} -} -} diff --git a/proto/raw-js-openapi/src/arrow/flight/flatbuf/Schema_generated.ts b/proto/raw-js-openapi/src/arrow/flight/flatbuf/Schema_generated.ts deleted file mode 100644 index 1c485ed3b78..00000000000 --- a/proto/raw-js-openapi/src/arrow/flight/flatbuf/Schema_generated.ts +++ /dev/null @@ -1,2807 +0,0 @@ -// automatically generated by the FlatBuffers compiler, do not modify - -/** - * @enum {number} - */ -export namespace org.apache.arrow.flatbuf{ -export enum MetadataVersion{ - /** - * 0.1.0 (October 2016). - */ - V1= 0, - - /** - * 0.2.0 (February 2017). Non-backwards compatible with V1. - */ - V2= 1, - - /** - * 0.3.0 -> 0.7.1 (May - December 2017). Non-backwards compatible with V2. - */ - V3= 2, - - /** - * >= 0.8.0 (December 2017). Non-backwards compatible with V3. - */ - V4= 3, - - /** - * >= 1.0.0 (July 2020). Backwards compatible with V4 (V5 readers can read V4 - * metadata and IPC messages). Implementations are recommended to provide a - * V4 compatibility mode with V5 format changes disabled. - * - * Incompatible changes between V4 and V5: - * - Union buffer layout has changed. In V5, Unions don't have a validity - * bitmap buffer. - */ - V5= 4 -}; -} - -/** - * Represents Arrow Features that might not have full support - * within implementations. This is intended to be used in - * two scenarios: - * 1. A mechanism for readers of Arrow Streams - * and files to understand that the stream or file makes - * use of a feature that isn't supported or unknown to - * the implementation (and therefore can meet the Arrow - * forward compatibility guarantees). - * 2. A means of negotiating between a client and server - * what features a stream is allowed to use.
The enum - * values here are intended to represent higher level - * features, additional details may be negotiated - * with key-value pairs specific to the protocol. - * - * Enums added to this list should be assigned power-of-two values - * to facilitate exchanging and comparing bitmaps for supported - * features. - * - * @enum {number} - */ -export namespace org.apache.arrow.flatbuf{ -export enum Feature{ - /** - * Needed to make flatbuffers happy. - */ - UNUSED= 0, - - /** - * The stream makes use of multiple full dictionaries with the - * same ID and assumes clients implement dictionary replacement - * correctly. - */ - DICTIONARY_REPLACEMENT= 1, - - /** - * The stream makes use of compressed bodies as described - * in Message.fbs. - */ - COMPRESSED_BODY= 2 -}; -} - -/** - * @enum {number} - */ -export namespace org.apache.arrow.flatbuf{ -export enum UnionMode{ - Sparse= 0, - Dense= 1 -}; -} - -/** - * @enum {number} - */ -export namespace org.apache.arrow.flatbuf{ -export enum Precision{ - HALF= 0, - SINGLE= 1, - DOUBLE= 2 -}; -} - -/** - * @enum {number} - */ -export namespace org.apache.arrow.flatbuf{ -export enum DateUnit{ - DAY= 0, - MILLISECOND= 1 -}; -} - -/** - * @enum {number} - */ -export namespace org.apache.arrow.flatbuf{ -export enum TimeUnit{ - SECOND= 0, - MILLISECOND= 1, - MICROSECOND= 2, - NANOSECOND= 3 -}; -} - -/** - * @enum {number} - */ -export namespace org.apache.arrow.flatbuf{ -export enum IntervalUnit{ - YEAR_MONTH= 0, - DAY_TIME= 1 -}; -} - -/** - * ---------------------------------------------------------------------- - * Top-level Type value, enabling extensible type-specific metadata. We can - * add new logical types to Type without breaking backwards compatibility - * - * @enum {number} - */ -export namespace org.apache.arrow.flatbuf{ -export enum Type{ - NONE= 0, - Null= 1, - Int= 2, - FloatingPoint= 3, - Binary= 4, - Utf8= 5, - Bool= 6, - Decimal= 7, - Date= 8, - Time= 9, - Timestamp= 10, - Interval= 11, - List= 12, - Struct_= 13, - Union= 14, - FixedSizeBinary= 15, - FixedSizeList= 16, - Map= 17, - Duration= 18, - LargeBinary= 19, - LargeUtf8= 20, - LargeList= 21 -}; - -export function unionToType( - type: Type, - accessor: (obj:org.apache.arrow.flatbuf.Binary|org.apache.arrow.flatbuf.Bool|org.apache.arrow.flatbuf.Date|org.apache.arrow.flatbuf.Decimal|org.apache.arrow.flatbuf.Duration|org.apache.arrow.flatbuf.FixedSizeBinary|org.apache.arrow.flatbuf.FixedSizeList|org.apache.arrow.flatbuf.FloatingPoint|org.apache.arrow.flatbuf.Int|org.apache.arrow.flatbuf.Interval|org.apache.arrow.flatbuf.LargeBinary|org.apache.arrow.flatbuf.LargeList|org.apache.arrow.flatbuf.LargeUtf8|org.apache.arrow.flatbuf.List|org.apache.arrow.flatbuf.Map|org.apache.arrow.flatbuf.Null|org.apache.arrow.flatbuf.Struct_|org.apache.arrow.flatbuf.Time|org.apache.arrow.flatbuf.Timestamp|org.apache.arrow.flatbuf.Union|org.apache.arrow.flatbuf.Utf8) =>
org.apache.arrow.flatbuf.Binary|org.apache.arrow.flatbuf.Bool|org.apache.arrow.flatbuf.Date|org.apache.arrow.flatbuf.Decimal|org.apache.arrow.flatbuf.Duration|org.apache.arrow.flatbuf.FixedSizeBinary|org.apache.arrow.flatbuf.FixedSizeList|org.apache.arrow.flatbuf.FloatingPoint|org.apache.arrow.flatbuf.Int|org.apache.arrow.flatbuf.Interval|org.apache.arrow.flatbuf.LargeBinary|org.apache.arrow.flatbuf.LargeList|org.apache.arrow.flatbuf.LargeUtf8|org.apache.arrow.flatbuf.List|org.apache.arrow.flatbuf.Map|org.apache.arrow.flatbuf.Null|org.apache.arrow.flatbuf.Struct_|org.apache.arrow.flatbuf.Time|org.apache.arrow.flatbuf.Timestamp|org.apache.arrow.flatbuf.Union|org.apache.arrow.flatbuf.Utf8|null -): org.apache.arrow.flatbuf.Binary|org.apache.arrow.flatbuf.Bool|org.apache.arrow.flatbuf.Date|org.apache.arrow.flatbuf.Decimal|org.apache.arrow.flatbuf.Duration|org.apache.arrow.flatbuf.FixedSizeBinary|org.apache.arrow.flatbuf.FixedSizeList|org.apache.arrow.flatbuf.FloatingPoint|org.apache.arrow.flatbuf.Int|org.apache.arrow.flatbuf.Interval|org.apache.arrow.flatbuf.LargeBinary|org.apache.arrow.flatbuf.LargeList|org.apache.arrow.flatbuf.LargeUtf8|org.apache.arrow.flatbuf.List|org.apache.arrow.flatbuf.Map|org.apache.arrow.flatbuf.Null|org.apache.arrow.flatbuf.Struct_|org.apache.arrow.flatbuf.Time|org.apache.arrow.flatbuf.Timestamp|org.apache.arrow.flatbuf.Union|org.apache.arrow.flatbuf.Utf8|null { - switch(org.apache.arrow.flatbuf.Type[type]) { - case 'NONE': return null; - case 'Null': return accessor(new org.apache.arrow.flatbuf.Null())! as org.apache.arrow.flatbuf.Null; - case 'Int': return accessor(new org.apache.arrow.flatbuf.Int())! as org.apache.arrow.flatbuf.Int; - case 'FloatingPoint': return accessor(new org.apache.arrow.flatbuf.FloatingPoint())! as org.apache.arrow.flatbuf.FloatingPoint; - case 'Binary': return accessor(new org.apache.arrow.flatbuf.Binary())! as org.apache.arrow.flatbuf.Binary; - case 'Utf8': return accessor(new org.apache.arrow.flatbuf.Utf8())! as org.apache.arrow.flatbuf.Utf8; - case 'Bool': return accessor(new org.apache.arrow.flatbuf.Bool())! as org.apache.arrow.flatbuf.Bool; - case 'Decimal': return accessor(new org.apache.arrow.flatbuf.Decimal())! as org.apache.arrow.flatbuf.Decimal; - case 'Date': return accessor(new org.apache.arrow.flatbuf.Date())! as org.apache.arrow.flatbuf.Date; - case 'Time': return accessor(new org.apache.arrow.flatbuf.Time())! as org.apache.arrow.flatbuf.Time; - case 'Timestamp': return accessor(new org.apache.arrow.flatbuf.Timestamp())! as org.apache.arrow.flatbuf.Timestamp; - case 'Interval': return accessor(new org.apache.arrow.flatbuf.Interval())! as org.apache.arrow.flatbuf.Interval; - case 'List': return accessor(new org.apache.arrow.flatbuf.List())! as org.apache.arrow.flatbuf.List; - case 'Struct_': return accessor(new org.apache.arrow.flatbuf.Struct_())! as org.apache.arrow.flatbuf.Struct_; - case 'Union': return accessor(new org.apache.arrow.flatbuf.Union())! as org.apache.arrow.flatbuf.Union; - case 'FixedSizeBinary': return accessor(new org.apache.arrow.flatbuf.FixedSizeBinary())! as org.apache.arrow.flatbuf.FixedSizeBinary; - case 'FixedSizeList': return accessor(new org.apache.arrow.flatbuf.FixedSizeList())! as org.apache.arrow.flatbuf.FixedSizeList; - case 'Map': return accessor(new org.apache.arrow.flatbuf.Map())! as org.apache.arrow.flatbuf.Map; - case 'Duration': return accessor(new org.apache.arrow.flatbuf.Duration())! 
as org.apache.arrow.flatbuf.Duration; - case 'LargeBinary': return accessor(new org.apache.arrow.flatbuf.LargeBinary())! as org.apache.arrow.flatbuf.LargeBinary; - case 'LargeUtf8': return accessor(new org.apache.arrow.flatbuf.LargeUtf8())! as org.apache.arrow.flatbuf.LargeUtf8; - case 'LargeList': return accessor(new org.apache.arrow.flatbuf.LargeList())! as org.apache.arrow.flatbuf.LargeList; - default: return null; - } -} - -export function unionListToType( - type: Type, - accessor: (index: number, obj:org.apache.arrow.flatbuf.Binary|org.apache.arrow.flatbuf.Bool|org.apache.arrow.flatbuf.Date|org.apache.arrow.flatbuf.Decimal|org.apache.arrow.flatbuf.Duration|org.apache.arrow.flatbuf.FixedSizeBinary|org.apache.arrow.flatbuf.FixedSizeList|org.apache.arrow.flatbuf.FloatingPoint|org.apache.arrow.flatbuf.Int|org.apache.arrow.flatbuf.Interval|org.apache.arrow.flatbuf.LargeBinary|org.apache.arrow.flatbuf.LargeList|org.apache.arrow.flatbuf.LargeUtf8|org.apache.arrow.flatbuf.List|org.apache.arrow.flatbuf.Map|org.apache.arrow.flatbuf.Null|org.apache.arrow.flatbuf.Struct_|org.apache.arrow.flatbuf.Time|org.apache.arrow.flatbuf.Timestamp|org.apache.arrow.flatbuf.Union|org.apache.arrow.flatbuf.Utf8) => org.apache.arrow.flatbuf.Binary|org.apache.arrow.flatbuf.Bool|org.apache.arrow.flatbuf.Date|org.apache.arrow.flatbuf.Decimal|org.apache.arrow.flatbuf.Duration|org.apache.arrow.flatbuf.FixedSizeBinary|org.apache.arrow.flatbuf.FixedSizeList|org.apache.arrow.flatbuf.FloatingPoint|org.apache.arrow.flatbuf.Int|org.apache.arrow.flatbuf.Interval|org.apache.arrow.flatbuf.LargeBinary|org.apache.arrow.flatbuf.LargeList|org.apache.arrow.flatbuf.LargeUtf8|org.apache.arrow.flatbuf.List|org.apache.arrow.flatbuf.Map|org.apache.arrow.flatbuf.Null|org.apache.arrow.flatbuf.Struct_|org.apache.arrow.flatbuf.Time|org.apache.arrow.flatbuf.Timestamp|org.apache.arrow.flatbuf.Union|org.apache.arrow.flatbuf.Utf8|null, - index: number -): org.apache.arrow.flatbuf.Binary|org.apache.arrow.flatbuf.Bool|org.apache.arrow.flatbuf.Date|org.apache.arrow.flatbuf.Decimal|org.apache.arrow.flatbuf.Duration|org.apache.arrow.flatbuf.FixedSizeBinary|org.apache.arrow.flatbuf.FixedSizeList|org.apache.arrow.flatbuf.FloatingPoint|org.apache.arrow.flatbuf.Int|org.apache.arrow.flatbuf.Interval|org.apache.arrow.flatbuf.LargeBinary|org.apache.arrow.flatbuf.LargeList|org.apache.arrow.flatbuf.LargeUtf8|org.apache.arrow.flatbuf.List|org.apache.arrow.flatbuf.Map|org.apache.arrow.flatbuf.Null|org.apache.arrow.flatbuf.Struct_|org.apache.arrow.flatbuf.Time|org.apache.arrow.flatbuf.Timestamp|org.apache.arrow.flatbuf.Union|org.apache.arrow.flatbuf.Utf8|null { - switch(org.apache.arrow.flatbuf.Type[type]) { - case 'NONE': return null; - case 'Null': return accessor(index, new org.apache.arrow.flatbuf.Null())! as org.apache.arrow.flatbuf.Null; - case 'Int': return accessor(index, new org.apache.arrow.flatbuf.Int())! as org.apache.arrow.flatbuf.Int; - case 'FloatingPoint': return accessor(index, new org.apache.arrow.flatbuf.FloatingPoint())! as org.apache.arrow.flatbuf.FloatingPoint; - case 'Binary': return accessor(index, new org.apache.arrow.flatbuf.Binary())! as org.apache.arrow.flatbuf.Binary; - case 'Utf8': return accessor(index, new org.apache.arrow.flatbuf.Utf8())! as org.apache.arrow.flatbuf.Utf8; - case 'Bool': return accessor(index, new org.apache.arrow.flatbuf.Bool())! as org.apache.arrow.flatbuf.Bool; - case 'Decimal': return accessor(index, new org.apache.arrow.flatbuf.Decimal())! 
as org.apache.arrow.flatbuf.Decimal; - case 'Date': return accessor(index, new org.apache.arrow.flatbuf.Date())! as org.apache.arrow.flatbuf.Date; - case 'Time': return accessor(index, new org.apache.arrow.flatbuf.Time())! as org.apache.arrow.flatbuf.Time; - case 'Timestamp': return accessor(index, new org.apache.arrow.flatbuf.Timestamp())! as org.apache.arrow.flatbuf.Timestamp; - case 'Interval': return accessor(index, new org.apache.arrow.flatbuf.Interval())! as org.apache.arrow.flatbuf.Interval; - case 'List': return accessor(index, new org.apache.arrow.flatbuf.List())! as org.apache.arrow.flatbuf.List; - case 'Struct_': return accessor(index, new org.apache.arrow.flatbuf.Struct_())! as org.apache.arrow.flatbuf.Struct_; - case 'Union': return accessor(index, new org.apache.arrow.flatbuf.Union())! as org.apache.arrow.flatbuf.Union; - case 'FixedSizeBinary': return accessor(index, new org.apache.arrow.flatbuf.FixedSizeBinary())! as org.apache.arrow.flatbuf.FixedSizeBinary; - case 'FixedSizeList': return accessor(index, new org.apache.arrow.flatbuf.FixedSizeList())! as org.apache.arrow.flatbuf.FixedSizeList; - case 'Map': return accessor(index, new org.apache.arrow.flatbuf.Map())! as org.apache.arrow.flatbuf.Map; - case 'Duration': return accessor(index, new org.apache.arrow.flatbuf.Duration())! as org.apache.arrow.flatbuf.Duration; - case 'LargeBinary': return accessor(index, new org.apache.arrow.flatbuf.LargeBinary())! as org.apache.arrow.flatbuf.LargeBinary; - case 'LargeUtf8': return accessor(index, new org.apache.arrow.flatbuf.LargeUtf8())! as org.apache.arrow.flatbuf.LargeUtf8; - case 'LargeList': return accessor(index, new org.apache.arrow.flatbuf.LargeList())! as org.apache.arrow.flatbuf.LargeList; - default: return null; - } -} -} - -/** - * ---------------------------------------------------------------------- - * Dictionary encoding metadata - * Maintained for forwards compatibility, in the future - * Dictionaries might be explicit maps between integers and values - * allowing for non-contiguous index values - * - * @enum {number} - */ -export namespace org.apache.arrow.flatbuf{ -export enum DictionaryKind{ - DenseArray= 0 -}; -} - -/** - * ---------------------------------------------------------------------- - * Endianness of the platform producing the data - * - * @enum {number} - */ -export namespace org.apache.arrow.flatbuf{ -export enum Endianness{ - Little= 0, - Big= 1 -}; -} - -/** - * These are stored in the flatbuffer in the Type union below - * - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class Null { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns Null - */ -__init(i:number, bb:flatbuffers.ByteBuffer):Null { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Null= obj - * @returns Null - */ -static getRootAsNull(bb:flatbuffers.ByteBuffer, obj?:Null):Null { - return (obj || new Null()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Null= obj - * @returns Null - */ -static getSizePrefixedRootAsNull(bb:flatbuffers.ByteBuffer, obj?:Null):Null { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new Null()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.Builder builder - */ -static startNull(builder:flatbuffers.Builder) { - builder.startObject(0); -}; - 
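The `unionToType`/`unionListToType` helpers above are the read-side entry points for the `Type` union: a caller passes the stored discriminator plus an accessor, and gets back the concrete type table to inspect. A minimal sketch of that dispatch, assuming a flatc-generated `Field` table (declared further down this file) with the usual `typeType()`/`type()` accessors, and assuming the `org.apache.arrow.flatbuf` namespace from this file is imported as `fb`:

```ts
// Sketch only: resolve a Field's Type union to a concrete type table.
// `fb` is an assumed alias for the org.apache.arrow.flatbuf namespace.
function describeType(field: fb.Field): string {
  const t = fb.unionToType(field.typeType(), obj => field.type(obj));
  if (t instanceof fb.Int) {
    return `Int(${t.bitWidth()}, signed=${t.isSigned()})`;
  }
  if (t instanceof fb.FloatingPoint) {
    return `FloatingPoint(${fb.Precision[t.precision()]})`;
  }
  return t === null ? 'NONE' : 'other nested/logical type';
}
```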
-/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endNull(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -static createNull(builder:flatbuffers.Builder):flatbuffers.Offset { - Null.startNull(builder); - return Null.endNull(builder); -} -} -} -/** - * A Struct_ in the flatbuffer metadata is the same as an Arrow Struct - * (according to the physical memory layout). We used Struct_ here as - * Struct is a reserved word in Flatbuffers - * - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class Struct_ { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns Struct_ - */ -__init(i:number, bb:flatbuffers.ByteBuffer):Struct_ { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Struct_= obj - * @returns Struct_ - */ -static getRootAsStruct_(bb:flatbuffers.ByteBuffer, obj?:Struct_):Struct_ { - return (obj || new Struct_()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Struct_= obj - * @returns Struct_ - */ -static getSizePrefixedRootAsStruct_(bb:flatbuffers.ByteBuffer, obj?:Struct_):Struct_ { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new Struct_()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.Builder builder - */ -static startStruct_(builder:flatbuffers.Builder) { - builder.startObject(0); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endStruct_(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -static createStruct_(builder:flatbuffers.Builder):flatbuffers.Offset { - Struct_.startStruct_(builder); - return Struct_.endStruct_(builder); -} -} -} -/** - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class List { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns List - */ -__init(i:number, bb:flatbuffers.ByteBuffer):List { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param List= obj - * @returns List - */ -static getRootAsList(bb:flatbuffers.ByteBuffer, obj?:List):List { - return (obj || new List()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param List= obj - * @returns List - */ -static getSizePrefixedRootAsList(bb:flatbuffers.ByteBuffer, obj?:List):List { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new List()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.Builder builder - */ -static startList(builder:flatbuffers.Builder) { - builder.startObject(0); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endList(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -static createList(builder:flatbuffers.Builder):flatbuffers.Offset { - List.startList(builder); - return List.endList(builder); -} -} -} -/** - * Same as List, but with 64-bit offsets, allowing to represent - * extremely large data values. 
- * - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class LargeList { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns LargeList - */ -__init(i:number, bb:flatbuffers.ByteBuffer):LargeList { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param LargeList= obj - * @returns LargeList - */ -static getRootAsLargeList(bb:flatbuffers.ByteBuffer, obj?:LargeList):LargeList { - return (obj || new LargeList()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param LargeList= obj - * @returns LargeList - */ -static getSizePrefixedRootAsLargeList(bb:flatbuffers.ByteBuffer, obj?:LargeList):LargeList { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new LargeList()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.Builder builder - */ -static startLargeList(builder:flatbuffers.Builder) { - builder.startObject(0); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endLargeList(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -static createLargeList(builder:flatbuffers.Builder):flatbuffers.Offset { - LargeList.startLargeList(builder); - return LargeList.endLargeList(builder); -} -} -} -/** - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class FixedSizeList { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns FixedSizeList - */ -__init(i:number, bb:flatbuffers.ByteBuffer):FixedSizeList { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param FixedSizeList= obj - * @returns FixedSizeList - */ -static getRootAsFixedSizeList(bb:flatbuffers.ByteBuffer, obj?:FixedSizeList):FixedSizeList { - return (obj || new FixedSizeList()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param FixedSizeList= obj - * @returns FixedSizeList - */ -static getSizePrefixedRootAsFixedSizeList(bb:flatbuffers.ByteBuffer, obj?:FixedSizeList):FixedSizeList { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new FixedSizeList()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * Number of list items per value - * - * @returns number - */ -listSize():number { - var offset = this.bb!.__offset(this.bb_pos, 4); - return offset ? 
this.bb!.readInt32(this.bb_pos + offset) : 0; -}; - -/** - * @param flatbuffers.Builder builder - */ -static startFixedSizeList(builder:flatbuffers.Builder) { - builder.startObject(1); -}; - -/** - * @param flatbuffers.Builder builder - * @param number listSize - */ -static addListSize(builder:flatbuffers.Builder, listSize:number) { - builder.addFieldInt32(0, listSize, 0); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endFixedSizeList(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -static createFixedSizeList(builder:flatbuffers.Builder, listSize:number):flatbuffers.Offset { - FixedSizeList.startFixedSizeList(builder); - FixedSizeList.addListSize(builder, listSize); - return FixedSizeList.endFixedSizeList(builder); -} -} -} -/** - * A Map is a logical nested type that is represented as - * - * List<entries: Struct<key: K, value: V>> - * - * In this layout, the keys and values are each respectively contiguous. We do - * not constrain the key and value types, so the application is responsible - * for ensuring that the keys are hashable and unique. Whether the keys are sorted - * may be set in the metadata for this field. - * - * In a field with Map type, the field has a child Struct field, which then - * has two children: the first the key type and the second the value type. The names of the - * child fields may be respectively "entries", "key", and "value", but this is - * not enforced. - * - * Map - * ```text - *   - child[0] entries: Struct - *     - child[0] key: K - *     - child[1] value: V - * ``` - * Neither the "entries" field nor the "key" field may be nullable. - * - * The metadata is structured so that Arrow systems without special handling - * for Map can make Map an alias for List. The "layout" attribute for the Map - * field must have the same contents as a List. - * - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class Map { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns Map - */ -__init(i:number, bb:flatbuffers.ByteBuffer):Map { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Map= obj - * @returns Map - */ -static getRootAsMap(bb:flatbuffers.ByteBuffer, obj?:Map):Map { - return (obj || new Map()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Map= obj - * @returns Map - */ -static getSizePrefixedRootAsMap(bb:flatbuffers.ByteBuffer, obj?:Map):Map { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new Map()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * Set to true if the keys within each value are sorted - * - * @returns boolean - */ -keysSorted():boolean { - var offset = this.bb!.__offset(this.bb_pos, 4); - return offset ?
!!this.bb!.readInt8(this.bb_pos + offset) : false; -}; - -/** - * @param flatbuffers.Builder builder - */ -static startMap(builder:flatbuffers.Builder) { - builder.startObject(1); -}; - -/** - * @param flatbuffers.Builder builder - * @param boolean keysSorted - */ -static addKeysSorted(builder:flatbuffers.Builder, keysSorted:boolean) { - builder.addFieldInt8(0, +keysSorted, +false); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endMap(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -static createMap(builder:flatbuffers.Builder, keysSorted:boolean):flatbuffers.Offset { - Map.startMap(builder); - Map.addKeysSorted(builder, keysSorted); - return Map.endMap(builder); -} -} -} -/** - * A union is a complex type with children in Field. - * By default, ids in the type vector refer to the offsets in the children; - * optionally, typeIds provides an indirection between the child offset and the type id: - * for each child, `typeIds[offset]` is the id used in the type vector - * - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class Union { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns Union - */ -__init(i:number, bb:flatbuffers.ByteBuffer):Union { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Union= obj - * @returns Union - */ -static getRootAsUnion(bb:flatbuffers.ByteBuffer, obj?:Union):Union { - return (obj || new Union()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Union= obj - * @returns Union - */ -static getSizePrefixedRootAsUnion(bb:flatbuffers.ByteBuffer, obj?:Union):Union { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new Union()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @returns org.apache.arrow.flatbuf.UnionMode - */ -mode():org.apache.arrow.flatbuf.UnionMode { - var offset = this.bb!.__offset(this.bb_pos, 4); - return offset ? /** */ (this.bb!.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.UnionMode.Sparse; -}; - -/** - * @param number index - * @returns number - */ -typeIds(index: number):number|null { - var offset = this.bb!.__offset(this.bb_pos, 6); - return offset ? this.bb!.readInt32(this.bb!.__vector(this.bb_pos + offset) + index * 4) : 0; -}; - -/** - * @returns number - */ -typeIdsLength():number { - var offset = this.bb!.__offset(this.bb_pos, 6); - return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0; -}; - -/** - * @returns Int32Array - */ -typeIdsArray():Int32Array|null { - var offset = this.bb!.__offset(this.bb_pos, 6); - return offset ?
new Int32Array(this.bb!.bytes().buffer, this.bb!.bytes().byteOffset + this.bb!.__vector(this.bb_pos + offset), this.bb!.__vector_len(this.bb_pos + offset)) : null; -}; - -/** - * @param flatbuffers.Builder builder - */ -static startUnion(builder:flatbuffers.Builder) { - builder.startObject(2); -}; - -/** - * @param flatbuffers.Builder builder - * @param org.apache.arrow.flatbuf.UnionMode mode - */ -static addMode(builder:flatbuffers.Builder, mode:org.apache.arrow.flatbuf.UnionMode) { - builder.addFieldInt16(0, mode, org.apache.arrow.flatbuf.UnionMode.Sparse); -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Offset typeIdsOffset - */ -static addTypeIds(builder:flatbuffers.Builder, typeIdsOffset:flatbuffers.Offset) { - builder.addFieldOffset(1, typeIdsOffset, 0); -}; - -/** - * @param flatbuffers.Builder builder - * @param Array.<number> data - * @returns flatbuffers.Offset - */ -static createTypeIdsVector(builder:flatbuffers.Builder, data:number[]|Int32Array):flatbuffers.Offset; -/** - * @deprecated This Uint8Array overload will be removed in the future. - */ -static createTypeIdsVector(builder:flatbuffers.Builder, data:number[]|Uint8Array):flatbuffers.Offset; -static createTypeIdsVector(builder:flatbuffers.Builder, data:number[]|Int32Array|Uint8Array):flatbuffers.Offset { - builder.startVector(4, data.length, 4); - for (var i = data.length - 1; i >= 0; i--) { - builder.addInt32(data[i]); - } - return builder.endVector(); -}; - -/** - * @param flatbuffers.Builder builder - * @param number numElems - */ -static startTypeIdsVector(builder:flatbuffers.Builder, numElems:number) { - builder.startVector(4, numElems, 4); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endUnion(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -static createUnion(builder:flatbuffers.Builder, mode:org.apache.arrow.flatbuf.UnionMode, typeIdsOffset:flatbuffers.Offset):flatbuffers.Offset { - Union.startUnion(builder); - Union.addMode(builder, mode); - Union.addTypeIds(builder, typeIdsOffset); - return Union.endUnion(builder); -} -} -} -/** - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class Int { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns Int - */ -__init(i:number, bb:flatbuffers.ByteBuffer):Int { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Int= obj - * @returns Int - */ -static getRootAsInt(bb:flatbuffers.ByteBuffer, obj?:Int):Int { - return (obj || new Int()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Int= obj - * @returns Int - */ -static getSizePrefixedRootAsInt(bb:flatbuffers.ByteBuffer, obj?:Int):Int { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new Int()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @returns number - */ -bitWidth():number { - var offset = this.bb!.__offset(this.bb_pos, 4); - return offset ? this.bb!.readInt32(this.bb_pos + offset) : 0; -}; - -/** - * @returns boolean - */ -isSigned():boolean { - var offset = this.bb!.__offset(this.bb_pos, 6); - return offset ?
!!this.bb!.readInt8(this.bb_pos + offset) : false; -}; - -/** - * @param flatbuffers.Builder builder - */ -static startInt(builder:flatbuffers.Builder) { - builder.startObject(2); -}; - -/** - * @param flatbuffers.Builder builder - * @param number bitWidth - */ -static addBitWidth(builder:flatbuffers.Builder, bitWidth:number) { - builder.addFieldInt32(0, bitWidth, 0); -}; - -/** - * @param flatbuffers.Builder builder - * @param boolean isSigned - */ -static addIsSigned(builder:flatbuffers.Builder, isSigned:boolean) { - builder.addFieldInt8(1, +isSigned, +false); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endInt(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -static createInt(builder:flatbuffers.Builder, bitWidth:number, isSigned:boolean):flatbuffers.Offset { - Int.startInt(builder); - Int.addBitWidth(builder, bitWidth); - Int.addIsSigned(builder, isSigned); - return Int.endInt(builder); -} -} -} -/** - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class FloatingPoint { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns FloatingPoint - */ -__init(i:number, bb:flatbuffers.ByteBuffer):FloatingPoint { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param FloatingPoint= obj - * @returns FloatingPoint - */ -static getRootAsFloatingPoint(bb:flatbuffers.ByteBuffer, obj?:FloatingPoint):FloatingPoint { - return (obj || new FloatingPoint()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param FloatingPoint= obj - * @returns FloatingPoint - */ -static getSizePrefixedRootAsFloatingPoint(bb:flatbuffers.ByteBuffer, obj?:FloatingPoint):FloatingPoint { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new FloatingPoint()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @returns org.apache.arrow.flatbuf.Precision - */ -precision():org.apache.arrow.flatbuf.Precision { - var offset = this.bb!.__offset(this.bb_pos, 4); - return offset ? 
/** */ (this.bb!.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.Precision.HALF; -}; - -/** - * @param flatbuffers.Builder builder - */ -static startFloatingPoint(builder:flatbuffers.Builder) { - builder.startObject(1); -}; - -/** - * @param flatbuffers.Builder builder - * @param org.apache.arrow.flatbuf.Precision precision - */ -static addPrecision(builder:flatbuffers.Builder, precision:org.apache.arrow.flatbuf.Precision) { - builder.addFieldInt16(0, precision, org.apache.arrow.flatbuf.Precision.HALF); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endFloatingPoint(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -static createFloatingPoint(builder:flatbuffers.Builder, precision:org.apache.arrow.flatbuf.Precision):flatbuffers.Offset { - FloatingPoint.startFloatingPoint(builder); - FloatingPoint.addPrecision(builder, precision); - return FloatingPoint.endFloatingPoint(builder); -} -} -} -/** - * Unicode with UTF-8 encoding - * - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class Utf8 { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns Utf8 - */ -__init(i:number, bb:flatbuffers.ByteBuffer):Utf8 { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Utf8= obj - * @returns Utf8 - */ -static getRootAsUtf8(bb:flatbuffers.ByteBuffer, obj?:Utf8):Utf8 { - return (obj || new Utf8()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Utf8= obj - * @returns Utf8 - */ -static getSizePrefixedRootAsUtf8(bb:flatbuffers.ByteBuffer, obj?:Utf8):Utf8 { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new Utf8()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.Builder builder - */ -static startUtf8(builder:flatbuffers.Builder) { - builder.startObject(0); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endUtf8(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -static createUtf8(builder:flatbuffers.Builder):flatbuffers.Offset { - Utf8.startUtf8(builder); - return Utf8.endUtf8(builder); -} -} -} -/** - * Opaque binary data - * - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class Binary { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns Binary - */ -__init(i:number, bb:flatbuffers.ByteBuffer):Binary { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Binary= obj - * @returns Binary - */ -static getRootAsBinary(bb:flatbuffers.ByteBuffer, obj?:Binary):Binary { - return (obj || new Binary()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Binary= obj - * @returns Binary - */ -static getSizePrefixedRootAsBinary(bb:flatbuffers.ByteBuffer, obj?:Binary):Binary { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new Binary()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.Builder builder - */ -static startBinary(builder:flatbuffers.Builder) { - builder.startObject(0); -}; - 
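All of the `add*` builder methods in these deleted classes share one convention worth noting: the third argument is the field's declared default, and the flatbuffers builder skips writing any field whose value equals that default, which is why the paired accessors (`bitWidth()`, `isSigned()`, `precision()`) fall back to the same constants when `__offset` returns 0. A rough sketch under that assumption, using the JS flatbuffers runtime this patch is removing, with `fb` as an assumed alias for the generated namespace:

```ts
// Sketch: build an Int{bitWidth: 32, isSigned: true} type table.
// `flatbuffers` is the JS runtime being dropped by this patch; `fb` aliases
// the org.apache.arrow.flatbuf namespace generated in this file.
const builder = new flatbuffers.Builder(64);
const intType = fb.Int.createInt(builder, 32, true);
builder.finish(intType);
// Both fields are written because they differ from their defaults (0, false);
// createInt(builder, 0, false) would instead serialize an empty table, and
// readers would recover the values from the accessor fallbacks.
```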
-/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endBinary(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -static createBinary(builder:flatbuffers.Builder):flatbuffers.Offset { - Binary.startBinary(builder); - return Binary.endBinary(builder); -} -} -} -/** - * Same as Utf8, but with 64-bit offsets, allowing to represent - * extremely large data values. - * - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class LargeUtf8 { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns LargeUtf8 - */ -__init(i:number, bb:flatbuffers.ByteBuffer):LargeUtf8 { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param LargeUtf8= obj - * @returns LargeUtf8 - */ -static getRootAsLargeUtf8(bb:flatbuffers.ByteBuffer, obj?:LargeUtf8):LargeUtf8 { - return (obj || new LargeUtf8()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param LargeUtf8= obj - * @returns LargeUtf8 - */ -static getSizePrefixedRootAsLargeUtf8(bb:flatbuffers.ByteBuffer, obj?:LargeUtf8):LargeUtf8 { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new LargeUtf8()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.Builder builder - */ -static startLargeUtf8(builder:flatbuffers.Builder) { - builder.startObject(0); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endLargeUtf8(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -static createLargeUtf8(builder:flatbuffers.Builder):flatbuffers.Offset { - LargeUtf8.startLargeUtf8(builder); - return LargeUtf8.endLargeUtf8(builder); -} -} -} -/** - * Same as Binary, but with 64-bit offsets, allowing to represent - * extremely large data values. 
- * - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class LargeBinary { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns LargeBinary - */ -__init(i:number, bb:flatbuffers.ByteBuffer):LargeBinary { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param LargeBinary= obj - * @returns LargeBinary - */ -static getRootAsLargeBinary(bb:flatbuffers.ByteBuffer, obj?:LargeBinary):LargeBinary { - return (obj || new LargeBinary()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param LargeBinary= obj - * @returns LargeBinary - */ -static getSizePrefixedRootAsLargeBinary(bb:flatbuffers.ByteBuffer, obj?:LargeBinary):LargeBinary { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new LargeBinary()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.Builder builder - */ -static startLargeBinary(builder:flatbuffers.Builder) { - builder.startObject(0); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endLargeBinary(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -static createLargeBinary(builder:flatbuffers.Builder):flatbuffers.Offset { - LargeBinary.startLargeBinary(builder); - return LargeBinary.endLargeBinary(builder); -} -} -} -/** - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class FixedSizeBinary { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns FixedSizeBinary - */ -__init(i:number, bb:flatbuffers.ByteBuffer):FixedSizeBinary { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param FixedSizeBinary= obj - * @returns FixedSizeBinary - */ -static getRootAsFixedSizeBinary(bb:flatbuffers.ByteBuffer, obj?:FixedSizeBinary):FixedSizeBinary { - return (obj || new FixedSizeBinary()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param FixedSizeBinary= obj - * @returns FixedSizeBinary - */ -static getSizePrefixedRootAsFixedSizeBinary(bb:flatbuffers.ByteBuffer, obj?:FixedSizeBinary):FixedSizeBinary { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new FixedSizeBinary()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * Number of bytes per value - * - * @returns number - */ -byteWidth():number { - var offset = this.bb!.__offset(this.bb_pos, 4); - return offset ? 
this.bb!.readInt32(this.bb_pos + offset) : 0; -}; - -/** - * @param flatbuffers.Builder builder - */ -static startFixedSizeBinary(builder:flatbuffers.Builder) { - builder.startObject(1); -}; - -/** - * @param flatbuffers.Builder builder - * @param number byteWidth - */ -static addByteWidth(builder:flatbuffers.Builder, byteWidth:number) { - builder.addFieldInt32(0, byteWidth, 0); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endFixedSizeBinary(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -static createFixedSizeBinary(builder:flatbuffers.Builder, byteWidth:number):flatbuffers.Offset { - FixedSizeBinary.startFixedSizeBinary(builder); - FixedSizeBinary.addByteWidth(builder, byteWidth); - return FixedSizeBinary.endFixedSizeBinary(builder); -} -} -} -/** - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class Bool { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns Bool - */ -__init(i:number, bb:flatbuffers.ByteBuffer):Bool { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Bool= obj - * @returns Bool - */ -static getRootAsBool(bb:flatbuffers.ByteBuffer, obj?:Bool):Bool { - return (obj || new Bool()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Bool= obj - * @returns Bool - */ -static getSizePrefixedRootAsBool(bb:flatbuffers.ByteBuffer, obj?:Bool):Bool { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new Bool()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.Builder builder - */ -static startBool(builder:flatbuffers.Builder) { - builder.startObject(0); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endBool(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -static createBool(builder:flatbuffers.Builder):flatbuffers.Offset { - Bool.startBool(builder); - return Bool.endBool(builder); -} -} -} -/** - * Exact decimal value represented as an integer value in two's - * complement. Currently only 128-bit (16-byte) and 256-bit (32-byte) integers - * are used. The representation uses the endianness indicated - * in the Schema. 
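As a concrete illustration of the unscaled storage (an example, not part of the schema comment itself): a `Decimal` with `precision = 5` and `scale = 2` stores 123.45 as the 128-bit two's-complement integer 12345, and -123.45 as -12345; the scale is pure metadata and is never applied to the stored bytes.

```ts
// Illustration: Decimal(precision=5, scale=2) stores unscaled integers.
const scale = 2;
const unscaled = -12345n;                       // two's-complement storage for -123.45
const logical = Number(unscaled) / 10 ** scale; // -123.45
```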
- * - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class Decimal { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns Decimal - */ -__init(i:number, bb:flatbuffers.ByteBuffer):Decimal { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Decimal= obj - * @returns Decimal - */ -static getRootAsDecimal(bb:flatbuffers.ByteBuffer, obj?:Decimal):Decimal { - return (obj || new Decimal()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Decimal= obj - * @returns Decimal - */ -static getSizePrefixedRootAsDecimal(bb:flatbuffers.ByteBuffer, obj?:Decimal):Decimal { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new Decimal()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * Total number of decimal digits - * - * @returns number - */ -precision():number { - var offset = this.bb!.__offset(this.bb_pos, 4); - return offset ? this.bb!.readInt32(this.bb_pos + offset) : 0; -}; - -/** - * Number of digits after the decimal point "." - * - * @returns number - */ -scale():number { - var offset = this.bb!.__offset(this.bb_pos, 6); - return offset ? this.bb!.readInt32(this.bb_pos + offset) : 0; -}; - -/** - * Number of bits per value. The only accepted widths are 128 and 256. - * We use bitWidth for consistency with Int::bitWidth. - * - * @returns number - */ -bitWidth():number { - var offset = this.bb!.__offset(this.bb_pos, 8); - return offset ? this.bb!.readInt32(this.bb_pos + offset) : 128; -}; - -/** - * @param flatbuffers.Builder builder - */ -static startDecimal(builder:flatbuffers.Builder) { - builder.startObject(3); -}; - -/** - * @param flatbuffers.Builder builder - * @param number precision - */ -static addPrecision(builder:flatbuffers.Builder, precision:number) { - builder.addFieldInt32(0, precision, 0); -}; - -/** - * @param flatbuffers.Builder builder - * @param number scale - */ -static addScale(builder:flatbuffers.Builder, scale:number) { - builder.addFieldInt32(1, scale, 0); -}; - -/** - * @param flatbuffers.Builder builder - * @param number bitWidth - */ -static addBitWidth(builder:flatbuffers.Builder, bitWidth:number) { - builder.addFieldInt32(2, bitWidth, 128); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endDecimal(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -static createDecimal(builder:flatbuffers.Builder, precision:number, scale:number, bitWidth:number):flatbuffers.Offset { - Decimal.startDecimal(builder); - Decimal.addPrecision(builder, precision); - Decimal.addScale(builder, scale); - Decimal.addBitWidth(builder, bitWidth); - return Decimal.endDecimal(builder); -} -} -} -/** - * Date is either a 32-bit or 64-bit type representing elapsed time since UNIX - * epoch (1970-01-01), stored in either of two units: - * - * * Milliseconds (64 bits) indicating UNIX time elapsed since the epoch (no - * leap seconds), where the values are evenly divisible by 86400000 - * * Days (32 bits) since the UNIX epoch - * - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class Date { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns Date - */ -__init(i:number, bb:flatbuffers.ByteBuffer):Date 
{ - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Date= obj - * @returns Date - */ -static getRootAsDate(bb:flatbuffers.ByteBuffer, obj?:Date):Date { - return (obj || new Date()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Date= obj - * @returns Date - */ -static getSizePrefixedRootAsDate(bb:flatbuffers.ByteBuffer, obj?:Date):Date { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new Date()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @returns org.apache.arrow.flatbuf.DateUnit - */ -unit():org.apache.arrow.flatbuf.DateUnit { - var offset = this.bb!.__offset(this.bb_pos, 4); - return offset ? /** */ (this.bb!.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.DateUnit.MILLISECOND; -}; - -/** - * @param flatbuffers.Builder builder - */ -static startDate(builder:flatbuffers.Builder) { - builder.startObject(1); -}; - -/** - * @param flatbuffers.Builder builder - * @param org.apache.arrow.flatbuf.DateUnit unit - */ -static addUnit(builder:flatbuffers.Builder, unit:org.apache.arrow.flatbuf.DateUnit) { - builder.addFieldInt16(0, unit, org.apache.arrow.flatbuf.DateUnit.MILLISECOND); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endDate(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -static createDate(builder:flatbuffers.Builder, unit:org.apache.arrow.flatbuf.DateUnit):flatbuffers.Offset { - Date.startDate(builder); - Date.addUnit(builder, unit); - return Date.endDate(builder); -} -} -} -/** - * Time type. The physical storage type depends on the unit - * - SECOND and MILLISECOND: 32 bits - * - MICROSECOND and NANOSECOND: 64 bits - * - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class Time { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns Time - */ -__init(i:number, bb:flatbuffers.ByteBuffer):Time { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Time= obj - * @returns Time - */ -static getRootAsTime(bb:flatbuffers.ByteBuffer, obj?:Time):Time { - return (obj || new Time()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Time= obj - * @returns Time - */ -static getSizePrefixedRootAsTime(bb:flatbuffers.ByteBuffer, obj?:Time):Time { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new Time()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @returns org.apache.arrow.flatbuf.TimeUnit - */ -unit():org.apache.arrow.flatbuf.TimeUnit { - var offset = this.bb!.__offset(this.bb_pos, 4); - return offset ? /** */ (this.bb!.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.TimeUnit.MILLISECOND; -}; - -/** - * @returns number - */ -bitWidth():number { - var offset = this.bb!.__offset(this.bb_pos, 6); - return offset ? 
this.bb!.readInt32(this.bb_pos + offset) : 32; -}; - -/** - * @param flatbuffers.Builder builder - */ -static startTime(builder:flatbuffers.Builder) { - builder.startObject(2); -}; - -/** - * @param flatbuffers.Builder builder - * @param org.apache.arrow.flatbuf.TimeUnit unit - */ -static addUnit(builder:flatbuffers.Builder, unit:org.apache.arrow.flatbuf.TimeUnit) { - builder.addFieldInt16(0, unit, org.apache.arrow.flatbuf.TimeUnit.MILLISECOND); -}; - -/** - * @param flatbuffers.Builder builder - * @param number bitWidth - */ -static addBitWidth(builder:flatbuffers.Builder, bitWidth:number) { - builder.addFieldInt32(1, bitWidth, 32); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endTime(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -static createTime(builder:flatbuffers.Builder, unit:org.apache.arrow.flatbuf.TimeUnit, bitWidth:number):flatbuffers.Offset { - Time.startTime(builder); - Time.addUnit(builder, unit); - Time.addBitWidth(builder, bitWidth); - return Time.endTime(builder); -} -} -} -/** - * Time elapsed from the Unix epoch, 00:00:00.000 on 1 January 1970, excluding - * leap seconds, as a 64-bit integer. Note that UNIX time does not include - * leap seconds. - * - * Date & time libraries often have multiple different data types for temporal - * data. In order to ease interoperability between different implementations the - * Arrow project has some recommendations for encoding these types into a Timestamp - * column. - * - * An "instant" represents a single moment in time that has no meaningful time zone - * or the time zone is unknown. A column of instants can also contain values from - * multiple time zones. To encode an instant set the timezone string to "UTC". - * - * A "zoned date-time" represents a single moment in time that has a meaningful - * reference time zone. To encode a zoned date-time as a Timestamp set the timezone - * string to the name of the timezone. There is some ambiguity between an instant - * and a zoned date-time with the UTC time zone. Both of these are stored the same. - * Typically, this distinction does not matter. If it does, then an application should - * use custom metadata or an extension type to distinguish between the two cases. - * - * An "offset date-time" represents a single moment in time combined with a meaningful - * offset from UTC. To encode an offset date-time as a Timestamp set the timezone string - * to the numeric time zone offset string (e.g. "+03:00"). - * - * A "local date-time" does not represent a single moment in time. It represents a wall - * clock time combined with a date. Because of daylight saving time there may be multiple - * instants that correspond to a single local date-time in any given time zone. A - * local date-time is often stored as a struct or a Date32/Time64 pair. However, it can - * also be encoded into a Timestamp column. To do so the value should be the time - * elapsed from the Unix epoch so that a wall clock in UTC would display the desired time. - * The timezone string should be set to null or the empty string.
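The same instant-versus-local guidance carries over to the Java flatbuffers bindings that this series moves the web client onto. A minimal sketch of building both Timestamp type variants, assuming the generated org.apache.arrow.flatbuf classes are on the classpath; the sketch class and its method names are illustrative, not part of this patch:

import com.google.flatbuffers.FlatBufferBuilder;
import org.apache.arrow.flatbuf.TimeUnit;
import org.apache.arrow.flatbuf.Timestamp;

class TimestampTypeSketch {
    // An "instant": stored values are epoch-relative; pin the timezone to "UTC".
    static int instantType(FlatBufferBuilder b) {
        int utc = b.createString("UTC");
        return Timestamp.createTimestamp(b, TimeUnit.NANOSECOND, utc);
    }

    // A "local date-time": pass offset 0 so the timezone field is omitted;
    // readers must not interpret the stored values as instants.
    static int localDateTimeType(FlatBufferBuilder b) {
        return Timestamp.createTimestamp(b, TimeUnit.NANOSECOND, 0);
    }
}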
- * - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class Timestamp { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns Timestamp - */ -__init(i:number, bb:flatbuffers.ByteBuffer):Timestamp { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Timestamp= obj - * @returns Timestamp - */ -static getRootAsTimestamp(bb:flatbuffers.ByteBuffer, obj?:Timestamp):Timestamp { - return (obj || new Timestamp()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Timestamp= obj - * @returns Timestamp - */ -static getSizePrefixedRootAsTimestamp(bb:flatbuffers.ByteBuffer, obj?:Timestamp):Timestamp { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new Timestamp()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @returns org.apache.arrow.flatbuf.TimeUnit - */ -unit():org.apache.arrow.flatbuf.TimeUnit { - var offset = this.bb!.__offset(this.bb_pos, 4); - return offset ? /** */ (this.bb!.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.TimeUnit.SECOND; -}; - -/** - * The time zone is a string indicating the name of a time zone, one of: - * - * * As used in the Olson time zone database (the "tz database" or - * "tzdata"), such as "America/New_York" - * * An absolute time zone offset of the form +XX:XX or -XX:XX, such as +07:30 - * - * Whether a timezone string is present indicates different semantics about - * the data: - * - * * If the time zone is null or an empty string, the data is a local date-time - * and does not represent a single moment in time. Instead it represents a wall clock - * time and care should be taken to avoid interpreting it semantically as an instant. - * - * * If the time zone is set to a valid value, values can be displayed as - * "localized" to that time zone, even though the underlying 64-bit - * integers are identical to the same data stored in UTC. Converting - * between time zones is a metadata-only operation and does not change the - * underlying values - * - * @param flatbuffers.Encoding= optionalEncoding - * @returns string|Uint8Array|null - */ -timezone():string|null -timezone(optionalEncoding:flatbuffers.Encoding):string|Uint8Array|null -timezone(optionalEncoding?:any):string|Uint8Array|null { - var offset = this.bb!.__offset(this.bb_pos, 6); - return offset ? 
this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null; -}; - -/** - * @param flatbuffers.Builder builder - */ -static startTimestamp(builder:flatbuffers.Builder) { - builder.startObject(2); -}; - -/** - * @param flatbuffers.Builder builder - * @param org.apache.arrow.flatbuf.TimeUnit unit - */ -static addUnit(builder:flatbuffers.Builder, unit:org.apache.arrow.flatbuf.TimeUnit) { - builder.addFieldInt16(0, unit, org.apache.arrow.flatbuf.TimeUnit.SECOND); -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Offset timezoneOffset - */ -static addTimezone(builder:flatbuffers.Builder, timezoneOffset:flatbuffers.Offset) { - builder.addFieldOffset(1, timezoneOffset, 0); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endTimestamp(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -static createTimestamp(builder:flatbuffers.Builder, unit:org.apache.arrow.flatbuf.TimeUnit, timezoneOffset:flatbuffers.Offset):flatbuffers.Offset { - Timestamp.startTimestamp(builder); - Timestamp.addUnit(builder, unit); - Timestamp.addTimezone(builder, timezoneOffset); - return Timestamp.endTimestamp(builder); -} -} -} -/** - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class Interval { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns Interval - */ -__init(i:number, bb:flatbuffers.ByteBuffer):Interval { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Interval= obj - * @returns Interval - */ -static getRootAsInterval(bb:flatbuffers.ByteBuffer, obj?:Interval):Interval { - return (obj || new Interval()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Interval= obj - * @returns Interval - */ -static getSizePrefixedRootAsInterval(bb:flatbuffers.ByteBuffer, obj?:Interval):Interval { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new Interval()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @returns org.apache.arrow.flatbuf.IntervalUnit - */ -unit():org.apache.arrow.flatbuf.IntervalUnit { - var offset = this.bb!.__offset(this.bb_pos, 4); - return offset ? 
/** */ (this.bb!.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.IntervalUnit.YEAR_MONTH; -}; - -/** - * @param flatbuffers.Builder builder - */ -static startInterval(builder:flatbuffers.Builder) { - builder.startObject(1); -}; - -/** - * @param flatbuffers.Builder builder - * @param org.apache.arrow.flatbuf.IntervalUnit unit - */ -static addUnit(builder:flatbuffers.Builder, unit:org.apache.arrow.flatbuf.IntervalUnit) { - builder.addFieldInt16(0, unit, org.apache.arrow.flatbuf.IntervalUnit.YEAR_MONTH); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endInterval(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -static createInterval(builder:flatbuffers.Builder, unit:org.apache.arrow.flatbuf.IntervalUnit):flatbuffers.Offset { - Interval.startInterval(builder); - Interval.addUnit(builder, unit); - return Interval.endInterval(builder); -} -} -} -/** - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class Duration { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns Duration - */ -__init(i:number, bb:flatbuffers.ByteBuffer):Duration { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Duration= obj - * @returns Duration - */ -static getRootAsDuration(bb:flatbuffers.ByteBuffer, obj?:Duration):Duration { - return (obj || new Duration()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Duration= obj - * @returns Duration - */ -static getSizePrefixedRootAsDuration(bb:flatbuffers.ByteBuffer, obj?:Duration):Duration { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new Duration()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @returns org.apache.arrow.flatbuf.TimeUnit - */ -unit():org.apache.arrow.flatbuf.TimeUnit { - var offset = this.bb!.__offset(this.bb_pos, 4); - return offset ? 
/** */ (this.bb!.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.TimeUnit.MILLISECOND; -}; - -/** - * @param flatbuffers.Builder builder - */ -static startDuration(builder:flatbuffers.Builder) { - builder.startObject(1); -}; - -/** - * @param flatbuffers.Builder builder - * @param org.apache.arrow.flatbuf.TimeUnit unit - */ -static addUnit(builder:flatbuffers.Builder, unit:org.apache.arrow.flatbuf.TimeUnit) { - builder.addFieldInt16(0, unit, org.apache.arrow.flatbuf.TimeUnit.MILLISECOND); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endDuration(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -static createDuration(builder:flatbuffers.Builder, unit:org.apache.arrow.flatbuf.TimeUnit):flatbuffers.Offset { - Duration.startDuration(builder); - Duration.addUnit(builder, unit); - return Duration.endDuration(builder); -} -} -} -/** - * ---------------------------------------------------------------------- - * user defined key value pairs to add custom metadata to arrow - * key namespacing is the responsibility of the user - * - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class KeyValue { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns KeyValue - */ -__init(i:number, bb:flatbuffers.ByteBuffer):KeyValue { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param KeyValue= obj - * @returns KeyValue - */ -static getRootAsKeyValue(bb:flatbuffers.ByteBuffer, obj?:KeyValue):KeyValue { - return (obj || new KeyValue()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param KeyValue= obj - * @returns KeyValue - */ -static getSizePrefixedRootAsKeyValue(bb:flatbuffers.ByteBuffer, obj?:KeyValue):KeyValue { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new KeyValue()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.Encoding= optionalEncoding - * @returns string|Uint8Array|null - */ -key():string|null -key(optionalEncoding:flatbuffers.Encoding):string|Uint8Array|null -key(optionalEncoding?:any):string|Uint8Array|null { - var offset = this.bb!.__offset(this.bb_pos, 4); - return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null; -}; - -/** - * @param flatbuffers.Encoding= optionalEncoding - * @returns string|Uint8Array|null - */ -value():string|null -value(optionalEncoding:flatbuffers.Encoding):string|Uint8Array|null -value(optionalEncoding?:any):string|Uint8Array|null { - var offset = this.bb!.__offset(this.bb_pos, 6); - return offset ? 
this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null; -}; - -/** - * @param flatbuffers.Builder builder - */ -static startKeyValue(builder:flatbuffers.Builder) { - builder.startObject(2); -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Offset keyOffset - */ -static addKey(builder:flatbuffers.Builder, keyOffset:flatbuffers.Offset) { - builder.addFieldOffset(0, keyOffset, 0); -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Offset valueOffset - */ -static addValue(builder:flatbuffers.Builder, valueOffset:flatbuffers.Offset) { - builder.addFieldOffset(1, valueOffset, 0); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endKeyValue(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -static createKeyValue(builder:flatbuffers.Builder, keyOffset:flatbuffers.Offset, valueOffset:flatbuffers.Offset):flatbuffers.Offset { - KeyValue.startKeyValue(builder); - KeyValue.addKey(builder, keyOffset); - KeyValue.addValue(builder, valueOffset); - return KeyValue.endKeyValue(builder); -} -} -} -/** - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class DictionaryEncoding { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns DictionaryEncoding - */ -__init(i:number, bb:flatbuffers.ByteBuffer):DictionaryEncoding { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param DictionaryEncoding= obj - * @returns DictionaryEncoding - */ -static getRootAsDictionaryEncoding(bb:flatbuffers.ByteBuffer, obj?:DictionaryEncoding):DictionaryEncoding { - return (obj || new DictionaryEncoding()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param DictionaryEncoding= obj - * @returns DictionaryEncoding - */ -static getSizePrefixedRootAsDictionaryEncoding(bb:flatbuffers.ByteBuffer, obj?:DictionaryEncoding):DictionaryEncoding { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new DictionaryEncoding()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * The known dictionary id in the application where this data is used. In - * the file or streaming formats, the dictionary ids are found in the - * DictionaryBatch messages - * - * @returns flatbuffers.Long - */ -id():flatbuffers.Long { - var offset = this.bb!.__offset(this.bb_pos, 4); - return offset ? this.bb!.readInt64(this.bb_pos + offset) : this.bb!.createLong(0, 0); -}; - -/** - * The dictionary indices are constrained to be non-negative integers. If - * this field is null, the indices must be signed int32. To maximize - * cross-language compatibility and performance, implementations are - * recommended to prefer signed integer types over unsigned integer types - * and to avoid uint64 indices unless they are required by an application. - * - * @param org.apache.arrow.flatbuf.Int= obj - * @returns org.apache.arrow.flatbuf.Int|null - */ -indexType(obj?:org.apache.arrow.flatbuf.Int):org.apache.arrow.flatbuf.Int|null { - var offset = this.bb!.__offset(this.bb_pos, 6); - return offset ? (obj || new org.apache.arrow.flatbuf.Int()).__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) : null; -}; - -/** - * By default, dictionaries are not ordered, or the order does not have - * semantic meaning. 
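For orientation while reading these deleted bindings: the Java flatbuffers API that replaces them exposes the same DictionaryEncoding table through generated org.apache.arrow.flatbuf classes. A minimal sketch of inspecting a dictionary-encoded Field, assuming those classes are available; the helper class and output format are illustrative:

import org.apache.arrow.flatbuf.DictionaryEncoding;
import org.apache.arrow.flatbuf.Field;
import org.apache.arrow.flatbuf.Int;

class DictionarySketch {
    static void describe(Field field) {
        DictionaryEncoding dict = field.dictionary();
        if (dict == null) {
            return; // plain field, not dictionary encoded
        }
        long id = dict.id(); // matches the ids in DictionaryBatch messages
        Int indexType = dict.indexType(); // null implies signed int32 indices
        int indexBits = indexType == null ? 32 : indexType.bitWidth();
        System.out.println("dictionary id=" + id
                + " indexBits=" + indexBits
                + " ordered=" + dict.isOrdered());
    }
}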
In some statistical applications, dictionary-encoding - * is used to represent ordered categorical data, and we provide a way to - * preserve that metadata here - * - * @returns boolean - */ -isOrdered():boolean { - var offset = this.bb!.__offset(this.bb_pos, 8); - return offset ? !!this.bb!.readInt8(this.bb_pos + offset) : false; -}; - -/** - * @returns org.apache.arrow.flatbuf.DictionaryKind - */ -dictionaryKind():org.apache.arrow.flatbuf.DictionaryKind { - var offset = this.bb!.__offset(this.bb_pos, 10); - return offset ? /** */ (this.bb!.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.DictionaryKind.DenseArray; -}; - -/** - * @param flatbuffers.Builder builder - */ -static startDictionaryEncoding(builder:flatbuffers.Builder) { - builder.startObject(4); -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Long id - */ -static addId(builder:flatbuffers.Builder, id:flatbuffers.Long) { - builder.addFieldInt64(0, id, builder.createLong(0, 0)); -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Offset indexTypeOffset - */ -static addIndexType(builder:flatbuffers.Builder, indexTypeOffset:flatbuffers.Offset) { - builder.addFieldOffset(1, indexTypeOffset, 0); -}; - -/** - * @param flatbuffers.Builder builder - * @param boolean isOrdered - */ -static addIsOrdered(builder:flatbuffers.Builder, isOrdered:boolean) { - builder.addFieldInt8(2, +isOrdered, +false); -}; - -/** - * @param flatbuffers.Builder builder - * @param org.apache.arrow.flatbuf.DictionaryKind dictionaryKind - */ -static addDictionaryKind(builder:flatbuffers.Builder, dictionaryKind:org.apache.arrow.flatbuf.DictionaryKind) { - builder.addFieldInt16(3, dictionaryKind, org.apache.arrow.flatbuf.DictionaryKind.DenseArray); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endDictionaryEncoding(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -} -} -/** - * ---------------------------------------------------------------------- - * A field represents a named column in a record / row batch or child of a - * nested type. - * - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class Field { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns Field - */ -__init(i:number, bb:flatbuffers.ByteBuffer):Field { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Field= obj - * @returns Field - */ -static getRootAsField(bb:flatbuffers.ByteBuffer, obj?:Field):Field { - return (obj || new Field()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Field= obj - * @returns Field - */ -static getSizePrefixedRootAsField(bb:flatbuffers.ByteBuffer, obj?:Field):Field { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new Field()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * Name is not required, e.g. in a List - * - * @param flatbuffers.Encoding= optionalEncoding - * @returns string|Uint8Array|null - */ -name():string|null -name(optionalEncoding:flatbuffers.Encoding):string|Uint8Array|null -name(optionalEncoding?:any):string|Uint8Array|null { - var offset = this.bb!.__offset(this.bb_pos, 4); - return offset ?
this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null; -}; - -/** - * Whether or not this field can contain nulls. Should be true in general. - * - * @returns boolean - */ -nullable():boolean { - var offset = this.bb!.__offset(this.bb_pos, 6); - return offset ? !!this.bb!.readInt8(this.bb_pos + offset) : false; -}; - -/** - * @returns org.apache.arrow.flatbuf.Type - */ -typeType():org.apache.arrow.flatbuf.Type { - var offset = this.bb!.__offset(this.bb_pos, 8); - return offset ? /** */ (this.bb!.readUint8(this.bb_pos + offset)) : org.apache.arrow.flatbuf.Type.NONE; -}; - -/** - * This is the type of the decoded value if the field is dictionary encoded. - * - * @param flatbuffers.Table obj - * @returns ?flatbuffers.Table - */ -type(obj:T):T|null { - var offset = this.bb!.__offset(this.bb_pos, 10); - return offset ? this.bb!.__union(obj, this.bb_pos + offset) : null; -}; - -/** - * Present only if the field is dictionary encoded. - * - * @param org.apache.arrow.flatbuf.DictionaryEncoding= obj - * @returns org.apache.arrow.flatbuf.DictionaryEncoding|null - */ -dictionary(obj?:org.apache.arrow.flatbuf.DictionaryEncoding):org.apache.arrow.flatbuf.DictionaryEncoding|null { - var offset = this.bb!.__offset(this.bb_pos, 12); - return offset ? (obj || new org.apache.arrow.flatbuf.DictionaryEncoding()).__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) : null; -}; - -/** - * children apply only to nested data types like Struct, List and Union. For - * primitive types children will have length 0. - * - * @param number index - * @param org.apache.arrow.flatbuf.Field= obj - * @returns org.apache.arrow.flatbuf.Field - */ -children(index: number, obj?:org.apache.arrow.flatbuf.Field):org.apache.arrow.flatbuf.Field|null { - var offset = this.bb!.__offset(this.bb_pos, 14); - return offset ? (obj || new org.apache.arrow.flatbuf.Field()).__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) : null; -}; - -/** - * @returns number - */ -childrenLength():number { - var offset = this.bb!.__offset(this.bb_pos, 14); - return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0; -}; - -/** - * User-defined metadata - * - * @param number index - * @param org.apache.arrow.flatbuf.KeyValue= obj - * @returns org.apache.arrow.flatbuf.KeyValue - */ -customMetadata(index: number, obj?:org.apache.arrow.flatbuf.KeyValue):org.apache.arrow.flatbuf.KeyValue|null { - var offset = this.bb!.__offset(this.bb_pos, 16); - return offset ? (obj || new org.apache.arrow.flatbuf.KeyValue()).__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) : null; -}; - -/** - * @returns number - */ -customMetadataLength():number { - var offset = this.bb!.__offset(this.bb_pos, 16); - return offset ? 
this.bb!.__vector_len(this.bb_pos + offset) : 0; -}; - -/** - * @param flatbuffers.Builder builder - */ -static startField(builder:flatbuffers.Builder) { - builder.startObject(7); -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Offset nameOffset - */ -static addName(builder:flatbuffers.Builder, nameOffset:flatbuffers.Offset) { - builder.addFieldOffset(0, nameOffset, 0); -}; - -/** - * @param flatbuffers.Builder builder - * @param boolean nullable - */ -static addNullable(builder:flatbuffers.Builder, nullable:boolean) { - builder.addFieldInt8(1, +nullable, +false); -}; - -/** - * @param flatbuffers.Builder builder - * @param org.apache.arrow.flatbuf.Type typeType - */ -static addTypeType(builder:flatbuffers.Builder, typeType:org.apache.arrow.flatbuf.Type) { - builder.addFieldInt8(2, typeType, org.apache.arrow.flatbuf.Type.NONE); -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Offset typeOffset - */ -static addType(builder:flatbuffers.Builder, typeOffset:flatbuffers.Offset) { - builder.addFieldOffset(3, typeOffset, 0); -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Offset dictionaryOffset - */ -static addDictionary(builder:flatbuffers.Builder, dictionaryOffset:flatbuffers.Offset) { - builder.addFieldOffset(4, dictionaryOffset, 0); -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Offset childrenOffset - */ -static addChildren(builder:flatbuffers.Builder, childrenOffset:flatbuffers.Offset) { - builder.addFieldOffset(5, childrenOffset, 0); -}; - -/** - * @param flatbuffers.Builder builder - * @param Array. data - * @returns flatbuffers.Offset - */ -static createChildrenVector(builder:flatbuffers.Builder, data:flatbuffers.Offset[]):flatbuffers.Offset { - builder.startVector(4, data.length, 4); - for (var i = data.length - 1; i >= 0; i--) { - builder.addOffset(data[i]); - } - return builder.endVector(); -}; - -/** - * @param flatbuffers.Builder builder - * @param number numElems - */ -static startChildrenVector(builder:flatbuffers.Builder, numElems:number) { - builder.startVector(4, numElems, 4); -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Offset customMetadataOffset - */ -static addCustomMetadata(builder:flatbuffers.Builder, customMetadataOffset:flatbuffers.Offset) { - builder.addFieldOffset(6, customMetadataOffset, 0); -}; - -/** - * @param flatbuffers.Builder builder - * @param Array. 
data - * @returns flatbuffers.Offset - */ -static createCustomMetadataVector(builder:flatbuffers.Builder, data:flatbuffers.Offset[]):flatbuffers.Offset { - builder.startVector(4, data.length, 4); - for (var i = data.length - 1; i >= 0; i--) { - builder.addOffset(data[i]); - } - return builder.endVector(); -}; - -/** - * @param flatbuffers.Builder builder - * @param number numElems - */ -static startCustomMetadataVector(builder:flatbuffers.Builder, numElems:number) { - builder.startVector(4, numElems, 4); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endField(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -} -} -/** - * ---------------------------------------------------------------------- - * A Buffer represents a single contiguous memory segment - * - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class Buffer { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns Buffer - */ -__init(i:number, bb:flatbuffers.ByteBuffer):Buffer { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * The relative offset into the shared memory page where the bytes for this - * buffer starts - * - * @returns flatbuffers.Long - */ -offset():flatbuffers.Long { - return this.bb!.readInt64(this.bb_pos); -}; - -/** - * The absolute length (in bytes) of the memory buffer. The memory is found - * from offset (inclusive) to offset + length (non-inclusive). When building - * messages using the encapsulated IPC message, padding bytes may be written - * after a buffer, but such padding bytes do not need to be accounted for in - * the size here. - * - * @returns flatbuffers.Long - */ -length():flatbuffers.Long { - return this.bb!.readInt64(this.bb_pos + 8); -}; - -/** - * @returns number - */ -static sizeOf():number { - return 16; -} - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Long offset - * @param flatbuffers.Long length - * @returns flatbuffers.Offset - */ -static createBuffer(builder:flatbuffers.Builder, offset: flatbuffers.Long, length: flatbuffers.Long):flatbuffers.Offset { - builder.prep(8, 16); - builder.writeInt64(length); - builder.writeInt64(offset); - return builder.offset(); -}; - -} -} -/** - * ---------------------------------------------------------------------- - * A Schema describes the columns in a row batch - * - * @constructor - */ -export namespace org.apache.arrow.flatbuf{ -export class Schema { - bb: flatbuffers.ByteBuffer|null = null; - - bb_pos:number = 0; -/** - * @param number i - * @param flatbuffers.ByteBuffer bb - * @returns Schema - */ -__init(i:number, bb:flatbuffers.ByteBuffer):Schema { - this.bb_pos = i; - this.bb = bb; - return this; -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Schema= obj - * @returns Schema - */ -static getRootAsSchema(bb:flatbuffers.ByteBuffer, obj?:Schema):Schema { - return (obj || new Schema()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * @param flatbuffers.ByteBuffer bb - * @param Schema= obj - * @returns Schema - */ -static getSizePrefixedRootAsSchema(bb:flatbuffers.ByteBuffer, obj?:Schema):Schema { - bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH); - return (obj || new Schema()).__init(bb.readInt32(bb.position()) + bb.position(), bb); -}; - -/** - * endianness of the buffer - * it is Little Endian by default - * if endianness doesn't match the 
underlying system then the vectors need to be converted - * - * @returns org.apache.arrow.flatbuf.Endianness - */ -endianness():org.apache.arrow.flatbuf.Endianness { - var offset = this.bb!.__offset(this.bb_pos, 4); - return offset ? /** */ (this.bb!.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.Endianness.Little; -}; - -/** - * @param number index - * @param org.apache.arrow.flatbuf.Field= obj - * @returns org.apache.arrow.flatbuf.Field - */ -fields(index: number, obj?:org.apache.arrow.flatbuf.Field):org.apache.arrow.flatbuf.Field|null { - var offset = this.bb!.__offset(this.bb_pos, 6); - return offset ? (obj || new org.apache.arrow.flatbuf.Field()).__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) : null; -}; - -/** - * @returns number - */ -fieldsLength():number { - var offset = this.bb!.__offset(this.bb_pos, 6); - return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0; -}; - -/** - * @param number index - * @param org.apache.arrow.flatbuf.KeyValue= obj - * @returns org.apache.arrow.flatbuf.KeyValue - */ -customMetadata(index: number, obj?:org.apache.arrow.flatbuf.KeyValue):org.apache.arrow.flatbuf.KeyValue|null { - var offset = this.bb!.__offset(this.bb_pos, 8); - return offset ? (obj || new org.apache.arrow.flatbuf.KeyValue()).__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) : null; -}; - -/** - * @returns number - */ -customMetadataLength():number { - var offset = this.bb!.__offset(this.bb_pos, 8); - return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0; -}; - -/** - * Features used in the stream/file. - * - * @param number index - * @returns flatbuffers.Long - */ -features(index: number):flatbuffers.Long|null { - var offset = this.bb!.__offset(this.bb_pos, 10); - return offset ? /** */ (this.bb!.readInt64(this.bb!.__vector(this.bb_pos + offset) + index * 8)) : this.bb!.createLong(0, 0); -}; - -/** - * @returns number - */ -featuresLength():number { - var offset = this.bb!.__offset(this.bb_pos, 10); - return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0; -}; - -/** - * @param flatbuffers.Builder builder - */ -static startSchema(builder:flatbuffers.Builder) { - builder.startObject(4); -}; - -/** - * @param flatbuffers.Builder builder - * @param org.apache.arrow.flatbuf.Endianness endianness - */ -static addEndianness(builder:flatbuffers.Builder, endianness:org.apache.arrow.flatbuf.Endianness) { - builder.addFieldInt16(0, endianness, org.apache.arrow.flatbuf.Endianness.Little); -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Offset fieldsOffset - */ -static addFields(builder:flatbuffers.Builder, fieldsOffset:flatbuffers.Offset) { - builder.addFieldOffset(1, fieldsOffset, 0); -}; - -/** - * @param flatbuffers.Builder builder - * @param Array. 
data - * @returns flatbuffers.Offset - */ -static createFieldsVector(builder:flatbuffers.Builder, data:flatbuffers.Offset[]):flatbuffers.Offset { - builder.startVector(4, data.length, 4); - for (var i = data.length - 1; i >= 0; i--) { - builder.addOffset(data[i]); - } - return builder.endVector(); -}; - -/** - * @param flatbuffers.Builder builder - * @param number numElems - */ -static startFieldsVector(builder:flatbuffers.Builder, numElems:number) { - builder.startVector(4, numElems, 4); -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Offset customMetadataOffset - */ -static addCustomMetadata(builder:flatbuffers.Builder, customMetadataOffset:flatbuffers.Offset) { - builder.addFieldOffset(2, customMetadataOffset, 0); -}; - -/** - * @param flatbuffers.Builder builder - * @param Array. data - * @returns flatbuffers.Offset - */ -static createCustomMetadataVector(builder:flatbuffers.Builder, data:flatbuffers.Offset[]):flatbuffers.Offset { - builder.startVector(4, data.length, 4); - for (var i = data.length - 1; i >= 0; i--) { - builder.addOffset(data[i]); - } - return builder.endVector(); -}; - -/** - * @param flatbuffers.Builder builder - * @param number numElems - */ -static startCustomMetadataVector(builder:flatbuffers.Builder, numElems:number) { - builder.startVector(4, numElems, 4); -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Offset featuresOffset - */ -static addFeatures(builder:flatbuffers.Builder, featuresOffset:flatbuffers.Offset) { - builder.addFieldOffset(3, featuresOffset, 0); -}; - -/** - * @param flatbuffers.Builder builder - * @param Array. data - * @returns flatbuffers.Offset - */ -static createFeaturesVector(builder:flatbuffers.Builder, data:flatbuffers.Long[]):flatbuffers.Offset { - builder.startVector(8, data.length, 8); - for (var i = data.length - 1; i >= 0; i--) { - builder.addInt64(data[i]); - } - return builder.endVector(); -}; - -/** - * @param flatbuffers.Builder builder - * @param number numElems - */ -static startFeaturesVector(builder:flatbuffers.Builder, numElems:number) { - builder.startVector(8, numElems, 8); -}; - -/** - * @param flatbuffers.Builder builder - * @returns flatbuffers.Offset - */ -static endSchema(builder:flatbuffers.Builder):flatbuffers.Offset { - var offset = builder.endObject(); - return offset; -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Offset offset - */ -static finishSchemaBuffer(builder:flatbuffers.Builder, offset:flatbuffers.Offset) { - builder.finish(offset); -}; - -/** - * @param flatbuffers.Builder builder - * @param flatbuffers.Offset offset - */ -static finishSizePrefixedSchemaBuffer(builder:flatbuffers.Builder, offset:flatbuffers.Offset) { - builder.finish(offset, undefined, true); -}; - -static createSchema(builder:flatbuffers.Builder, endianness:org.apache.arrow.flatbuf.Endianness, fieldsOffset:flatbuffers.Offset, customMetadataOffset:flatbuffers.Offset, featuresOffset:flatbuffers.Offset):flatbuffers.Offset { - Schema.startSchema(builder); - Schema.addEndianness(builder, endianness); - Schema.addFields(builder, fieldsOffset); - Schema.addCustomMetadata(builder, customMetadataOffset); - Schema.addFeatures(builder, featuresOffset); - return Schema.endSchema(builder); -} -} -} diff --git a/proto/raw-js-openapi/src/index.js b/proto/raw-js-openapi/src/index.js index d753c1346cc..0575c4b2580 100644 --- a/proto/raw-js-openapi/src/index.js +++ b/proto/raw-js-openapi/src/index.js @@ -28,11 +28,6 @@ var browserHeaders = require("browser-headers"); var grpcWeb 
= require("@improbable-eng/grpc-web");//usually .grpc var jspb = require("google-protobuf"); -// var flatbuffers = require("flatbuffers").flatbuffers; -// var barrage = require("@deephaven/barrage"); - -// var message = require('./arrow/flight/flatbuf/Message_generated'); -// var schema = require('./arrow/flight/flatbuf/Schema_generated'); var io = { deephaven: { proto: { @@ -58,17 +53,8 @@ var io = { deephaven: { hierarchicaltable_pb: proto.io.deephaven.proto.backplane.grpc, hierarchicaltable_pb_service: hierarchicalTableService }, - // barrage: { - // "flatbuf": { - // "Barrage_generated": barrage, - // } - // } }}; var arrow = { flight: { - // flatbuf: { - // Message_generated: message, - // Schema_generated: schema, - // }, protocol: { Flight_pb: proto.arrow.flight.protocol, Flight_pb_service: flightService, @@ -80,7 +66,6 @@ var dhinternal = { browserHeaders, jspb, grpcWeb,//TODO need to expand this to the specific things we need - // flatbuffers, io, arrow }; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java b/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java index b9d77372adf..2c781af0f3a 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java @@ -124,9 +124,8 @@ import static io.deephaven.web.client.api.barrage.WebBarrageUtils.DeltaUpdatesBuilder; import static io.deephaven.web.client.api.barrage.WebBarrageUtils.createSnapshot; import static io.deephaven.web.client.api.barrage.WebBarrageUtils.deltaUpdates; -import static io.deephaven.web.client.api.barrage.WebBarrageUtils.makeUint8ArrayFromBitset; import static io.deephaven.web.client.api.barrage.WebBarrageUtils.serializeRanges; -import static io.deephaven.web.client.api.barrage.WebBarrageUtils.typedArrayToLittleEndianByteBuffer; +import static io.deephaven.web.client.api.barrage.WebBarrageUtils.typedArrayToAlignedLittleEndianByteBuffer; import static io.deephaven.web.client.api.barrage.WebGrpcUtils.CLIENT_OPTIONS; /** @@ -1213,7 +1212,7 @@ private static Uint8Array createMessage(FlatBufferBuilder payload, byte messageH int bodyLength, int customMetadataOffset) { payload.finish(Message.createMessage(payload, MetadataVersion.V5, messageHeaderType, messageHeaderOffset, bodyLength, customMetadataOffset)); - return new Uint8Array(TypedArrayHelper.unwrap(payload.dataBuffer())); + return WebBarrageUtils.bbToUint8ArrayView(payload.dataBuffer()); } public Promise mergeTables(JsTable[] tables, HasEventHandling failHandler) { @@ -1437,9 +1436,8 @@ private void flush() { int serializationOptionsOffset = BarrageSubscriptionOptions .createBarrageSubscriptionOptions(subscriptionReq, ColumnConversionMode.Stringify, true, 1000, 0, 0, false); - int tableTicketOffset = - BarrageSubscriptionRequest.createTicketVector(subscriptionReq, - TypedArrayHelper.wrap(state.getHandle().getTicket())); + int tableTicketOffset = BarrageSubscriptionRequest.createTicketVector(subscriptionReq, + Js.uncheckedCast(state.getHandle().getTicket())); BarrageSubscriptionRequest.startBarrageSubscriptionRequest(subscriptionReq); BarrageSubscriptionRequest.addColumns(subscriptionReq, columnsOffset); BarrageSubscriptionRequest.addSubscriptionOptions(subscriptionReq, serializationOptionsOffset); @@ -1461,7 +1459,7 @@ private void flush() { stream.onData(new JsConsumer() { @Override public void apply(FlightData data) { - ByteBuffer body = 
typedArrayToLittleEndianByteBuffer(data.getDataBody_asU8()); + ByteBuffer body = typedArrayToAlignedLittleEndianByteBuffer(data.getDataBody_asU8()); Message headerMessage = Message .getRootAsMessage(TypedArrayHelper.wrap(data.getDataHeader_asU8())); if (body.limit() == 0 && headerMessage.headerType() != MessageHeader.RecordBatch) { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java index 27e910a47c0..a099b4c34a7 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java @@ -36,7 +36,6 @@ import java.util.Iterator; import java.util.Map; import java.util.Set; -import java.util.function.DoubleFunction; import java.util.function.IntFunction; import java.util.stream.IntStream; @@ -53,14 +52,20 @@ public static Uint8Array wrapMessage(FlatBufferBuilder innerBuilder, byte messag int offset = BarrageMessageWrapper.createBarrageMessageWrapper(outerBuilder, MAGIC, messageType, messageOffset); outerBuilder.finish(offset); - return new Uint8Array(TypedArrayHelper.unwrap(outerBuilder.dataBuffer().slice())); + ByteBuffer byteBuffer = outerBuilder.dataBuffer(); + return bbToUint8ArrayView(byteBuffer); + } + + public static Uint8Array bbToUint8ArrayView(ByteBuffer byteBuffer) { + ArrayBufferView view = TypedArrayHelper.unwrap(byteBuffer); + return new Uint8Array(view.buffer, byteBuffer.position() + view.byteOffset, byteBuffer.remaining()); } public static Uint8Array emptyMessage() { FlatBufferBuilder builder = new FlatBufferBuilder(1024); int offset = BarrageMessageWrapper.createBarrageMessageWrapper(builder, MAGIC, BarrageMessageType.None, 0); builder.finish(offset); - return new Uint8Array(TypedArrayHelper.unwrap(builder.dataBuffer())); + return bbToUint8ArrayView(builder.dataBuffer()); } public static InitialTableDefinition readTableDefinition(Schema schema) { @@ -199,7 +204,7 @@ public static Uint8Array makeUint8ArrayFromBitset(BitSet bitset) { public static ByteBuffer serializeRanges(Set rangeSets) { final RangeSet s; - if (rangeSets.size() == 0) { + if (rangeSets.isEmpty()) { return ByteBuffer.allocate(0); } else if (rangeSets.size() == 1) { s = rangeSets.iterator().next(); @@ -210,18 +215,12 @@ public static ByteBuffer serializeRanges(Set rangeSets) { } } - ByteBuffer payload = CompressedRangeSetReader.writeRange(s); - return payload; + return CompressedRangeSetReader.writeRange(s); } - public static ByteBuffer typedArrayToLittleEndianByteBuffer(Uint8Array data) { - ByteBuffer bb = TypedArrayHelper.wrap(data); - bb.order(ByteOrder.LITTLE_ENDIAN); - return bb; - } - - public static ByteBuffer typedArrayToLittleEndianByteBuffer(Int8Array data) { - ByteBuffer bb = TypedArrayHelper.wrap(data); + public static ByteBuffer typedArrayToAlignedLittleEndianByteBuffer(TypedArray data) { + // Slice before wrapping to align contents + ByteBuffer bb = TypedArrayHelper.wrap(data.slice()); bb.order(ByteOrder.LITTLE_ENDIAN); return bb; } @@ -379,19 +378,20 @@ private static ColumnData readArrowBuffer(ByteBuffer data, Iter nodes Buffer positions = buffers.next(); switch (columnType) { // for simple well-supported typedarray types, wrap and return - case "int": - assert positions.length() >= size * 4; - Int32Array intArray = new Int32Array(TypedArrayHelper.unwrap(data).buffer, - (int) positions.offset(), size); + case "int": { + assert positions.length() 
>= size * 4L; + ArrayBufferView view = TypedArrayHelper.unwrap(data); + Int32Array intArray = new Int32Array(view.buffer, (int) (view.byteOffset + positions.offset()), size); return new IntArrayColumnData(Js.uncheckedCast(intArray)); - case "short": - assert positions.length() >= size * 2; - Int16Array shortArray = new Int16Array(TypedArrayHelper.unwrap(data).buffer, - (int) positions.offset(), size); + } + case "short": { + assert positions.length() >= size * 2L; + ArrayBufferView view = TypedArrayHelper.unwrap(data); + Int16Array shortArray = new Int16Array(view.buffer, (int) (view.byteOffset + positions.offset()), size); return new ShortArrayColumnData(Js.uncheckedCast(shortArray)); + } case "boolean": - case "java.lang.Boolean": - // noinspection IntegerDivisionInFloatingPointContext + case "java.lang.Boolean": { assert positions.length() >= ((size + 63) / 64); // booleans are stored as a bitset, but internally we represent booleans as bytes data.position((int) positions.offset()); @@ -403,31 +403,40 @@ private static ColumnData readArrowBuffer(ByteBuffer data, Iter nodes } } return new BooleanArrayColumnData(boolArray); - case "byte": + } + case "byte": { assert positions.length() >= size; + ArrayBufferView view = TypedArrayHelper.unwrap(data); Int8Array byteArray = - new Int8Array(TypedArrayHelper.unwrap(data).buffer, (int) positions.offset(), size); + new Int8Array(view.buffer, (int) (view.byteOffset + positions.offset()), size); return new ByteArrayColumnData(Js.uncheckedCast(byteArray)); - case "double": - assert positions.length() >= size * 8; - Float64Array doubleArray = new Float64Array(TypedArrayHelper.unwrap(data).buffer, - (int) positions.offset(), size); + } + case "double": { + assert positions.length() >= size * 8L; + ArrayBufferView view = TypedArrayHelper.unwrap(data); + Float64Array doubleArray = new Float64Array(view.buffer, + (int) (view.byteOffset + positions.offset()), size); return new DoubleArrayColumnData(Js.uncheckedCast(doubleArray)); - case "float": - assert positions.length() >= size * 4; - Float32Array floatArray = new Float32Array(TypedArrayHelper.unwrap(data).buffer, - (int) positions.offset(), size); + } + case "float": { + assert positions.length() >= size * 4L; + ArrayBufferView view = TypedArrayHelper.unwrap(data); + Float32Array floatArray = new Float32Array(view.buffer, + (int) (view.byteOffset + positions.offset()), size); return new FloatArrayColumnData(Js.uncheckedCast(floatArray)); - case "char": - assert positions.length() >= size * 2; - Uint16Array charArray = new Uint16Array(TypedArrayHelper.unwrap(data).buffer, - (int) positions.offset(), size); + } + case "char": { + assert positions.length() >= size * 2L; + ArrayBufferView view = TypedArrayHelper.unwrap(data); + Uint16Array charArray = new Uint16Array(view.buffer, + (int) (view.byteOffset + positions.offset()), size); return new CharArrayColumnData(Js.uncheckedCast(charArray)); + } // longs are a special case despite being java primitives case "long": case "java.time.Instant": - case "java.time.ZonedDateTime": - assert positions.length() >= size * 8; + case "java.time.ZonedDateTime": { + assert positions.length() >= size * 8L; long[] longArray = new long[size]; data.position((int) positions.offset()); @@ -435,9 +444,11 @@ private static ColumnData readArrowBuffer(ByteBuffer data, Iter nodes longArray[i] = data.getLong(); } return new LongArrayColumnData(longArray); + } // all other types are read out in some custom way case "java.time.LocalTime":// LocalDateArrayColumnData - assert 
positions.length() >= size * 6; + { + assert positions.length() >= size * 6L; data.position((int) positions.offset()); LocalDate[] localDateArray = new LocalDate[size]; for (int i = 0; i < size; i++) { @@ -447,8 +458,10 @@ private static ColumnData readArrowBuffer(ByteBuffer data, Iter nodes localDateArray[i] = new LocalDate(year, month, day); } return new LocalDateArrayColumnData(localDateArray); + } case "java.time.LocalDate":// LocalTimeArrayColumnData - assert positions.length() == size * 7; + { + assert positions.length() == size * 7L; LocalTime[] localTimeArray = new LocalTime[size]; data.position((int) positions.offset()); @@ -461,6 +474,7 @@ private static ColumnData readArrowBuffer(ByteBuffer data, Iter nodes localTimeArray[i] = new LocalTime(hour, minute, second, nano); } return new LocalTimeArrayColumnData(localTimeArray); + } default: // remaining types have an offset buffer to read first IntBuffer offsets = readOffsets(data, size, positions); @@ -618,5 +632,4 @@ private static IntBuffer readOffsets(ByteBuffer data, int size, Buffer buffer) { offsets.limit(size + 1); return offsets; } - } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java index cd2acacb189..e5de2078083 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java @@ -49,7 +49,6 @@ import java.util.Collections; import java.util.Iterator; -import static io.deephaven.web.client.api.barrage.WebBarrageUtils.makeUint8ArrayFromBitset; import static io.deephaven.web.client.api.barrage.WebBarrageUtils.serializeRanges; import static io.deephaven.web.client.api.subscription.ViewportData.NO_ROW_FORMAT_COLUMN; @@ -364,27 +363,9 @@ public Promise snapshot(JsRangeSet rows, Column[] columns) { barrageMessageWrapper.msgPayloadAsByteBuffer()); } TableSnapshot snapshot = WebBarrageUtils.createSnapshot(header, - WebBarrageUtils.typedArrayToLittleEndianByteBuffer(flightData.getDataBody_asU8()), update, - true, - columnTypes); - - // TODO deephaven-core(#188) this check no longer makes sense - Iterator rangeIterator = rows.getRange().rangeIterator(); - long expectedCount = 0; - while (rangeIterator.hasNext()) { - Range range = rangeIterator.next(); - if (range.getFirst() >= snapshot.getTableSize()) { - break; - } - long end = Math.min(range.getLast(), snapshot.getTableSize()); - expectedCount += end - range.getFirst() + 1; - } - if (expectedCount != snapshot.getIncludedRows().size()) { - callback.onFailure("Server did not send expected number of rows, expected " + expectedCount - + ", actual " + snapshot.getIncludedRows().size()); - } else { - callback.onSuccess(snapshot); - } + WebBarrageUtils.typedArrayToAlignedLittleEndianByteBuffer(flightData.getDataBody_asU8()), + update, true, columnTypes); + callback.onSuccess(snapshot); }); stream.onStatus(status -> { if (!status.isOk()) { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java index 7e53c04708d..e4e76075c1a 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java @@ -67,7 +67,6 @@ import java.util.function.Function; import 
java.util.stream.Collectors; -import static io.deephaven.web.client.api.barrage.WebBarrageUtils.makeUint8ArrayFromBitset; import static io.deephaven.web.client.api.barrage.WebBarrageUtils.serializeRanges; import static io.deephaven.web.client.api.subscription.ViewportData.NO_ROW_FORMAT_COLUMN; @@ -678,7 +677,7 @@ private void replaceSubscription(RebuildStep step) { updateInterval, 0, 0, false); int tableTicketOffset = BarrageSubscriptionRequest.createTicketVector(doGetRequest, - TypedArrayHelper.wrap(viewTicket.ticket().getTicket_asU8())); + Js.uncheckedCast(viewTicket.ticket().getTicket_asU8())); BarrageSubscriptionRequest.startBarrageSubscriptionRequest(doGetRequest); BarrageSubscriptionRequest.addTicket(doGetRequest, tableTicketOffset); BarrageSubscriptionRequest.addColumns(doGetRequest, columnsOffset); @@ -721,10 +720,9 @@ private void replaceSubscription(RebuildStep step) { barrageMessageWrapper.msgPayloadAsByteBuffer()); } TableSnapshot snapshot = WebBarrageUtils.createSnapshot(header, - WebBarrageUtils.typedArrayToLittleEndianByteBuffer(flightData.getDataBody_asU8()), - update, - true, - columnTypes); + WebBarrageUtils + .typedArrayToAlignedLittleEndianByteBuffer(flightData.getDataBody_asU8()), + update, true, columnTypes); final RangeSet includedRows = snapshot.getIncludedRows(); double offset = firstRow; diff --git a/web/client-api/src/main/resources/com/google/flatbuffers/super/com/google/flatbuffers/ArrayReadWriteBuf.java b/web/client-api/src/main/resources/com/google/flatbuffers/super/com/google/flatbuffers/ArrayReadWriteBuf.java index 5e46eedb823..c0528ee885f 100644 --- a/web/client-api/src/main/resources/com/google/flatbuffers/super/com/google/flatbuffers/ArrayReadWriteBuf.java +++ b/web/client-api/src/main/resources/com/google/flatbuffers/super/com/google/flatbuffers/ArrayReadWriteBuf.java @@ -1,6 +1,6 @@ package com.google.flatbuffers; -import java.nio.Numbers; +import org.gwtproject.nio.Numbers; import java.util.Arrays; /** @@ -198,7 +198,7 @@ public void setLong(int index, long value) { public void setFloat(int index, float value) { requestCapacity(index + 4); - int iValue = java.nio.Numbers.floatToIntBits(value); + int iValue = Numbers.floatToIntBits(value); buffer[index++] = (byte) ((iValue) & 0xff); buffer[index++] = (byte) ((iValue >> 8) & 0xff); buffer[index++] = (byte) ((iValue >> 16) & 0xff); @@ -209,7 +209,7 @@ public void setFloat(int index, float value) { public void setDouble(int index, double value) { requestCapacity(index + 8); - long lValue = java.nio.Numbers.doubleToRawLongBits(value); + long lValue = Numbers.doubleToRawLongBits(value); int i = (int) lValue; buffer[index++] = (byte) ((i) & 0xff); buffer[index++] = (byte) ((i >> 8) & 0xff); From 550023d32d39b8519d95a43d9be7332e4af4f642 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 29 Dec 2023 15:44:13 -0600 Subject: [PATCH 003/219] Delete generated gwt bindings --- .../org/apache/arrow/Flatbuf.java | 300 ---- .../apache/arrow/flatbuf/BodyCompression.java | 44 - .../arrow/flatbuf/BodyCompressionMethod.java | 15 - .../apache/arrow/flatbuf/CompressionType.java | 16 - .../apache/arrow/flatbuf/DictionaryBatch.java | 48 - .../org/apache/arrow/flatbuf/FieldNode.java | 29 - .../org/apache/arrow/flatbuf/Message.java | 82 - .../apache/arrow/flatbuf/MessageHeader.java | 18 - .../org/apache/arrow/flatbuf/RecordBatch.java | 64 - .../org/apache/arrow/Flatbuf.java | 1572 ----------------- .../org/apache/arrow/flatbuf/Binary.java | 34 - .../org/apache/arrow/flatbuf/Bool.java | 34 - 
.../org/apache/arrow/flatbuf/Buffer.java | 29 - .../org/apache/arrow/flatbuf/Date.java | 38 - .../org/apache/arrow/flatbuf/DateUnit.java | 16 - .../org/apache/arrow/flatbuf/Decimal.java | 47 - .../arrow/flatbuf/DictionaryEncoding.java | 53 - .../apache/arrow/flatbuf/DictionaryKind.java | 15 - .../org/apache/arrow/flatbuf/Duration.java | 38 - .../org/apache/arrow/flatbuf/Endianness.java | 16 - .../org/apache/arrow/flatbuf/Feature.java | 17 - .../org/apache/arrow/flatbuf/Field.java | 123 -- .../apache/arrow/flatbuf/FixedSizeBinary.java | 39 - .../apache/arrow/flatbuf/FixedSizeList.java | 39 - .../apache/arrow/flatbuf/FloatingPoint.java | 39 - .../org/apache/arrow/flatbuf/Int.java | 42 - .../org/apache/arrow/flatbuf/Interval.java | 38 - .../apache/arrow/flatbuf/IntervalUnit.java | 16 - .../org/apache/arrow/flatbuf/KeyValue.java | 106 -- .../org/apache/arrow/flatbuf/LargeBinary.java | 34 - .../org/apache/arrow/flatbuf/LargeList.java | 34 - .../org/apache/arrow/flatbuf/LargeUtf8.java | 34 - .../org/apache/arrow/flatbuf/List.java | 34 - .../org/apache/arrow/flatbuf/Map.java | 38 - .../apache/arrow/flatbuf/MetadataVersion.java | 19 - .../org/apache/arrow/flatbuf/Null.java | 34 - .../org/apache/arrow/flatbuf/Precision.java | 17 - .../org/apache/arrow/flatbuf/Schema.java | 100 -- .../org/apache/arrow/flatbuf/Struct_.java | 34 - .../org/apache/arrow/flatbuf/Time.java | 42 - .../org/apache/arrow/flatbuf/TimeUnit.java | 18 - .../org/apache/arrow/flatbuf/Timestamp.java | 77 - .../org/apache/arrow/flatbuf/Type.java | 36 - .../org/apache/arrow/flatbuf/Union.java | 122 -- .../org/apache/arrow/flatbuf/UnionMode.java | 16 - .../org/apache/arrow/flatbuf/Utf8.java | 34 - .../proto/dhinternal/flatbuffers/Builder.java | 143 -- .../dhinternal/flatbuffers/ByteBuffer.java | 115 -- .../dhinternal/flatbuffers/Encoding.java | 13 - .../proto/dhinternal/flatbuffers/Long.java | 25 - .../proto/dhinternal/flatbuffers/Table.java | 31 - .../barrage/flatbuf/BarrageMessageType.java | 23 - .../flatbuf/BarrageMessageWrapper.java | 130 -- .../flatbuf/BarrageModColumnMetadata.java | 125 -- .../flatbuf/BarragePublicationOptions.java | 42 - .../flatbuf/BarragePublicationRequest.java | 125 -- .../flatbuf/BarrageSnapshotOptions.java | 59 - .../flatbuf/BarrageSnapshotRequest.java | 287 --- .../flatbuf/BarrageSubscriptionOptions.java | 65 - .../flatbuf/BarrageSubscriptionRequest.java | 298 ---- .../flatbuf/BarrageUpdateMetadata.java | 575 ------ .../barrage/flatbuf/ColumnConversionMode.java | 17 - .../barrage/flatbuf/NewSessionRequest.java | 125 -- .../flatbuf/RefreshSessionRequest.java | 121 -- .../barrage/flatbuf/SessionInfoResponse.java | 211 --- 65 files changed, 6220 deletions(-) delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/Flatbuf.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/BodyCompression.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/BodyCompressionMethod.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/CompressionType.java delete mode 100644 
web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/DictionaryBatch.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/FieldNode.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/Message.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/MessageHeader.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/RecordBatch.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/Flatbuf.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Binary.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Bool.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Buffer.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Date.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/DateUnit.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Decimal.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/DictionaryEncoding.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/DictionaryKind.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Duration.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Endianness.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Feature.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Field.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/FixedSizeBinary.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/FixedSizeList.java delete mode 100644 
web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/FloatingPoint.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Int.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Interval.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/IntervalUnit.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/KeyValue.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/LargeBinary.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/LargeList.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/LargeUtf8.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/List.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Map.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/MetadataVersion.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Null.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Precision.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Schema.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Struct_.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Time.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/TimeUnit.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Timestamp.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Type.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Union.java delete mode 100644 
web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/UnionMode.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Utf8.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/flatbuffers/Builder.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/flatbuffers/ByteBuffer.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/flatbuffers/Encoding.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/flatbuffers/Long.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/flatbuffers/Table.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageMessageType.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageMessageWrapper.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageModColumnMetadata.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarragePublicationOptions.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarragePublicationRequest.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageSnapshotOptions.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageSnapshotRequest.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageSubscriptionOptions.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageSubscriptionRequest.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageUpdateMetadata.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/ColumnConversionMode.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/NewSessionRequest.java delete mode 100644 web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/RefreshSessionRequest.java delete mode 100644 
web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/SessionInfoResponse.java diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/Flatbuf.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/Flatbuf.java deleted file mode 100644 index d32b397e89b..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/Flatbuf.java +++ /dev/null @@ -1,300 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.message_generated.org.apache.arrow; - -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.message_generated.org.apache.arrow.flatbuf.DictionaryBatch; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.message_generated.org.apache.arrow.flatbuf.RecordBatch; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Schema; -import jsinterop.annotations.JsFunction; -import jsinterop.annotations.JsOverlay; -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; -import jsinterop.base.Js; - -@JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf", - namespace = JsPackage.GLOBAL) -public class Flatbuf { - @JsFunction - public interface UnionListToMessageHeaderAccessorFn { - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface P1UnionType { - @JsOverlay - static Flatbuf.UnionListToMessageHeaderAccessorFn.P1UnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default DictionaryBatch asDictionaryBatch() { - return Js.cast(this); - } - - @JsOverlay - default RecordBatch asRecordBatch() { - return Js.cast(this); - } - - @JsOverlay - default Schema asSchema() { - return Js.cast(this); - } - - @JsOverlay - default boolean isDictionaryBatch() { - return (Object) this instanceof DictionaryBatch; - } - - @JsOverlay - default boolean isRecordBatch() { - return (Object) this instanceof RecordBatch; - } - - @JsOverlay - default boolean isSchema() { - return (Object) this instanceof Schema; - } - } - - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface UnionType { - @JsOverlay - static Flatbuf.UnionListToMessageHeaderAccessorFn.UnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default DictionaryBatch asDictionaryBatch() { - return Js.cast(this); - } - - @JsOverlay - default RecordBatch asRecordBatch() { - return Js.cast(this); - } - - @JsOverlay - default Schema asSchema() { - return Js.cast(this); - } - - @JsOverlay - default boolean isDictionaryBatch() { - return (Object) this instanceof DictionaryBatch; - } - - @JsOverlay - default boolean isRecordBatch() { - return (Object) this instanceof RecordBatch; - } - - @JsOverlay - default boolean isSchema() { - return (Object) this instanceof Schema; - } - } - - @JsOverlay - default Flatbuf.UnionListToMessageHeaderAccessorFn.UnionType onInvoke( - double p0, DictionaryBatch p1) { - return onInvoke( - p0, Js.uncheckedCast(p1)); - } - - Flatbuf.UnionListToMessageHeaderAccessorFn.UnionType onInvoke( - double p0, 
Flatbuf.UnionListToMessageHeaderAccessorFn.P1UnionType p1); - - @JsOverlay - default Flatbuf.UnionListToMessageHeaderAccessorFn.UnionType onInvoke( - double p0, RecordBatch p1) { - return onInvoke( - p0, Js.uncheckedCast(p1)); - } - - @JsOverlay - default Flatbuf.UnionListToMessageHeaderAccessorFn.UnionType onInvoke(double p0, Schema p1) { - return onInvoke( - p0, Js.uncheckedCast(p1)); - } - } - - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface UnionListToMessageHeaderUnionType { - @JsOverlay - static Flatbuf.UnionListToMessageHeaderUnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default DictionaryBatch asDictionaryBatch() { - return Js.cast(this); - } - - @JsOverlay - default RecordBatch asRecordBatch() { - return Js.cast(this); - } - - @JsOverlay - default Schema asSchema() { - return Js.cast(this); - } - - @JsOverlay - default boolean isDictionaryBatch() { - return (Object) this instanceof DictionaryBatch; - } - - @JsOverlay - default boolean isRecordBatch() { - return (Object) this instanceof RecordBatch; - } - - @JsOverlay - default boolean isSchema() { - return (Object) this instanceof Schema; - } - } - - @JsFunction - public interface UnionToMessageHeaderAccessorFn { - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface P0UnionType { - @JsOverlay - static Flatbuf.UnionToMessageHeaderAccessorFn.P0UnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default DictionaryBatch asDictionaryBatch() { - return Js.cast(this); - } - - @JsOverlay - default RecordBatch asRecordBatch() { - return Js.cast(this); - } - - @JsOverlay - default Schema asSchema() { - return Js.cast(this); - } - - @JsOverlay - default boolean isDictionaryBatch() { - return (Object) this instanceof DictionaryBatch; - } - - @JsOverlay - default boolean isRecordBatch() { - return (Object) this instanceof RecordBatch; - } - - @JsOverlay - default boolean isSchema() { - return (Object) this instanceof Schema; - } - } - - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface UnionType { - @JsOverlay - static Flatbuf.UnionToMessageHeaderAccessorFn.UnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default DictionaryBatch asDictionaryBatch() { - return Js.cast(this); - } - - @JsOverlay - default RecordBatch asRecordBatch() { - return Js.cast(this); - } - - @JsOverlay - default Schema asSchema() { - return Js.cast(this); - } - - @JsOverlay - default boolean isDictionaryBatch() { - return (Object) this instanceof DictionaryBatch; - } - - @JsOverlay - default boolean isRecordBatch() { - return (Object) this instanceof RecordBatch; - } - - @JsOverlay - default boolean isSchema() { - return (Object) this instanceof Schema; - } - } - - @JsOverlay - default Flatbuf.UnionToMessageHeaderAccessorFn.UnionType onInvoke(DictionaryBatch p0) { - return onInvoke(Js.uncheckedCast(p0)); - } - - Flatbuf.UnionToMessageHeaderAccessorFn.UnionType onInvoke( - Flatbuf.UnionToMessageHeaderAccessorFn.P0UnionType p0); - - @JsOverlay - default Flatbuf.UnionToMessageHeaderAccessorFn.UnionType onInvoke(RecordBatch p0) { - return onInvoke(Js.uncheckedCast(p0)); - } - - @JsOverlay - default Flatbuf.UnionToMessageHeaderAccessorFn.UnionType onInvoke(Schema p0) { - return onInvoke(Js.uncheckedCast(p0)); - } - } - - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface UnionToMessageHeaderUnionType { - @JsOverlay - static Flatbuf.UnionToMessageHeaderUnionType of(Object o) { - 
return Js.cast(o); - } - - @JsOverlay - default DictionaryBatch asDictionaryBatch() { - return Js.cast(this); - } - - @JsOverlay - default RecordBatch asRecordBatch() { - return Js.cast(this); - } - - @JsOverlay - default Schema asSchema() { - return Js.cast(this); - } - - @JsOverlay - default boolean isDictionaryBatch() { - return (Object) this instanceof DictionaryBatch; - } - - @JsOverlay - default boolean isRecordBatch() { - return (Object) this instanceof RecordBatch; - } - - @JsOverlay - default boolean isSchema() { - return (Object) this instanceof Schema; - } - } - - public static native Flatbuf.UnionListToMessageHeaderUnionType unionListToMessageHeader( - int type, Flatbuf.UnionListToMessageHeaderAccessorFn accessor, double index); - - public static native Flatbuf.UnionToMessageHeaderUnionType unionToMessageHeader( - int type, Flatbuf.UnionToMessageHeaderAccessorFn accessor); -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/BodyCompression.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/BodyCompression.java deleted file mode 100644 index 77a91516d9c..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/BodyCompression.java +++ /dev/null @@ -1,44 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.message_generated.org.apache.arrow.flatbuf; - -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer; -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; - -@JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf.BodyCompression", - namespace = JsPackage.GLOBAL) -public class BodyCompression { - public static native void addCodec(Builder builder, int codec); - - public static native void addMethod(Builder builder, int method); - - public static native double createBodyCompression( - Builder builder, int codec, int method); - - public static native double endBodyCompression(Builder builder); - - public static native BodyCompression getRootAsBodyCompression(ByteBuffer bb, BodyCompression obj); - - public static native BodyCompression getRootAsBodyCompression(ByteBuffer bb); - - public static native BodyCompression getSizePrefixedRootAsBodyCompression( - ByteBuffer bb, BodyCompression obj); - - public static native BodyCompression getSizePrefixedRootAsBodyCompression(ByteBuffer bb); - - public static native void startBodyCompression(Builder builder); - - public ByteBuffer bb; - public double bb_pos; - - public native BodyCompression __init(double i, ByteBuffer bb); - - public native int codec(); - - public native int method(); -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/BodyCompressionMethod.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/BodyCompressionMethod.java deleted file mode 100644 index 0fa3c6919e4..00000000000 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/BodyCompressionMethod.java +++ /dev/null @@ -1,15 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.message_generated.org.apache.arrow.flatbuf; - -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; - -@JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf.BodyCompressionMethod", - namespace = JsPackage.GLOBAL) -public class BodyCompressionMethod { - public static int BUFFER; -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/CompressionType.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/CompressionType.java deleted file mode 100644 index a395a0317e3..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/CompressionType.java +++ /dev/null @@ -1,16 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.message_generated.org.apache.arrow.flatbuf; - -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; - -@JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf.CompressionType", - namespace = JsPackage.GLOBAL) -public class CompressionType { - public static int LZ4_FRAME, - ZSTD; -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/DictionaryBatch.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/DictionaryBatch.java deleted file mode 100644 index b063716b3c4..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/DictionaryBatch.java +++ /dev/null @@ -1,48 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.message_generated.org.apache.arrow.flatbuf; - -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Long; -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; - -@JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf.DictionaryBatch", - namespace = JsPackage.GLOBAL) -public class DictionaryBatch { - public static native void addData(Builder builder, double dataOffset); - - public static native void addId(Builder builder, Long id); - - public static native void addIsDelta(Builder builder, boolean isDelta); - - public static native double endDictionaryBatch(Builder builder); - - public static native DictionaryBatch getRootAsDictionaryBatch(ByteBuffer bb, DictionaryBatch obj); - - public static native DictionaryBatch getRootAsDictionaryBatch(ByteBuffer bb); - - 
public static native DictionaryBatch getSizePrefixedRootAsDictionaryBatch( - ByteBuffer bb, DictionaryBatch obj); - - public static native DictionaryBatch getSizePrefixedRootAsDictionaryBatch(ByteBuffer bb); - - public static native void startDictionaryBatch(Builder builder); - - public ByteBuffer bb; - public double bb_pos; - - public native DictionaryBatch __init(double i, ByteBuffer bb); - - public native RecordBatch data(); - - public native RecordBatch data(RecordBatch obj); - - public native Long id(); - - public native boolean isDelta(); -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/FieldNode.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/FieldNode.java deleted file mode 100644 index e3751057417..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/FieldNode.java +++ /dev/null @@ -1,29 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.message_generated.org.apache.arrow.flatbuf; - -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Long; -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; - -@JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf.FieldNode", - namespace = JsPackage.GLOBAL) -public class FieldNode { - public static native double createFieldNode(Builder builder, Long length, Long null_count); - - public static native double sizeOf(); - - public ByteBuffer bb; - public double bb_pos; - - public native FieldNode __init(double i, ByteBuffer bb); - - public native Long length(); - - public native Long nullCount(); -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/Message.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/Message.java deleted file mode 100644 index 691e7947369..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/Message.java +++ /dev/null @@ -1,82 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.message_generated.org.apache.arrow.flatbuf; - -import elemental2.core.JsArray; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.KeyValue; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Long; -import jsinterop.annotations.JsOverlay; -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; -import jsinterop.base.Js; - -@JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf.Message", - namespace = JsPackage.GLOBAL) -public 
class Message { - public static native void addBodyLength(Builder builder, Long bodyLength); - - public static native void addCustomMetadata(Builder builder, double customMetadataOffset); - - public static native void addHeader(Builder builder, double headerOffset); - - public static native void addHeaderType(Builder builder, int headerType); - - public static native void addVersion(Builder builder, int version); - - public static native double createCustomMetadataVector(Builder builder, JsArray<Double> data); - - @JsOverlay - public static final double createCustomMetadataVector(Builder builder, double[] data) { - return createCustomMetadataVector(builder, Js.<JsArray<Double>>uncheckedCast(data)); - } - - public static native double createMessage( - Builder builder, - int version, - int headerType, - double headerOffset, - Long bodyLength, - double customMetadataOffset); - - public static native double endMessage(Builder builder); - - public static native void finishMessageBuffer(Builder builder, double offset); - - public static native void finishSizePrefixedMessageBuffer(Builder builder, double offset); - - public static native Message getRootAsMessage(ByteBuffer bb, Message obj); - - public static native Message getRootAsMessage(ByteBuffer bb); - - public static native Message getSizePrefixedRootAsMessage(ByteBuffer bb, Message obj); - - public static native Message getSizePrefixedRootAsMessage(ByteBuffer bb); - - public static native void startCustomMetadataVector(Builder builder, double numElems); - - public static native void startMessage(Builder builder); - - public ByteBuffer bb; - public double bb_pos; - - public native Message __init(double i, ByteBuffer bb); - - public native Long bodyLength(); - - public native KeyValue customMetadata(double index, KeyValue obj); - - public native KeyValue customMetadata(double index); - - public native double customMetadataLength(); - - public native <T> T header(T obj); - - public native int headerType(); - - public native int version(); -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/MessageHeader.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/MessageHeader.java deleted file mode 100644 index 48bae3c723d..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/MessageHeader.java +++ /dev/null @@ -1,18 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.message_generated.org.apache.arrow.flatbuf; - -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; - -@JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf.MessageHeader", - namespace = JsPackage.GLOBAL) -public class MessageHeader { - public static int DictionaryBatch, - NONE, - RecordBatch, - Schema; -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/RecordBatch.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/RecordBatch.java deleted file mode 100644 index 619fbee84cb..00000000000 ---
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/RecordBatch.java +++ /dev/null @@ -1,64 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.message_generated.org.apache.arrow.flatbuf; - -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Buffer; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Long; -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; - -@JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf.RecordBatch", - namespace = JsPackage.GLOBAL) -public class RecordBatch { - public static native void addBuffers(Builder builder, double buffersOffset); - - public static native void addCompression(Builder builder, double compressionOffset); - - public static native void addLength(Builder builder, Long length); - - public static native void addNodes(Builder builder, double nodesOffset); - - public static native double endRecordBatch(Builder builder); - - public static native RecordBatch getRootAsRecordBatch(ByteBuffer bb, RecordBatch obj); - - public static native RecordBatch getRootAsRecordBatch(ByteBuffer bb); - - public static native RecordBatch getSizePrefixedRootAsRecordBatch(ByteBuffer bb, RecordBatch obj); - - public static native RecordBatch getSizePrefixedRootAsRecordBatch(ByteBuffer bb); - - public static native void startBuffersVector(Builder builder, double numElems); - - public static native void startNodesVector(Builder builder, double numElems); - - public static native void startRecordBatch(Builder builder); - - public ByteBuffer bb; - public double bb_pos; - - public native RecordBatch __init(double i, ByteBuffer bb); - - public native Buffer buffers(double index, Buffer obj); - - public native Buffer buffers(double index); - - public native double buffersLength(); - - public native BodyCompression compression(); - - public native BodyCompression compression(BodyCompression obj); - - public native Long length(); - - public native FieldNode nodes(double index, FieldNode obj); - - public native FieldNode nodes(double index); - - public native double nodesLength(); -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/Flatbuf.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/Flatbuf.java deleted file mode 100644 index c74661f7d31..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/Flatbuf.java +++ /dev/null @@ -1,1572 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow; - -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Binary; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Bool; -import 
io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Date; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Decimal; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Duration; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.FixedSizeBinary; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.FixedSizeList; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.FloatingPoint; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Int; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Interval; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.LargeBinary; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.LargeList; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.LargeUtf8; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.List; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Map; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Null; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Struct_; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Time; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Timestamp; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Union; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf.Utf8; -import jsinterop.annotations.JsFunction; -import jsinterop.annotations.JsOverlay; -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; -import jsinterop.base.Js; - -@JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf", - namespace = JsPackage.GLOBAL) -public class Flatbuf { - @JsFunction - public interface UnionListToTypeAccessorFn { - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface P1UnionType { - @JsOverlay - static Flatbuf.UnionListToTypeAccessorFn.P1UnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default Binary asBinary() { - return Js.cast(this); - } - - @JsOverlay - default Bool asBool() { - return Js.cast(this); - } - - @JsOverlay - default Date asDate() { - return Js.cast(this); - } - - @JsOverlay - default Decimal asDecimal() { - return Js.cast(this); - } - - @JsOverlay - default Duration asDuration() { - return Js.cast(this); - } - - @JsOverlay - default FixedSizeBinary asFixedSizeBinary() { - return Js.cast(this); - } - - @JsOverlay - default FixedSizeList asFixedSizeList() { - return Js.cast(this); - } - - @JsOverlay - default FloatingPoint asFloatingPoint() { - return Js.cast(this); 
- } - - @JsOverlay - default Int asInt() { - return Js.cast(this); - } - - @JsOverlay - default Interval asInterval() { - return Js.cast(this); - } - - @JsOverlay - default LargeBinary asLargeBinary() { - return Js.cast(this); - } - - @JsOverlay - default LargeList asLargeList() { - return Js.cast(this); - } - - @JsOverlay - default LargeUtf8 asLargeUtf8() { - return Js.cast(this); - } - - @JsOverlay - default List asList() { - return Js.cast(this); - } - - @JsOverlay - default Map asMap() { - return Js.cast(this); - } - - @JsOverlay - default Null asNull() { - return Js.cast(this); - } - - @JsOverlay - default Struct_ asStruct_() { - return Js.cast(this); - } - - @JsOverlay - default Time asTime() { - return Js.cast(this); - } - - @JsOverlay - default Timestamp asTimestamp() { - return Js.cast(this); - } - - @JsOverlay - default Union asUnion() { - return Js.cast(this); - } - - @JsOverlay - default Utf8 asUtf8() { - return Js.cast(this); - } - - @JsOverlay - default boolean isBinary() { - return (Object) this instanceof Binary; - } - - @JsOverlay - default boolean isBool() { - return (Object) this instanceof Bool; - } - - @JsOverlay - default boolean isDate() { - return (Object) this instanceof Date; - } - - @JsOverlay - default boolean isDecimal() { - return (Object) this instanceof Decimal; - } - - @JsOverlay - default boolean isDuration() { - return (Object) this instanceof Duration; - } - - @JsOverlay - default boolean isFixedSizeBinary() { - return (Object) this instanceof FixedSizeBinary; - } - - @JsOverlay - default boolean isFixedSizeList() { - return (Object) this instanceof FixedSizeList; - } - - @JsOverlay - default boolean isFloatingPoint() { - return (Object) this instanceof FloatingPoint; - } - - @JsOverlay - default boolean isInt() { - return (Object) this instanceof Int; - } - - @JsOverlay - default boolean isInterval() { - return (Object) this instanceof Interval; - } - - @JsOverlay - default boolean isLargeBinary() { - return (Object) this instanceof LargeBinary; - } - - @JsOverlay - default boolean isLargeList() { - return (Object) this instanceof LargeList; - } - - @JsOverlay - default boolean isLargeUtf8() { - return (Object) this instanceof LargeUtf8; - } - - @JsOverlay - default boolean isList() { - return (Object) this instanceof List; - } - - @JsOverlay - default boolean isMap() { - return (Object) this instanceof Map; - } - - @JsOverlay - default boolean isNull() { - return (Object) this instanceof Null; - } - - @JsOverlay - default boolean isStruct_() { - return (Object) this instanceof Struct_; - } - - @JsOverlay - default boolean isTime() { - return (Object) this instanceof Time; - } - - @JsOverlay - default boolean isTimestamp() { - return (Object) this instanceof Timestamp; - } - - @JsOverlay - default boolean isUnion() { - return (Object) this instanceof Union; - } - - @JsOverlay - default boolean isUtf8() { - return (Object) this instanceof Utf8; - } - } - - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface UnionType { - @JsOverlay - static Flatbuf.UnionListToTypeAccessorFn.UnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default Binary asBinary() { - return Js.cast(this); - } - - @JsOverlay - default Bool asBool() { - return Js.cast(this); - } - - @JsOverlay - default Date asDate() { - return Js.cast(this); - } - - @JsOverlay - default Decimal asDecimal() { - return Js.cast(this); - } - - @JsOverlay - default Duration asDuration() { - return Js.cast(this); - } - - @JsOverlay - default FixedSizeBinary 
asFixedSizeBinary() { - return Js.cast(this); - } - - @JsOverlay - default FixedSizeList asFixedSizeList() { - return Js.cast(this); - } - - @JsOverlay - default FloatingPoint asFloatingPoint() { - return Js.cast(this); - } - - @JsOverlay - default Int asInt() { - return Js.cast(this); - } - - @JsOverlay - default Interval asInterval() { - return Js.cast(this); - } - - @JsOverlay - default LargeBinary asLargeBinary() { - return Js.cast(this); - } - - @JsOverlay - default LargeList asLargeList() { - return Js.cast(this); - } - - @JsOverlay - default LargeUtf8 asLargeUtf8() { - return Js.cast(this); - } - - @JsOverlay - default List asList() { - return Js.cast(this); - } - - @JsOverlay - default Map asMap() { - return Js.cast(this); - } - - @JsOverlay - default Null asNull() { - return Js.cast(this); - } - - @JsOverlay - default Struct_ asStruct_() { - return Js.cast(this); - } - - @JsOverlay - default Time asTime() { - return Js.cast(this); - } - - @JsOverlay - default Timestamp asTimestamp() { - return Js.cast(this); - } - - @JsOverlay - default Union asUnion() { - return Js.cast(this); - } - - @JsOverlay - default Utf8 asUtf8() { - return Js.cast(this); - } - - @JsOverlay - default boolean isBinary() { - return (Object) this instanceof Binary; - } - - @JsOverlay - default boolean isBool() { - return (Object) this instanceof Bool; - } - - @JsOverlay - default boolean isDate() { - return (Object) this instanceof Date; - } - - @JsOverlay - default boolean isDecimal() { - return (Object) this instanceof Decimal; - } - - @JsOverlay - default boolean isDuration() { - return (Object) this instanceof Duration; - } - - @JsOverlay - default boolean isFixedSizeBinary() { - return (Object) this instanceof FixedSizeBinary; - } - - @JsOverlay - default boolean isFixedSizeList() { - return (Object) this instanceof FixedSizeList; - } - - @JsOverlay - default boolean isFloatingPoint() { - return (Object) this instanceof FloatingPoint; - } - - @JsOverlay - default boolean isInt() { - return (Object) this instanceof Int; - } - - @JsOverlay - default boolean isInterval() { - return (Object) this instanceof Interval; - } - - @JsOverlay - default boolean isLargeBinary() { - return (Object) this instanceof LargeBinary; - } - - @JsOverlay - default boolean isLargeList() { - return (Object) this instanceof LargeList; - } - - @JsOverlay - default boolean isLargeUtf8() { - return (Object) this instanceof LargeUtf8; - } - - @JsOverlay - default boolean isList() { - return (Object) this instanceof List; - } - - @JsOverlay - default boolean isMap() { - return (Object) this instanceof Map; - } - - @JsOverlay - default boolean isNull() { - return (Object) this instanceof Null; - } - - @JsOverlay - default boolean isStruct_() { - return (Object) this instanceof Struct_; - } - - @JsOverlay - default boolean isTime() { - return (Object) this instanceof Time; - } - - @JsOverlay - default boolean isTimestamp() { - return (Object) this instanceof Timestamp; - } - - @JsOverlay - default boolean isUnion() { - return (Object) this instanceof Union; - } - - @JsOverlay - default boolean isUtf8() { - return (Object) this instanceof Utf8; - } - } - - @JsOverlay - default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Binary p1) { - return onInvoke(p0, Js.uncheckedCast(p1)); - } - - @JsOverlay - default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Bool p1) { - return onInvoke(p0, Js.uncheckedCast(p1)); - } - - @JsOverlay - default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Date 
p1) { - return onInvoke(p0, Js.uncheckedCast(p1)); - } - - @JsOverlay - default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Decimal p1) { - return onInvoke(p0, Js.uncheckedCast(p1)); - } - - @JsOverlay - default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Duration p1) { - return onInvoke(p0, Js.uncheckedCast(p1)); - } - - @JsOverlay - default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, FixedSizeBinary p1) { - return onInvoke(p0, Js.uncheckedCast(p1)); - } - - @JsOverlay - default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, FixedSizeList p1) { - return onInvoke(p0, Js.uncheckedCast(p1)); - } - - @JsOverlay - default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, FloatingPoint p1) { - return onInvoke(p0, Js.uncheckedCast(p1)); - } - - @JsOverlay - default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Int p1) { - return onInvoke(p0, Js.uncheckedCast(p1)); - } - - @JsOverlay - default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Interval p1) { - return onInvoke(p0, Js.uncheckedCast(p1)); - } - - @JsOverlay - default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, LargeBinary p1) { - return onInvoke(p0, Js.uncheckedCast(p1)); - } - - @JsOverlay - default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, LargeList p1) { - return onInvoke(p0, Js.uncheckedCast(p1)); - } - - @JsOverlay - default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, LargeUtf8 p1) { - return onInvoke(p0, Js.uncheckedCast(p1)); - } - - @JsOverlay - default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, List p1) { - return onInvoke(p0, Js.uncheckedCast(p1)); - } - - @JsOverlay - default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Map p1) { - return onInvoke(p0, Js.uncheckedCast(p1)); - } - - @JsOverlay - default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Null p1) { - return onInvoke(p0, Js.uncheckedCast(p1)); - } - - Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke( - double p0, Flatbuf.UnionListToTypeAccessorFn.P1UnionType p1); - - @JsOverlay - default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Struct_ p1) { - return onInvoke(p0, Js.uncheckedCast(p1)); - } - - @JsOverlay - default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Time p1) { - return onInvoke(p0, Js.uncheckedCast(p1)); - } - - @JsOverlay - default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Timestamp p1) { - return onInvoke(p0, Js.uncheckedCast(p1)); - } - - @JsOverlay - default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Union p1) { - return onInvoke(p0, Js.uncheckedCast(p1)); - } - - @JsOverlay - default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Utf8 p1) { - return onInvoke(p0, Js.uncheckedCast(p1)); - } - } - - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface UnionListToTypeUnionType { - @JsOverlay - static Flatbuf.UnionListToTypeUnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default Binary asBinary() { - return Js.cast(this); - } - - @JsOverlay - default Bool asBool() { - return Js.cast(this); - } - - @JsOverlay - default Date asDate() { - return Js.cast(this); - } - - @JsOverlay - default Decimal asDecimal() { - return Js.cast(this); - } - - @JsOverlay - default Duration asDuration() { - return Js.cast(this); - } - - @JsOverlay - default FixedSizeBinary 
asFixedSizeBinary() { - return Js.cast(this); - } - - @JsOverlay - default FixedSizeList asFixedSizeList() { - return Js.cast(this); - } - - @JsOverlay - default FloatingPoint asFloatingPoint() { - return Js.cast(this); - } - - @JsOverlay - default Int asInt() { - return Js.cast(this); - } - - @JsOverlay - default Interval asInterval() { - return Js.cast(this); - } - - @JsOverlay - default LargeBinary asLargeBinary() { - return Js.cast(this); - } - - @JsOverlay - default LargeList asLargeList() { - return Js.cast(this); - } - - @JsOverlay - default LargeUtf8 asLargeUtf8() { - return Js.cast(this); - } - - @JsOverlay - default List asList() { - return Js.cast(this); - } - - @JsOverlay - default Map asMap() { - return Js.cast(this); - } - - @JsOverlay - default Null asNull() { - return Js.cast(this); - } - - @JsOverlay - default Struct_ asStruct_() { - return Js.cast(this); - } - - @JsOverlay - default Time asTime() { - return Js.cast(this); - } - - @JsOverlay - default Timestamp asTimestamp() { - return Js.cast(this); - } - - @JsOverlay - default Union asUnion() { - return Js.cast(this); - } - - @JsOverlay - default Utf8 asUtf8() { - return Js.cast(this); - } - - @JsOverlay - default boolean isBinary() { - return (Object) this instanceof Binary; - } - - @JsOverlay - default boolean isBool() { - return (Object) this instanceof Bool; - } - - @JsOverlay - default boolean isDate() { - return (Object) this instanceof Date; - } - - @JsOverlay - default boolean isDecimal() { - return (Object) this instanceof Decimal; - } - - @JsOverlay - default boolean isDuration() { - return (Object) this instanceof Duration; - } - - @JsOverlay - default boolean isFixedSizeBinary() { - return (Object) this instanceof FixedSizeBinary; - } - - @JsOverlay - default boolean isFixedSizeList() { - return (Object) this instanceof FixedSizeList; - } - - @JsOverlay - default boolean isFloatingPoint() { - return (Object) this instanceof FloatingPoint; - } - - @JsOverlay - default boolean isInt() { - return (Object) this instanceof Int; - } - - @JsOverlay - default boolean isInterval() { - return (Object) this instanceof Interval; - } - - @JsOverlay - default boolean isLargeBinary() { - return (Object) this instanceof LargeBinary; - } - - @JsOverlay - default boolean isLargeList() { - return (Object) this instanceof LargeList; - } - - @JsOverlay - default boolean isLargeUtf8() { - return (Object) this instanceof LargeUtf8; - } - - @JsOverlay - default boolean isList() { - return (Object) this instanceof List; - } - - @JsOverlay - default boolean isMap() { - return (Object) this instanceof Map; - } - - @JsOverlay - default boolean isNull() { - return (Object) this instanceof Null; - } - - @JsOverlay - default boolean isStruct_() { - return (Object) this instanceof Struct_; - } - - @JsOverlay - default boolean isTime() { - return (Object) this instanceof Time; - } - - @JsOverlay - default boolean isTimestamp() { - return (Object) this instanceof Timestamp; - } - - @JsOverlay - default boolean isUnion() { - return (Object) this instanceof Union; - } - - @JsOverlay - default boolean isUtf8() { - return (Object) this instanceof Utf8; - } - } - - @JsFunction - public interface UnionToTypeAccessorFn { - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface P0UnionType { - @JsOverlay - static Flatbuf.UnionToTypeAccessorFn.P0UnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default Binary asBinary() { - return Js.cast(this); - } - - @JsOverlay - default Bool asBool() { - return 
Js.cast(this); - } - - @JsOverlay - default Date asDate() { - return Js.cast(this); - } - - @JsOverlay - default Decimal asDecimal() { - return Js.cast(this); - } - - @JsOverlay - default Duration asDuration() { - return Js.cast(this); - } - - @JsOverlay - default FixedSizeBinary asFixedSizeBinary() { - return Js.cast(this); - } - - @JsOverlay - default FixedSizeList asFixedSizeList() { - return Js.cast(this); - } - - @JsOverlay - default FloatingPoint asFloatingPoint() { - return Js.cast(this); - } - - @JsOverlay - default Int asInt() { - return Js.cast(this); - } - - @JsOverlay - default Interval asInterval() { - return Js.cast(this); - } - - @JsOverlay - default LargeBinary asLargeBinary() { - return Js.cast(this); - } - - @JsOverlay - default LargeList asLargeList() { - return Js.cast(this); - } - - @JsOverlay - default LargeUtf8 asLargeUtf8() { - return Js.cast(this); - } - - @JsOverlay - default List asList() { - return Js.cast(this); - } - - @JsOverlay - default Map asMap() { - return Js.cast(this); - } - - @JsOverlay - default Null asNull() { - return Js.cast(this); - } - - @JsOverlay - default Struct_ asStruct_() { - return Js.cast(this); - } - - @JsOverlay - default Time asTime() { - return Js.cast(this); - } - - @JsOverlay - default Timestamp asTimestamp() { - return Js.cast(this); - } - - @JsOverlay - default Union asUnion() { - return Js.cast(this); - } - - @JsOverlay - default Utf8 asUtf8() { - return Js.cast(this); - } - - @JsOverlay - default boolean isBinary() { - return (Object) this instanceof Binary; - } - - @JsOverlay - default boolean isBool() { - return (Object) this instanceof Bool; - } - - @JsOverlay - default boolean isDate() { - return (Object) this instanceof Date; - } - - @JsOverlay - default boolean isDecimal() { - return (Object) this instanceof Decimal; - } - - @JsOverlay - default boolean isDuration() { - return (Object) this instanceof Duration; - } - - @JsOverlay - default boolean isFixedSizeBinary() { - return (Object) this instanceof FixedSizeBinary; - } - - @JsOverlay - default boolean isFixedSizeList() { - return (Object) this instanceof FixedSizeList; - } - - @JsOverlay - default boolean isFloatingPoint() { - return (Object) this instanceof FloatingPoint; - } - - @JsOverlay - default boolean isInt() { - return (Object) this instanceof Int; - } - - @JsOverlay - default boolean isInterval() { - return (Object) this instanceof Interval; - } - - @JsOverlay - default boolean isLargeBinary() { - return (Object) this instanceof LargeBinary; - } - - @JsOverlay - default boolean isLargeList() { - return (Object) this instanceof LargeList; - } - - @JsOverlay - default boolean isLargeUtf8() { - return (Object) this instanceof LargeUtf8; - } - - @JsOverlay - default boolean isList() { - return (Object) this instanceof List; - } - - @JsOverlay - default boolean isMap() { - return (Object) this instanceof Map; - } - - @JsOverlay - default boolean isNull() { - return (Object) this instanceof Null; - } - - @JsOverlay - default boolean isStruct_() { - return (Object) this instanceof Struct_; - } - - @JsOverlay - default boolean isTime() { - return (Object) this instanceof Time; - } - - @JsOverlay - default boolean isTimestamp() { - return (Object) this instanceof Timestamp; - } - - @JsOverlay - default boolean isUnion() { - return (Object) this instanceof Union; - } - - @JsOverlay - default boolean isUtf8() { - return (Object) this instanceof Utf8; - } - } - - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface UnionType { - 
@JsOverlay - static Flatbuf.UnionToTypeAccessorFn.UnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default Binary asBinary() { - return Js.cast(this); - } - - @JsOverlay - default Bool asBool() { - return Js.cast(this); - } - - @JsOverlay - default Date asDate() { - return Js.cast(this); - } - - @JsOverlay - default Decimal asDecimal() { - return Js.cast(this); - } - - @JsOverlay - default Duration asDuration() { - return Js.cast(this); - } - - @JsOverlay - default FixedSizeBinary asFixedSizeBinary() { - return Js.cast(this); - } - - @JsOverlay - default FixedSizeList asFixedSizeList() { - return Js.cast(this); - } - - @JsOverlay - default FloatingPoint asFloatingPoint() { - return Js.cast(this); - } - - @JsOverlay - default Int asInt() { - return Js.cast(this); - } - - @JsOverlay - default Interval asInterval() { - return Js.cast(this); - } - - @JsOverlay - default LargeBinary asLargeBinary() { - return Js.cast(this); - } - - @JsOverlay - default LargeList asLargeList() { - return Js.cast(this); - } - - @JsOverlay - default LargeUtf8 asLargeUtf8() { - return Js.cast(this); - } - - @JsOverlay - default List asList() { - return Js.cast(this); - } - - @JsOverlay - default Map asMap() { - return Js.cast(this); - } - - @JsOverlay - default Null asNull() { - return Js.cast(this); - } - - @JsOverlay - default Struct_ asStruct_() { - return Js.cast(this); - } - - @JsOverlay - default Time asTime() { - return Js.cast(this); - } - - @JsOverlay - default Timestamp asTimestamp() { - return Js.cast(this); - } - - @JsOverlay - default Union asUnion() { - return Js.cast(this); - } - - @JsOverlay - default Utf8 asUtf8() { - return Js.cast(this); - } - - @JsOverlay - default boolean isBinary() { - return (Object) this instanceof Binary; - } - - @JsOverlay - default boolean isBool() { - return (Object) this instanceof Bool; - } - - @JsOverlay - default boolean isDate() { - return (Object) this instanceof Date; - } - - @JsOverlay - default boolean isDecimal() { - return (Object) this instanceof Decimal; - } - - @JsOverlay - default boolean isDuration() { - return (Object) this instanceof Duration; - } - - @JsOverlay - default boolean isFixedSizeBinary() { - return (Object) this instanceof FixedSizeBinary; - } - - @JsOverlay - default boolean isFixedSizeList() { - return (Object) this instanceof FixedSizeList; - } - - @JsOverlay - default boolean isFloatingPoint() { - return (Object) this instanceof FloatingPoint; - } - - @JsOverlay - default boolean isInt() { - return (Object) this instanceof Int; - } - - @JsOverlay - default boolean isInterval() { - return (Object) this instanceof Interval; - } - - @JsOverlay - default boolean isLargeBinary() { - return (Object) this instanceof LargeBinary; - } - - @JsOverlay - default boolean isLargeList() { - return (Object) this instanceof LargeList; - } - - @JsOverlay - default boolean isLargeUtf8() { - return (Object) this instanceof LargeUtf8; - } - - @JsOverlay - default boolean isList() { - return (Object) this instanceof List; - } - - @JsOverlay - default boolean isMap() { - return (Object) this instanceof Map; - } - - @JsOverlay - default boolean isNull() { - return (Object) this instanceof Null; - } - - @JsOverlay - default boolean isStruct_() { - return (Object) this instanceof Struct_; - } - - @JsOverlay - default boolean isTime() { - return (Object) this instanceof Time; - } - - @JsOverlay - default boolean isTimestamp() { - return (Object) this instanceof Timestamp; - } - - @JsOverlay - default boolean isUnion() { - return (Object) this 
-            }
-
-            @JsOverlay
-            default boolean isUtf8() {
-                return (Object) this instanceof Utf8;
-            }
-        }
-
-        @JsOverlay
-        default Flatbuf.UnionToTypeAccessorFn.UnionType onInvoke(Binary p0) {
-            return onInvoke(Js.uncheckedCast(p0));
-        }
-
-        @JsOverlay
-        default Flatbuf.UnionToTypeAccessorFn.UnionType onInvoke(Bool p0) {
-            return onInvoke(Js.uncheckedCast(p0));
-        }
-
-        @JsOverlay
-        default Flatbuf.UnionToTypeAccessorFn.UnionType onInvoke(Date p0) {
-            return onInvoke(Js.uncheckedCast(p0));
-        }
-
-        @JsOverlay
-        default Flatbuf.UnionToTypeAccessorFn.UnionType onInvoke(Decimal p0) {
-            return onInvoke(Js.uncheckedCast(p0));
-        }
-
-        @JsOverlay
-        default Flatbuf.UnionToTypeAccessorFn.UnionType onInvoke(Duration p0) {
-            return onInvoke(Js.uncheckedCast(p0));
-        }
-
-        @JsOverlay
-        default Flatbuf.UnionToTypeAccessorFn.UnionType onInvoke(FixedSizeBinary p0) {
-            return onInvoke(Js.uncheckedCast(p0));
-        }
-
-        @JsOverlay
-        default Flatbuf.UnionToTypeAccessorFn.UnionType onInvoke(FixedSizeList p0) {
-            return onInvoke(Js.uncheckedCast(p0));
-        }
-
-        @JsOverlay
-        default Flatbuf.UnionToTypeAccessorFn.UnionType onInvoke(FloatingPoint p0) {
-            return onInvoke(Js.uncheckedCast(p0));
-        }
-
-        @JsOverlay
-        default Flatbuf.UnionToTypeAccessorFn.UnionType onInvoke(Int p0) {
-            return onInvoke(Js.uncheckedCast(p0));
-        }
-
-        @JsOverlay
-        default Flatbuf.UnionToTypeAccessorFn.UnionType onInvoke(Interval p0) {
-            return onInvoke(Js.uncheckedCast(p0));
-        }
-
-        @JsOverlay
-        default Flatbuf.UnionToTypeAccessorFn.UnionType onInvoke(LargeBinary p0) {
-            return onInvoke(Js.uncheckedCast(p0));
-        }
-
-        @JsOverlay
-        default Flatbuf.UnionToTypeAccessorFn.UnionType onInvoke(LargeList p0) {
-            return onInvoke(Js.uncheckedCast(p0));
-        }
-
-        @JsOverlay
-        default Flatbuf.UnionToTypeAccessorFn.UnionType onInvoke(LargeUtf8 p0) {
-            return onInvoke(Js.uncheckedCast(p0));
-        }
-
-        @JsOverlay
-        default Flatbuf.UnionToTypeAccessorFn.UnionType onInvoke(List p0) {
-            return onInvoke(Js.uncheckedCast(p0));
-        }
-
-        @JsOverlay
-        default Flatbuf.UnionToTypeAccessorFn.UnionType onInvoke(Map p0) {
-            return onInvoke(Js.uncheckedCast(p0));
-        }
-
-        @JsOverlay
-        default Flatbuf.UnionToTypeAccessorFn.UnionType onInvoke(Null p0) {
-            return onInvoke(Js.uncheckedCast(p0));
-        }
-
-        Flatbuf.UnionToTypeAccessorFn.UnionType onInvoke(Flatbuf.UnionToTypeAccessorFn.P0UnionType p0);
-
-        @JsOverlay
-        default Flatbuf.UnionToTypeAccessorFn.UnionType onInvoke(Struct_ p0) {
-            return onInvoke(Js.uncheckedCast(p0));
-        }
-
-        @JsOverlay
-        default Flatbuf.UnionToTypeAccessorFn.UnionType onInvoke(Time p0) {
-            return onInvoke(Js.uncheckedCast(p0));
-        }
-
-        @JsOverlay
-        default Flatbuf.UnionToTypeAccessorFn.UnionType onInvoke(Timestamp p0) {
-            return onInvoke(Js.uncheckedCast(p0));
-        }
-
-        @JsOverlay
-        default Flatbuf.UnionToTypeAccessorFn.UnionType onInvoke(Union p0) {
-            return onInvoke(Js.uncheckedCast(p0));
-        }
-
-        @JsOverlay
-        default Flatbuf.UnionToTypeAccessorFn.UnionType onInvoke(Utf8 p0) {
-            return onInvoke(Js.uncheckedCast(p0));
-        }
-    }
-
-    @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL)
-    public interface UnionToTypeUnionType {
-        @JsOverlay
-        static Flatbuf.UnionToTypeUnionType of(Object o) {
-            return Js.cast(o);
-        }
-
-        @JsOverlay
-        default Binary asBinary() {
-            return Js.cast(this);
-        }
-
-        @JsOverlay
-        default Bool asBool() {
-            return Js.cast(this);
-        }
-
-        @JsOverlay
-        default Date asDate() {
-            return Js.cast(this);
-        }
-
-        @JsOverlay
-        default Decimal asDecimal() {
-            return Js.cast(this);
-        }
-
-        @JsOverlay
-        default Duration asDuration() {
-            return Js.cast(this);
-        }
-
-        @JsOverlay
-        default FixedSizeBinary asFixedSizeBinary() {
-            return Js.cast(this);
-        }
-
-        @JsOverlay
-        default FixedSizeList asFixedSizeList() {
-            return Js.cast(this);
-        }
-
-        @JsOverlay
-        default FloatingPoint asFloatingPoint() {
-            return Js.cast(this);
-        }
-
-        @JsOverlay
-        default Int asInt() {
-            return Js.cast(this);
-        }
-
-        @JsOverlay
-        default Interval asInterval() {
-            return Js.cast(this);
-        }
-
-        @JsOverlay
-        default LargeBinary asLargeBinary() {
-            return Js.cast(this);
-        }
-
-        @JsOverlay
-        default LargeList asLargeList() {
-            return Js.cast(this);
-        }
-
-        @JsOverlay
-        default LargeUtf8 asLargeUtf8() {
-            return Js.cast(this);
-        }
-
-        @JsOverlay
-        default List asList() {
-            return Js.cast(this);
-        }
-
-        @JsOverlay
-        default Map asMap() {
-            return Js.cast(this);
-        }
-
-        @JsOverlay
-        default Null asNull() {
-            return Js.cast(this);
-        }
-
-        @JsOverlay
-        default Struct_ asStruct_() {
-            return Js.cast(this);
-        }
-
-        @JsOverlay
-        default Time asTime() {
-            return Js.cast(this);
-        }
-
-        @JsOverlay
-        default Timestamp asTimestamp() {
-            return Js.cast(this);
-        }
-
-        @JsOverlay
-        default Union asUnion() {
-            return Js.cast(this);
-        }
-
-        @JsOverlay
-        default Utf8 asUtf8() {
-            return Js.cast(this);
-        }
-
-        @JsOverlay
-        default boolean isBinary() {
-            return (Object) this instanceof Binary;
-        }
-
-        @JsOverlay
-        default boolean isBool() {
-            return (Object) this instanceof Bool;
-        }
-
-        @JsOverlay
-        default boolean isDate() {
-            return (Object) this instanceof Date;
-        }
-
-        @JsOverlay
-        default boolean isDecimal() {
-            return (Object) this instanceof Decimal;
-        }
-
-        @JsOverlay
-        default boolean isDuration() {
-            return (Object) this instanceof Duration;
-        }
-
-        @JsOverlay
-        default boolean isFixedSizeBinary() {
-            return (Object) this instanceof FixedSizeBinary;
-        }
-
-        @JsOverlay
-        default boolean isFixedSizeList() {
-            return (Object) this instanceof FixedSizeList;
-        }
-
-        @JsOverlay
-        default boolean isFloatingPoint() {
-            return (Object) this instanceof FloatingPoint;
-        }
-
-        @JsOverlay
-        default boolean isInt() {
-            return (Object) this instanceof Int;
-        }
-
-        @JsOverlay
-        default boolean isInterval() {
-            return (Object) this instanceof Interval;
-        }
-
-        @JsOverlay
-        default boolean isLargeBinary() {
-            return (Object) this instanceof LargeBinary;
-        }
-
-        @JsOverlay
-        default boolean isLargeList() {
-            return (Object) this instanceof LargeList;
-        }
-
-        @JsOverlay
-        default boolean isLargeUtf8() {
-            return (Object) this instanceof LargeUtf8;
-        }
-
-        @JsOverlay
-        default boolean isList() {
-            return (Object) this instanceof List;
-        }
-
-        @JsOverlay
-        default boolean isMap() {
-            return (Object) this instanceof Map;
-        }
-
-        @JsOverlay
-        default boolean isNull() {
-            return (Object) this instanceof Null;
-        }
-
-        @JsOverlay
-        default boolean isStruct_() {
-            return (Object) this instanceof Struct_;
-        }
-
-        @JsOverlay
-        default boolean isTime() {
-            return (Object) this instanceof Time;
-        }
-
-        @JsOverlay
-        default boolean isTimestamp() {
-            return (Object) this instanceof Timestamp;
-        }
-
-        @JsOverlay
-        default boolean isUnion() {
-            return (Object) this instanceof Union;
-        }
-
-        @JsOverlay
-        default boolean isUtf8() {
-            return (Object) this instanceof Utf8;
-        }
-    }
-
-    public static native Flatbuf.UnionListToTypeUnionType unionListToType(
-            int type, Flatbuf.UnionListToTypeAccessorFn accessor, double index);
-
-    public static native Flatbuf.UnionToTypeUnionType unionToType(
-            int type, Flatbuf.UnionToTypeAccessorFn accessor);
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Binary.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Binary.java
deleted file mode 100644
index 9174b46ad62..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Binary.java
+++ /dev/null
@@ -1,34 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer;
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Binary",
-        namespace = JsPackage.GLOBAL)
-public class Binary {
-    public static native double createBinary(Builder builder);
-
-    public static native double endBinary(Builder builder);
-
-    public static native Binary getRootAsBinary(ByteBuffer bb, Binary obj);
-
-    public static native Binary getRootAsBinary(ByteBuffer bb);
-
-    public static native Binary getSizePrefixedRootAsBinary(ByteBuffer bb, Binary obj);
-
-    public static native Binary getSizePrefixedRootAsBinary(ByteBuffer bb);
-
-    public static native void startBinary(Builder builder);
-
-    public ByteBuffer bb;
-    public double bb_pos;
-
-    public native Binary __init(double i, ByteBuffer bb);
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Bool.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Bool.java
deleted file mode 100644
index a995e0e6a1d..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Bool.java
+++ /dev/null
@@ -1,34 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer;
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Bool",
-        namespace = JsPackage.GLOBAL)
-public class Bool {
-    public static native double createBool(Builder builder);
-
-    public static native double endBool(Builder builder);
-
-    public static native Bool getRootAsBool(ByteBuffer bb, Bool obj);
-
-    public static native Bool getRootAsBool(ByteBuffer bb);
-
-    public static native Bool getSizePrefixedRootAsBool(ByteBuffer bb, Bool obj);
-
-    public static native Bool getSizePrefixedRootAsBool(ByteBuffer bb);
-
-    public static native void startBool(Builder builder);
-
-    public ByteBuffer bb;
-    public double bb_pos;
-
-    public native Bool __init(double i, ByteBuffer bb);
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Buffer.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Buffer.java
deleted file mode 100644
index 9cb4e6bc3c6..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Buffer.java
+++ /dev/null
@@ -1,29 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Long;
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Buffer",
-        namespace = JsPackage.GLOBAL)
-public class Buffer {
-    public static native double createBuffer(Builder builder, Long offset, Long length);
-
-    public static native double sizeOf();
-
-    public ByteBuffer bb;
-    public double bb_pos;
-
-    public native Buffer __init(double i, ByteBuffer bb);
-
-    public native Long length();
-
-    public native Long offset();
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Date.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Date.java
deleted file mode 100644
index 37483e70bcc..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Date.java
+++ /dev/null
@@ -1,38 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer;
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Date",
-        namespace = JsPackage.GLOBAL)
-public class Date {
-    public static native void addUnit(Builder builder, int unit);
-
-    public static native double createDate(Builder builder, int unit);
-
-    public static native double endDate(Builder builder);
-
-    public static native Date getRootAsDate(ByteBuffer bb, Date obj);
-
-    public static native Date getRootAsDate(ByteBuffer bb);
-
-    public static native Date getSizePrefixedRootAsDate(ByteBuffer bb, Date obj);
-
-    public static native Date getSizePrefixedRootAsDate(ByteBuffer bb);
-
-    public static native void startDate(Builder builder);
-
-    public ByteBuffer bb;
-    public double bb_pos;
-
-    public native Date __init(double i, ByteBuffer bb);
-
-    public native int unit();
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/DateUnit.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/DateUnit.java
deleted file mode 100644
index b5edb152cb7..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/DateUnit.java
+++ /dev/null
@@ -1,16 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.DateUnit",
-        namespace = JsPackage.GLOBAL)
-public class DateUnit {
-    public static int DAY,
-            MILLISECOND;
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Decimal.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Decimal.java
deleted file mode 100644
index fa3ee35026b..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Decimal.java
+++ /dev/null
@@ -1,47 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer;
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Decimal",
-        namespace = JsPackage.GLOBAL)
-public class Decimal {
-    public static native void addBitWidth(Builder builder, double bitWidth);
-
-    public static native void addPrecision(Builder builder, double precision);
-
-    public static native void addScale(Builder builder, double scale);
-
-    public static native double createDecimal(
-            Builder builder, double precision, double scale, double bitWidth);
-
-    public static native double endDecimal(Builder builder);
-
-    public static native Decimal getRootAsDecimal(ByteBuffer bb, Decimal obj);
-
-    public static native Decimal getRootAsDecimal(ByteBuffer bb);
-
-    public static native Decimal getSizePrefixedRootAsDecimal(ByteBuffer bb, Decimal obj);
-
-    public static native Decimal getSizePrefixedRootAsDecimal(ByteBuffer bb);
-
-    public static native void startDecimal(Builder builder);
-
-    public ByteBuffer bb;
-    public double bb_pos;
-
-    public native Decimal __init(double i, ByteBuffer bb);
-
-    public native double bitWidth();
-
-    public native double precision();
-
-    public native double scale();
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/DictionaryEncoding.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/DictionaryEncoding.java
deleted file mode 100644
index 9be0497b0ec..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/DictionaryEncoding.java
+++ /dev/null
@@ -1,53 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Long;
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.DictionaryEncoding",
-        namespace = JsPackage.GLOBAL)
-public class DictionaryEncoding {
-    public static native void addDictionaryKind(Builder builder, int dictionaryKind);
-
-    public static native void addId(Builder builder, Long id);
-
-    public static native void addIndexType(Builder builder, double indexTypeOffset);
-
-    public static native void addIsOrdered(Builder builder, boolean isOrdered);
-
-    public static native double endDictionaryEncoding(Builder builder);
-
-    public static native DictionaryEncoding getRootAsDictionaryEncoding(
-            ByteBuffer bb, DictionaryEncoding obj);
-
-    public static native DictionaryEncoding getRootAsDictionaryEncoding(ByteBuffer bb);
-
-    public static native DictionaryEncoding getSizePrefixedRootAsDictionaryEncoding(
-            ByteBuffer bb, DictionaryEncoding obj);
-
-    public static native DictionaryEncoding getSizePrefixedRootAsDictionaryEncoding(ByteBuffer bb);
-
-    public static native void startDictionaryEncoding(Builder builder);
-
-    public ByteBuffer bb;
-    public double bb_pos;
-
-    public native DictionaryEncoding __init(double i, ByteBuffer bb);
-
-    public native int dictionaryKind();
-
-    public native Long id();
-
-    public native Int indexType();
-
-    public native Int indexType(Int obj);
-
-    public native boolean isOrdered();
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/DictionaryKind.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/DictionaryKind.java
deleted file mode 100644
index 0a78b7f3036..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/DictionaryKind.java
+++ /dev/null
@@ -1,15 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.DictionaryKind",
-        namespace = JsPackage.GLOBAL)
-public class DictionaryKind {
-    public static int DenseArray;
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Duration.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Duration.java
deleted file mode 100644
index ae2bc2aee9d..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Duration.java
+++ /dev/null
@@ -1,38 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer;
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Duration",
-        namespace = JsPackage.GLOBAL)
-public class Duration {
-    public static native void addUnit(Builder builder, int unit);
-
-    public static native double createDuration(Builder builder, int unit);
-
-    public static native double endDuration(Builder builder);
-
-    public static native Duration getRootAsDuration(ByteBuffer bb, Duration obj);
-
-    public static native Duration getRootAsDuration(ByteBuffer bb);
-
-    public static native Duration getSizePrefixedRootAsDuration(ByteBuffer bb, Duration obj);
-
-    public static native Duration getSizePrefixedRootAsDuration(ByteBuffer bb);
-
-    public static native void startDuration(Builder builder);
-
-    public ByteBuffer bb;
-    public double bb_pos;
-
-    public native Duration __init(double i, ByteBuffer bb);
-
-    public native int unit();
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Endianness.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Endianness.java
deleted file mode 100644
index e29338b855b..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Endianness.java
+++ /dev/null
@@ -1,16 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Endianness",
-        namespace = JsPackage.GLOBAL)
-public class Endianness {
-    public static int Big,
-            Little;
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Feature.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Feature.java
deleted file mode 100644
index 5b81178a43e..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Feature.java
+++ /dev/null
@@ -1,17 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Feature",
-        namespace = JsPackage.GLOBAL)
-public class Feature {
-    public static int COMPRESSED_BODY,
-            DICTIONARY_REPLACEMENT,
-            UNUSED;
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Field.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Field.java
deleted file mode 100644
index 6cdc1706df3..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Field.java
+++ /dev/null
@@ -1,123 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import elemental2.core.JsArray;
-import elemental2.core.Uint8Array;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Encoding;
-import jsinterop.annotations.JsOverlay;
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-import jsinterop.base.Js;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Field",
-        namespace = JsPackage.GLOBAL)
-public class Field {
-    @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL)
-    public interface NameUnionType {
-        @JsOverlay
-        static Field.NameUnionType of(Object o) {
-            return Js.cast(o);
-        }
-
-        @JsOverlay
-        default String asString() {
-            return Js.asString(this);
-        }
-
-        @JsOverlay
-        default Uint8Array asUint8Array() {
-            return Js.cast(this);
-        }
-
-        @JsOverlay
-        default boolean isString() {
-            return (Object) this instanceof String;
-        }
-
-        @JsOverlay
-        default boolean isUint8Array() {
-            return (Object) this instanceof Uint8Array;
-        }
-    }
-
-    public static native void addChildren(Builder builder, double childrenOffset);
-
-    public static native void addCustomMetadata(Builder builder, double customMetadataOffset);
-
-    public static native void addDictionary(Builder builder, double dictionaryOffset);
-
-    public static native void addName(Builder builder, double nameOffset);
-
-    public static native void addNullable(Builder builder, boolean nullable);
-
-    public static native void addType(Builder builder, double typeOffset);
-
-    public static native void addTypeType(Builder builder, int typeType);
-
-    public static native double createChildrenVector(Builder builder, JsArray<Double> data);
-
-    @JsOverlay
-    public static final double createChildrenVector(Builder builder, double[] data) {
-        return createChildrenVector(builder, Js.<JsArray<Double>>uncheckedCast(data));
-    }
-
-    public static native double createCustomMetadataVector(Builder builder, JsArray<Double> data);
-
-    @JsOverlay
-    public static final double createCustomMetadataVector(Builder builder, double[] data) {
-        return createCustomMetadataVector(builder, Js.<JsArray<Double>>uncheckedCast(data));
-    }
-
-    public static native double endField(Builder builder);
-
-    public static native Field getRootAsField(ByteBuffer bb, Field obj);
-
-    public static native Field getRootAsField(ByteBuffer bb);
-
-    public static native Field getSizePrefixedRootAsField(ByteBuffer bb, Field obj);
-
-    public static native Field getSizePrefixedRootAsField(ByteBuffer bb);
-
-    public static native void startChildrenVector(Builder builder, double numElems);
-
-    public static native void startCustomMetadataVector(Builder builder, double numElems);
-
-    public static native void startField(Builder builder);
-
-    public ByteBuffer bb;
-    public double bb_pos;
-
-    public native Field __init(double i, ByteBuffer bb);
-
-    public native Field children(double index, Field obj);
-
-    public native Field children(double index);
-
-    public native double childrenLength();
-
-    public native KeyValue customMetadata(double index, KeyValue obj);
-
-    public native KeyValue customMetadata(double index);
-
-    public native double customMetadataLength();
-
-    public native DictionaryEncoding dictionary();
-
-    public native DictionaryEncoding dictionary(DictionaryEncoding obj);
-
-    public native Field.NameUnionType name();
-
-    public native Field.NameUnionType name(Encoding optionalEncoding);
-
-    public native boolean nullable();
-
-    public native <T> T type(T obj);
-
-    public native int typeType();
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/FixedSizeBinary.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/FixedSizeBinary.java
deleted file mode 100644
index 701078f258f..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/FixedSizeBinary.java
+++ /dev/null
@@ -1,39 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer;
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.FixedSizeBinary",
-        namespace = JsPackage.GLOBAL)
-public class FixedSizeBinary {
-    public static native void addByteWidth(Builder builder, double byteWidth);
-
-    public static native double createFixedSizeBinary(Builder builder, double byteWidth);
-
-    public static native double endFixedSizeBinary(Builder builder);
-
-    public static native FixedSizeBinary getRootAsFixedSizeBinary(ByteBuffer bb, FixedSizeBinary obj);
-
-    public static native FixedSizeBinary getRootAsFixedSizeBinary(ByteBuffer bb);
-
-    public static native FixedSizeBinary getSizePrefixedRootAsFixedSizeBinary(
-            ByteBuffer bb, FixedSizeBinary obj);
-
-    public static native FixedSizeBinary getSizePrefixedRootAsFixedSizeBinary(ByteBuffer bb);
-
-    public static native void startFixedSizeBinary(Builder builder);
-
-    public ByteBuffer bb;
-    public double bb_pos;
-
-    public native FixedSizeBinary __init(double i, ByteBuffer bb);
-
-    public native double byteWidth();
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/FixedSizeList.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/FixedSizeList.java
deleted file mode 100644
index 6e7f302b85d..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/FixedSizeList.java
+++ /dev/null
@@ -1,39 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer;
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.FixedSizeList",
-        namespace = JsPackage.GLOBAL)
-public class FixedSizeList {
-    public static native void addListSize(Builder builder, double listSize);
-
-    public static native double createFixedSizeList(Builder builder, double listSize);
-
-    public static native double endFixedSizeList(Builder builder);
-
-    public static native FixedSizeList getRootAsFixedSizeList(ByteBuffer bb, FixedSizeList obj);
-
-    public static native FixedSizeList getRootAsFixedSizeList(ByteBuffer bb);
-
-    public static native FixedSizeList getSizePrefixedRootAsFixedSizeList(
-            ByteBuffer bb, FixedSizeList obj);
-
-    public static native FixedSizeList getSizePrefixedRootAsFixedSizeList(ByteBuffer bb);
-
-    public static native void startFixedSizeList(Builder builder);
-
-    public ByteBuffer bb;
-    public double bb_pos;
-
-    public native FixedSizeList __init(double i, ByteBuffer bb);
-
-    public native double listSize();
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/FloatingPoint.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/FloatingPoint.java
deleted file mode 100644
index 739c3f455d6..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/FloatingPoint.java
+++ /dev/null
@@ -1,39 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer;
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.FloatingPoint",
-        namespace = JsPackage.GLOBAL)
-public class FloatingPoint {
-    public static native void addPrecision(Builder builder, int precision);
-
-    public static native double createFloatingPoint(Builder builder, int precision);
-
-    public static native double endFloatingPoint(Builder builder);
-
-    public static native FloatingPoint getRootAsFloatingPoint(ByteBuffer bb, FloatingPoint obj);
-
-    public static native FloatingPoint getRootAsFloatingPoint(ByteBuffer bb);
-
-    public static native FloatingPoint getSizePrefixedRootAsFloatingPoint(
-            ByteBuffer bb, FloatingPoint obj);
-
-    public static native FloatingPoint getSizePrefixedRootAsFloatingPoint(ByteBuffer bb);
-
-    public static native void startFloatingPoint(Builder builder);
-
-    public ByteBuffer bb;
-    public double bb_pos;
-
-    public native FloatingPoint __init(double i, ByteBuffer bb);
-
-    public native int precision();
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Int.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Int.java
deleted file mode 100644
index c1441852b02..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Int.java
+++ /dev/null
@@ -1,42 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer;
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Int",
-        namespace = JsPackage.GLOBAL)
-public class Int {
-    public static native void addBitWidth(Builder builder, double bitWidth);
-
-    public static native void addIsSigned(Builder builder, boolean isSigned);
-
-    public static native double createInt(Builder builder, double bitWidth, boolean isSigned);
-
-    public static native double endInt(Builder builder);
-
-    public static native Int getRootAsInt(ByteBuffer bb, Int obj);
-
-    public static native Int getRootAsInt(ByteBuffer bb);
-
-    public static native Int getSizePrefixedRootAsInt(ByteBuffer bb, Int obj);
-
-    public static native Int getSizePrefixedRootAsInt(ByteBuffer bb);
-
-    public static native void startInt(Builder builder);
-
-    public ByteBuffer bb;
-    public double bb_pos;
-
-    public native Int __init(double i, ByteBuffer bb);
-
-    public native double bitWidth();
-
-    public native boolean isSigned();
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Interval.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Interval.java
deleted file mode 100644
index c844a0f89d2..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Interval.java
+++ /dev/null
@@ -1,38 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer;
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Interval",
-        namespace = JsPackage.GLOBAL)
-public class Interval {
-    public static native void addUnit(Builder builder, int unit);
-
-    public static native double createInterval(Builder builder, int unit);
-
-    public static native double endInterval(Builder builder);
-
-    public static native Interval getRootAsInterval(ByteBuffer bb, Interval obj);
-
-    public static native Interval getRootAsInterval(ByteBuffer bb);
-
-    public static native Interval getSizePrefixedRootAsInterval(ByteBuffer bb, Interval obj);
-
-    public static native Interval getSizePrefixedRootAsInterval(ByteBuffer bb);
-
-    public static native void startInterval(Builder builder);
-
-    public ByteBuffer bb;
-    public double bb_pos;
-
-    public native Interval __init(double i, ByteBuffer bb);
-
-    public native int unit();
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/IntervalUnit.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/IntervalUnit.java
deleted file mode 100644
index aee19255211..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/IntervalUnit.java
+++ /dev/null
@@ -1,16 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.IntervalUnit",
-        namespace = JsPackage.GLOBAL)
-public class IntervalUnit {
-    public static int DAY_TIME,
-            YEAR_MONTH;
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/KeyValue.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/KeyValue.java
deleted file mode 100644
index 3a7511d91eb..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/KeyValue.java
+++ /dev/null
@@ -1,106 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import elemental2.core.Uint8Array;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Encoding;
-import jsinterop.annotations.JsOverlay;
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-import jsinterop.base.Js;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.KeyValue",
-        namespace = JsPackage.GLOBAL)
-public class KeyValue {
-    @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL)
-    public interface KeyUnionType {
-        @JsOverlay
-        static KeyValue.KeyUnionType of(Object o) {
-            return Js.cast(o);
-        }
-
-        @JsOverlay
-        default String asString() {
-            return Js.asString(this);
-        }
-
-        @JsOverlay
-        default Uint8Array asUint8Array() {
-            return Js.cast(this);
-        }
-
-        @JsOverlay
-        default boolean isString() {
-            return (Object) this instanceof String;
-        }
-
-        @JsOverlay
-        default boolean isUint8Array() {
-            return (Object) this instanceof Uint8Array;
-        }
-    }
-
-    @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL)
-    public interface ValueUnionType {
-        @JsOverlay
-        static KeyValue.ValueUnionType of(Object o) {
-            return Js.cast(o);
-        }
-
-        @JsOverlay
-        default String asString() {
-            return Js.asString(this);
-        }
-
-        @JsOverlay
-        default Uint8Array asUint8Array() {
-            return Js.cast(this);
-        }
-
-        @JsOverlay
-        default boolean isString() {
-            return (Object) this instanceof String;
-        }
-
-        @JsOverlay
-        default boolean isUint8Array() {
-            return (Object) this instanceof Uint8Array;
-        }
-    }
-
-    public static native void addKey(Builder builder, double keyOffset);
-
-    public static native void addValue(Builder builder, double valueOffset);
-
-    public static native double createKeyValue(Builder builder, double keyOffset, double valueOffset);
-
-    public static native double endKeyValue(Builder builder);
-
-    public static native KeyValue getRootAsKeyValue(ByteBuffer bb, KeyValue obj);
-
-    public static native KeyValue getRootAsKeyValue(ByteBuffer bb);
-
-    public static native KeyValue getSizePrefixedRootAsKeyValue(ByteBuffer bb, KeyValue obj);
-
-    public static native KeyValue getSizePrefixedRootAsKeyValue(ByteBuffer bb);
-
-    public static native void startKeyValue(Builder builder);
-
-    public ByteBuffer bb;
-    public double bb_pos;
-
-    public native KeyValue __init(double i, ByteBuffer bb);
-
-    public native KeyValue.KeyUnionType key();
-
-    public native KeyValue.KeyUnionType key(Encoding optionalEncoding);
-
-    public native KeyValue.ValueUnionType value();
-
-    public native KeyValue.ValueUnionType value(Encoding optionalEncoding);
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/LargeBinary.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/LargeBinary.java
deleted file mode 100644
index 98b3ca5480b..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/LargeBinary.java
+++ /dev/null
@@ -1,34 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer;
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.LargeBinary",
-        namespace = JsPackage.GLOBAL)
-public class LargeBinary {
-    public static native double createLargeBinary(Builder builder);
-
-    public static native double endLargeBinary(Builder builder);
-
-    public static native LargeBinary getRootAsLargeBinary(ByteBuffer bb, LargeBinary obj);
-
-    public static native LargeBinary getRootAsLargeBinary(ByteBuffer bb);
-
-    public static native LargeBinary getSizePrefixedRootAsLargeBinary(ByteBuffer bb, LargeBinary obj);
-
-    public static native LargeBinary getSizePrefixedRootAsLargeBinary(ByteBuffer bb);
-
-    public static native void startLargeBinary(Builder builder);
-
-    public ByteBuffer bb;
-    public double bb_pos;
-
-    public native LargeBinary __init(double i, ByteBuffer bb);
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/LargeList.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/LargeList.java
deleted file mode 100644
index c7581db121c..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/LargeList.java
+++ /dev/null
@@ -1,34 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer;
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.LargeList",
-        namespace = JsPackage.GLOBAL)
-public class LargeList {
-    public static native double createLargeList(Builder builder);
-
-    public static native double endLargeList(Builder builder);
-
-    public static native LargeList getRootAsLargeList(ByteBuffer bb, LargeList obj);
-
-    public static native LargeList getRootAsLargeList(ByteBuffer bb);
-
-    public static native LargeList getSizePrefixedRootAsLargeList(ByteBuffer bb, LargeList obj);
-
-    public static native LargeList getSizePrefixedRootAsLargeList(ByteBuffer bb);
-
-    public static native void startLargeList(Builder builder);
-
-    public ByteBuffer bb;
-    public double bb_pos;
-
-    public native LargeList __init(double i, ByteBuffer bb);
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/LargeUtf8.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/LargeUtf8.java
deleted file mode 100644
index 55031aa3e29..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/LargeUtf8.java
+++ /dev/null
@@ -1,34 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer;
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.LargeUtf8",
-        namespace = JsPackage.GLOBAL)
-public class LargeUtf8 {
-    public static native double createLargeUtf8(Builder builder);
-
-    public static native double endLargeUtf8(Builder builder);
-
-    public static native LargeUtf8 getRootAsLargeUtf8(ByteBuffer bb, LargeUtf8 obj);
-
-    public static native LargeUtf8 getRootAsLargeUtf8(ByteBuffer bb);
-
-    public static native LargeUtf8 getSizePrefixedRootAsLargeUtf8(ByteBuffer bb, LargeUtf8 obj);
-
-    public static native LargeUtf8 getSizePrefixedRootAsLargeUtf8(ByteBuffer bb);
-
-    public static native void startLargeUtf8(Builder builder);
-
-    public ByteBuffer bb;
-    public double bb_pos;
-
-    public native LargeUtf8 __init(double i, ByteBuffer bb);
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/List.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/List.java
deleted file mode 100644
index f4a0f64ba0e..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/List.java
+++ /dev/null
@@ -1,34 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer;
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.List",
-        namespace = JsPackage.GLOBAL)
-public class List {
-    public static native double createList(Builder builder);
-
-    public static native double endList(Builder builder);
-
-    public static native List getRootAsList(ByteBuffer bb, List obj);
-
-    public static native List getRootAsList(ByteBuffer bb);
-
-    public static native List getSizePrefixedRootAsList(ByteBuffer bb, List obj);
-
-    public static native List getSizePrefixedRootAsList(ByteBuffer bb);
-
-    public static native void startList(Builder builder);
-
-    public ByteBuffer bb;
-    public double bb_pos;
-
-    public native List __init(double i, ByteBuffer bb);
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Map.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Map.java
deleted file mode 100644
index 1a2e54e0fc6..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Map.java
+++ /dev/null
@@ -1,38 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer;
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Map",
-        namespace = JsPackage.GLOBAL)
-public class Map {
-    public static native void addKeysSorted(Builder builder, boolean keysSorted);
-
-    public static native double createMap(Builder builder, boolean keysSorted);
-
-    public static native double endMap(Builder builder);
-
-    public static native Map getRootAsMap(ByteBuffer bb, Map obj);
-
-    public static native Map getRootAsMap(ByteBuffer bb);
-
-    public static native Map getSizePrefixedRootAsMap(ByteBuffer bb, Map obj);
-
-    public static native Map getSizePrefixedRootAsMap(ByteBuffer bb);
-
-    public static native void startMap(Builder builder);
-
-    public ByteBuffer bb;
-    public double bb_pos;
-
-    public native Map __init(double i, ByteBuffer bb);
-
-    public native boolean keysSorted();
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/MetadataVersion.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/MetadataVersion.java
deleted file mode 100644
index 6e26ef3e21b..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/MetadataVersion.java
+++ /dev/null
@@ -1,19 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.MetadataVersion",
-        namespace = JsPackage.GLOBAL)
-public class MetadataVersion {
-    public static int V1,
-            V2,
-            V3,
-            V4,
-            V5;
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Null.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Null.java
deleted file mode 100644
index a4c761da693..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Null.java
+++ /dev/null
@@ -1,34 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer;
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Null",
-        namespace = JsPackage.GLOBAL)
-public class Null {
-    public static native double createNull(Builder builder);
-
-    public static native double endNull(Builder builder);
-
-    public static native Null getRootAsNull(ByteBuffer bb, Null obj);
-
-    public static native Null getRootAsNull(ByteBuffer bb);
-
-    public static native Null getSizePrefixedRootAsNull(ByteBuffer bb, Null obj);
-
-    public static native Null getSizePrefixedRootAsNull(ByteBuffer bb);
-
-    public static native void startNull(Builder builder);
-
-    public ByteBuffer bb;
-    public double bb_pos;
-
-    public native Null __init(double i, ByteBuffer bb);
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Precision.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Precision.java
deleted file mode 100644
index 0c7f6cad0e3..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Precision.java
+++ /dev/null
@@ -1,17 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Precision",
-        namespace = JsPackage.GLOBAL)
-public class Precision {
-    public static int DOUBLE,
-            HALF,
-            SINGLE;
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Schema.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Schema.java
deleted file mode 100644
index 23ddc811ef7..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Schema.java
+++ /dev/null
@@ -1,100 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import elemental2.core.JsArray;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Long;
-import jsinterop.annotations.JsOverlay;
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-import jsinterop.base.Js;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Schema",
-        namespace = JsPackage.GLOBAL)
-public class Schema {
-    public static native void addCustomMetadata(Builder builder, double customMetadataOffset);
-
-    public static native void addEndianness(Builder builder, int endianness);
-
-    public static native void addFeatures(Builder builder, double featuresOffset);
-
-    public static native void addFields(Builder builder, double fieldsOffset);
-
-    public static native double createCustomMetadataVector(Builder builder, JsArray<Double> data);
-
-    @JsOverlay
-    public static final double createCustomMetadataVector(Builder builder, double[] data) {
-        return createCustomMetadataVector(builder, Js.<JsArray<Double>>uncheckedCast(data));
-    }
-
-    public static native double createFeaturesVector(Builder builder, JsArray<Object> data);
-
-    @JsOverlay
-    public static final double createFeaturesVector(Builder builder, Object[] data) {
-        return createFeaturesVector(builder, Js.<JsArray<Object>>uncheckedCast(data));
-    }
-
-    public static native double createFieldsVector(Builder builder, JsArray<Double> data);
-
-    @JsOverlay
-    public static final double createFieldsVector(Builder builder, double[] data) {
-        return createFieldsVector(builder, Js.<JsArray<Double>>uncheckedCast(data));
-    }
-
-    public static native double createSchema(
-            Builder builder,
-            int endianness,
-            double fieldsOffset,
-            double customMetadataOffset,
-            double featuresOffset);
-
-    public static native double endSchema(Builder builder);
-
-    public static native void finishSchemaBuffer(Builder builder, double offset);
-
-    public static native void finishSizePrefixedSchemaBuffer(Builder builder, double offset);
-
-    public static native Schema getRootAsSchema(ByteBuffer bb, Schema obj);
-
-    public static native Schema getRootAsSchema(ByteBuffer bb);
-
-    public static native Schema getSizePrefixedRootAsSchema(ByteBuffer bb, Schema obj);
-
-    public static native Schema getSizePrefixedRootAsSchema(ByteBuffer bb);
-
-    public static native void startCustomMetadataVector(Builder builder, double numElems);
-
-    public static native void startFeaturesVector(Builder builder, double numElems);
-
-    public static native void startFieldsVector(Builder builder, double numElems);
-
-    public static native void startSchema(Builder builder);
-
-    public ByteBuffer bb;
-    public double bb_pos;
-
bb); - - public native KeyValue customMetadata(double index, KeyValue obj); - - public native KeyValue customMetadata(double index); - - public native double customMetadataLength(); - - public native int endianness(); - - public native Long features(double index); - - public native double featuresLength(); - - public native Field fields(double index, Field obj); - - public native Field fields(double index); - - public native double fieldsLength(); -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Struct_.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Struct_.java deleted file mode 100644 index 62c88e3f8ae..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Struct_.java +++ /dev/null @@ -1,34 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf; - -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer; -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; - -@JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Struct_", - namespace = JsPackage.GLOBAL) -public class Struct_ { - public static native double createStruct_(Builder builder); - - public static native double endStruct_(Builder builder); - - public static native Struct_ getRootAsStruct_(ByteBuffer bb, Struct_ obj); - - public static native Struct_ getRootAsStruct_(ByteBuffer bb); - - public static native Struct_ getSizePrefixedRootAsStruct_(ByteBuffer bb, Struct_ obj); - - public static native Struct_ getSizePrefixedRootAsStruct_(ByteBuffer bb); - - public static native void startStruct_(Builder builder); - - public ByteBuffer bb; - public double bb_pos; - - public native Struct_ __init(double i, ByteBuffer bb); -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Time.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Time.java deleted file mode 100644 index 0f40072c5dd..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Time.java +++ /dev/null @@ -1,42 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf; - -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer; -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; - -@JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Time", - namespace = JsPackage.GLOBAL) -public class Time { - public static native void addBitWidth(Builder builder, double bitWidth); - - public static native void addUnit(Builder builder, int unit); - - public static native double 
-    public static native double createTime(Builder builder, int unit, double bitWidth);
-
-    public static native double endTime(Builder builder);
-
-    public static native Time getRootAsTime(ByteBuffer bb, Time obj);
-
-    public static native Time getRootAsTime(ByteBuffer bb);
-
-    public static native Time getSizePrefixedRootAsTime(ByteBuffer bb, Time obj);
-
-    public static native Time getSizePrefixedRootAsTime(ByteBuffer bb);
-
-    public static native void startTime(Builder builder);
-
-    public ByteBuffer bb;
-    public double bb_pos;
-
-    public native Time __init(double i, ByteBuffer bb);
-
-    public native double bitWidth();
-
-    public native int unit();
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/TimeUnit.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/TimeUnit.java
deleted file mode 100644
index 780d7cc9e08..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/TimeUnit.java
+++ /dev/null
@@ -1,18 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.TimeUnit",
-        namespace = JsPackage.GLOBAL)
-public class TimeUnit {
-    public static int MICROSECOND,
-            MILLISECOND,
-            NANOSECOND,
-            SECOND;
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Timestamp.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Timestamp.java
deleted file mode 100644
index fa1ef0923ed..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Timestamp.java
+++ /dev/null
@@ -1,77 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import elemental2.core.Uint8Array;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer;
-import io.deephaven.javascript.proto.dhinternal.flatbuffers.Encoding;
-import jsinterop.annotations.JsOverlay;
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-import jsinterop.base.Js;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Timestamp",
-        namespace = JsPackage.GLOBAL)
-public class Timestamp {
-    @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL)
-    public interface TimezoneUnionType {
-        @JsOverlay
-        static Timestamp.TimezoneUnionType of(Object o) {
-            return Js.cast(o);
-        }
-
-        @JsOverlay
-        default String asString() {
-            return Js.asString(this);
-        }
-
-        @JsOverlay
-        default Uint8Array asUint8Array() {
-            return Js.cast(this);
-        }
-
-        @JsOverlay
-        default boolean isString() {
-            return (Object) this instanceof String;
-        }
-
-        @JsOverlay
-        default boolean isUint8Array() {
-            return (Object) this instanceof Uint8Array;
-        }
-    }
-
-    public static native void addTimezone(Builder builder, double timezoneOffset);
-
-    public static native void addUnit(Builder builder, int unit);
-
-    public static native double createTimestamp(
-            Builder builder, int unit, double timezoneOffset);
-
-    public static native double endTimestamp(Builder builder);
-
-    public static native Timestamp getRootAsTimestamp(ByteBuffer bb, Timestamp obj);
-
-    public static native Timestamp getRootAsTimestamp(ByteBuffer bb);
-
-    public static native Timestamp getSizePrefixedRootAsTimestamp(ByteBuffer bb, Timestamp obj);
-
-    public static native Timestamp getSizePrefixedRootAsTimestamp(ByteBuffer bb);
-
-    public static native void startTimestamp(Builder builder);
-
-    public ByteBuffer bb;
-    public double bb_pos;
-
-    public native Timestamp __init(double i, ByteBuffer bb);
-
-    public native Timestamp.TimezoneUnionType timezone();
-
-    public native Timestamp.TimezoneUnionType timezone(Encoding optionalEncoding);
-
-    public native int unit();
-}
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Type.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Type.java
deleted file mode 100644
index 6336d2a547f..00000000000
--- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Type.java
+++ /dev/null
@@ -1,36 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf;
-
-import jsinterop.annotations.JsPackage;
-import jsinterop.annotations.JsType;
-
-@JsType(
-        isNative = true,
-        name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Type",
-        namespace = JsPackage.GLOBAL)
-public class Type {
-    public static int Binary,
-            Bool,
-            Date,
-            Decimal,
-            Duration,
-            FixedSizeBinary,
-            FixedSizeList,
-            FloatingPoint,
-            Int,
-            Interval,
-            LargeBinary,
-            LargeList,
-            LargeUtf8,
-            List,
-            Map,
-            NONE,
-            Null,
-            Struct_,
-            Time,
-            Timestamp,
-            Union,
-            Utf8;
-}
"dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Union", - namespace = JsPackage.GLOBAL) -public class Union { - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface CreateTypeIdsVectorDataUnionType { - @JsOverlay - static Union.CreateTypeIdsVectorDataUnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default Int32Array asInt32Array() { - return Js.cast(this); - } - - @JsOverlay - default JsArray asJsArray() { - return Js.cast(this); - } - - @JsOverlay - default Uint8Array asUint8Array() { - return Js.cast(this); - } - - @JsOverlay - default boolean isInt32Array() { - return (Object) this instanceof Int32Array; - } - - @JsOverlay - default boolean isJsArray() { - return (Object) this instanceof JsArray; - } - - @JsOverlay - default boolean isUint8Array() { - return (Object) this instanceof Uint8Array; - } - } - - public static native void addMode(Builder builder, int mode); - - public static native void addTypeIds(Builder builder, double typeIdsOffset); - - @Deprecated - public static native double createTypeIdsVector( - Builder builder, Union.CreateTypeIdsVectorDataUnionType data); - - @JsOverlay - @Deprecated - public static final double createTypeIdsVector(Builder builder, Int32Array data) { - return createTypeIdsVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createTypeIdsVector(Builder builder, JsArray data) { - return createTypeIdsVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createTypeIdsVector(Builder builder, Uint8Array data) { - return createTypeIdsVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createTypeIdsVector(Builder builder, double[] data) { - return createTypeIdsVector(builder, Js.>uncheckedCast(data)); - } - - public static native double createUnion(Builder builder, int mode, double typeIdsOffset); - - public static native double endUnion(Builder builder); - - public static native Union getRootAsUnion(ByteBuffer bb, Union obj); - - public static native Union getRootAsUnion(ByteBuffer bb); - - public static native Union getSizePrefixedRootAsUnion(ByteBuffer bb, Union obj); - - public static native Union getSizePrefixedRootAsUnion(ByteBuffer bb); - - public static native void startTypeIdsVector(Builder builder, double numElems); - - public static native void startUnion(Builder builder); - - public ByteBuffer bb; - public double bb_pos; - - public native Union __init(double i, ByteBuffer bb); - - public native int mode(); - - public native double typeIds(double index); - - public native Int32Array typeIdsArray(); - - public native double typeIdsLength(); -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/UnionMode.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/UnionMode.java deleted file mode 100644 index 30553887de2..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/UnionMode.java +++ /dev/null @@ -1,16 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf; - -import 
jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; - -@JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.UnionMode", - namespace = JsPackage.GLOBAL) -public class UnionMode { - public static int Dense, - Sparse; -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Utf8.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Utf8.java deleted file mode 100644 index 1149f1f8977..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Utf8.java +++ /dev/null @@ -1,34 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.arrow.flight.flatbuf.schema_generated.org.apache.arrow.flatbuf; - -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer; -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; - -@JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Utf8", - namespace = JsPackage.GLOBAL) -public class Utf8 { - public static native double createUtf8(Builder builder); - - public static native double endUtf8(Builder builder); - - public static native Utf8 getRootAsUtf8(ByteBuffer bb, Utf8 obj); - - public static native Utf8 getRootAsUtf8(ByteBuffer bb); - - public static native Utf8 getSizePrefixedRootAsUtf8(ByteBuffer bb, Utf8 obj); - - public static native Utf8 getSizePrefixedRootAsUtf8(ByteBuffer bb); - - public static native void startUtf8(Builder builder); - - public ByteBuffer bb; - public double bb_pos; - - public native Utf8 __init(double i, ByteBuffer bb); -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/flatbuffers/Builder.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/flatbuffers/Builder.java deleted file mode 100644 index 0c9118583d8..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/flatbuffers/Builder.java +++ /dev/null @@ -1,143 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.flatbuffers; - -import elemental2.core.Uint8Array; -import jsinterop.annotations.JsOverlay; -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; -import jsinterop.base.Js; - -@JsType(isNative = true, name = "dhinternal.flatbuffers.Builder", namespace = JsPackage.GLOBAL) -public class Builder { - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface CreateStringSUnionType { - @JsOverlay - static Builder.CreateStringSUnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default String asString() { - return Js.asString(this); - } - - @JsOverlay - default Uint8Array asUint8Array() { - return Js.cast(this); - } - - @JsOverlay - default boolean isString() { - return (Object) this instanceof String; - } - - @JsOverlay - default boolean isUint8Array() { - return (Object) this instanceof Uint8Array; - } - } - - public static native ByteBuffer growByteBuffer(ByteBuffer bb); - - public Builder() {} - - public Builder(double 
initial_size) {} - - public native void addFieldFloat32(double voffset, double value, double defaultValue); - - public native void addFieldFloat64(double voffset, double value, double defaultValue); - - public native void addFieldInt16(double voffset, double value, double defaultValue); - - public native void addFieldInt32(double voffset, double value, double defaultValue); - - public native void addFieldInt64(double voffset, Long value, Long defaultValue); - - public native void addFieldInt8(double voffset, double value, double defaultValue); - - public native void addFieldOffset(double voffset, double value, double defaultValue); - - public native void addFieldStruct(double voffset, double value, double defaultValue); - - public native void addFloat32(double value); - - public native void addFloat64(double value); - - public native void addInt16(double value); - - public native void addInt32(double value); - - public native void addInt64(Long value); - - public native void addInt8(double value); - - public native void addOffset(double offset); - - public native Uint8Array asUint8Array(); - - public native void clear(); - - public native Long createLong(double low, double high); - - public native double createString(Builder.CreateStringSUnionType s); - - @JsOverlay - public final double createString(String s) { - return createString(Js.uncheckedCast(s)); - } - - @JsOverlay - public final double createString(Uint8Array s) { - return createString(Js.uncheckedCast(s)); - } - - public native ByteBuffer dataBuffer(); - - public native double endObject(); - - public native double endVector(); - - public native void finish(double root_table, String file_identifier, boolean size_prefix); - - public native void finish(double root_table, String file_identifier); - - public native void finish(double root_table); - - public native void finishSizePrefixed(double root_table, String file_identifier); - - public native void finishSizePrefixed(double root_table); - - public native void forceDefaults(boolean forceDefaults); - - public native void nested(double obj); - - public native void notNested(); - - public native double offset(); - - public native void pad(double byte_size); - - public native void prep(double size, double additional_bytes); - - public native void requiredField(double table, double field); - - public native void slot(double voffset); - - public native void startObject(double numfields); - - public native void startVector(double elem_size, double num_elems, double alignment); - - public native void writeFloat32(double value); - - public native void writeFloat64(double value); - - public native void writeInt16(double value); - - public native void writeInt32(double value); - - public native void writeInt64(Long value); - - public native void writeInt8(double value); -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/flatbuffers/ByteBuffer.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/flatbuffers/ByteBuffer.java deleted file mode 100644 index 85a71935d92..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/flatbuffers/ByteBuffer.java +++ /dev/null @@ -1,115 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.flatbuffers; - -import elemental2.core.Uint8Array; -import jsinterop.annotations.JsOverlay; -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; -import 
jsinterop.base.Js; - -@JsType(isNative = true, name = "dhinternal.flatbuffers.ByteBuffer", namespace = JsPackage.GLOBAL) -public class ByteBuffer { - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface __stringUnionType { - @JsOverlay - static ByteBuffer.__stringUnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default String asString() { - return Js.asString(this); - } - - @JsOverlay - default Uint8Array asUint8Array() { - return Js.cast(this); - } - - @JsOverlay - default boolean isString() { - return (Object) this instanceof String; - } - - @JsOverlay - default boolean isUint8Array() { - return (Object) this instanceof Uint8Array; - } - } - - public static native ByteBuffer allocate(double byte_size); - - public ByteBuffer(Uint8Array bytes) {} - - public native boolean __has_identifier(String ident); - - public native double __indirect(double offset); - - public native double __offset(double bb_pos, double vtable_offset); - - public native ByteBuffer.__stringUnionType __string(double offset, int optionalEncoding); - - public native ByteBuffer.__stringUnionType __string(double offset); - - public native T __union(T t, double offset); - - public native double __vector(double offset); - - public native double __vector_len(double offset); - - public native Uint8Array bytes(); - - public native double capacity(); - - public native void clear(); - - public native Long createLong(double low, double high); - - public native String getBufferIdentifier(); - - public native double position(); - - public native double readFloat32(double offset); - - public native double readFloat64(double offset); - - public native double readInt16(double offset); - - public native double readInt32(double offset); - - public native Long readInt64(double offset); - - public native double readInt8(double offset); - - public native double readUint16(double offset); - - public native double readUint32(double offset); - - public native Long readUint64(double offset); - - public native double readUint8(double offset); - - public native void setPosition(double position); - - public native void writeFloat32(double offset, double value); - - public native void writeFloat64(double offset, double value); - - public native void writeInt16(double offset, double value); - - public native void writeInt32(double offset, double value); - - public native void writeInt64(double offset, Long value); - - public native void writeInt8(double offset, double value); - - public native void writeUint16(double offset, double value); - - public native void writeUint32(double offset, double value); - - public native void writeUint64(double offset, Long value); - - public native void writeUint8(double offset, double value); -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/flatbuffers/Encoding.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/flatbuffers/Encoding.java deleted file mode 100644 index be269d6968a..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/flatbuffers/Encoding.java +++ /dev/null @@ -1,13 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.flatbuffers; - -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; - -@JsType(isNative = true, name = "dhinternal.flatbuffers.Encoding", namespace = JsPackage.GLOBAL) -public class Encoding { - public static int 
UTF16_STRING, - UTF8_BYTES; -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/flatbuffers/Long.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/flatbuffers/Long.java deleted file mode 100644 index e7551a82d7d..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/flatbuffers/Long.java +++ /dev/null @@ -1,25 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.flatbuffers; - -import jsinterop.annotations.JsMethod; -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; - -@JsType(isNative = true, name = "dhinternal.flatbuffers.Long", namespace = JsPackage.GLOBAL) -public class Long { - public static Long ZERO; - - public static native Long create(double low, double high); - - public double high; - public double low; - - public Long(double low, double high) {} - - @JsMethod(name = "equals") - public native boolean equals_(Object other); - - public native double toFloat64(); -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/flatbuffers/Table.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/flatbuffers/Table.java deleted file mode 100644 index e2dcada04fb..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/flatbuffers/Table.java +++ /dev/null @@ -1,31 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.flatbuffers; - -import jsinterop.annotations.JsOverlay; -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsProperty; -import jsinterop.annotations.JsType; -import jsinterop.base.Js; -import jsinterop.base.JsPropertyMap; - -@JsType(isNative = true, name = "dhinternal.flatbuffers.Table", namespace = JsPackage.GLOBAL) -public interface Table { - @JsOverlay - static Table create() { - return Js.uncheckedCast(JsPropertyMap.of()); - } - - @JsProperty - ByteBuffer getBb(); - - @JsProperty - double getBb_pos(); - - @JsProperty - void setBb(ByteBuffer bb); - - @JsProperty - void setBb_pos(double bb_pos); -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageMessageType.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageMessageType.java deleted file mode 100644 index 9dc50f672a7..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageMessageType.java +++ /dev/null @@ -1,23 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf; - -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; - -@JsType( - isNative = true, - name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.BarrageMessageType", - namespace = JsPackage.GLOBAL) -public class BarrageMessageType { - public static int BarragePublicationRequest, - BarrageSerializationOptions, - BarrageSnapshotRequest, - BarrageSubscriptionRequest, - 
BarrageUpdateMetadata, - NewSessionRequest, - None, - RefreshSessionRequest, - SessionInfoResponse; -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageMessageWrapper.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageMessageWrapper.java deleted file mode 100644 index 7de97b929cd..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageMessageWrapper.java +++ /dev/null @@ -1,130 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf; - -import elemental2.core.Int8Array; -import elemental2.core.JsArray; -import elemental2.core.Uint8Array; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer; -import jsinterop.annotations.JsOverlay; -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; -import jsinterop.base.Js; - -@JsType( - isNative = true, - name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.BarrageMessageWrapper", - namespace = JsPackage.GLOBAL) -public class BarrageMessageWrapper { - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface CreateMsgPayloadVectorDataUnionType { - @JsOverlay - static BarrageMessageWrapper.CreateMsgPayloadVectorDataUnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default Int8Array asInt8Array() { - return Js.cast(this); - } - - @JsOverlay - default JsArray asJsArray() { - return Js.cast(this); - } - - @JsOverlay - default Uint8Array asUint8Array() { - return Js.cast(this); - } - - @JsOverlay - default boolean isInt8Array() { - return (Object) this instanceof Int8Array; - } - - @JsOverlay - default boolean isJsArray() { - return (Object) this instanceof JsArray; - } - - @JsOverlay - default boolean isUint8Array() { - return (Object) this instanceof Uint8Array; - } - } - - public static native void addMagic(Builder builder, double magic); - - public static native void addMsgPayload(Builder builder, double msgPayloadOffset); - - public static native void addMsgType(Builder builder, int msgType); - - public static native double createBarrageMessageWrapper( - Builder builder, double magic, int msgType, double msgPayloadOffset); - - @Deprecated - public static native double createMsgPayloadVector( - Builder builder, BarrageMessageWrapper.CreateMsgPayloadVectorDataUnionType data); - - @JsOverlay - @Deprecated - public static final double createMsgPayloadVector(Builder builder, Int8Array data) { - return createMsgPayloadVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createMsgPayloadVector(Builder builder, JsArray data) { - return createMsgPayloadVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createMsgPayloadVector(Builder builder, Uint8Array data) { - return createMsgPayloadVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createMsgPayloadVector(Builder builder, double[] data) { - return 
createMsgPayloadVector(builder, Js.>uncheckedCast(data)); - } - - public static native double endBarrageMessageWrapper(Builder builder); - - public static native BarrageMessageWrapper getRootAsBarrageMessageWrapper( - ByteBuffer bb, BarrageMessageWrapper obj); - - public static native BarrageMessageWrapper getRootAsBarrageMessageWrapper(ByteBuffer bb); - - public static native BarrageMessageWrapper getSizePrefixedRootAsBarrageMessageWrapper( - ByteBuffer bb, BarrageMessageWrapper obj); - - public static native BarrageMessageWrapper getSizePrefixedRootAsBarrageMessageWrapper( - ByteBuffer bb); - - public static native void startBarrageMessageWrapper(Builder builder); - - public static native void startMsgPayloadVector(Builder builder, double numElems); - - public ByteBuffer bb; - public double bb_pos; - - public native BarrageMessageWrapper __init(double i, ByteBuffer bb); - - public native double magic(); - - public native double msgPayload(double index); - - public native Int8Array msgPayloadArray(); - - public native double msgPayloadLength(); - - public native int msgType(); -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageModColumnMetadata.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageModColumnMetadata.java deleted file mode 100644 index 076793a4be2..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageModColumnMetadata.java +++ /dev/null @@ -1,125 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf; - -import elemental2.core.Int8Array; -import elemental2.core.JsArray; -import elemental2.core.Uint8Array; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer; -import jsinterop.annotations.JsOverlay; -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; -import jsinterop.base.Js; - -@JsType( - isNative = true, - name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.BarrageModColumnMetadata", - namespace = JsPackage.GLOBAL) -public class BarrageModColumnMetadata { - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface CreateModifiedRowsVectorDataUnionType { - @JsOverlay - static BarrageModColumnMetadata.CreateModifiedRowsVectorDataUnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default Int8Array asInt8Array() { - return Js.cast(this); - } - - @JsOverlay - default JsArray asJsArray() { - return Js.cast(this); - } - - @JsOverlay - default Uint8Array asUint8Array() { - return Js.cast(this); - } - - @JsOverlay - default boolean isInt8Array() { - return (Object) this instanceof Int8Array; - } - - @JsOverlay - default boolean isJsArray() { - return (Object) this instanceof JsArray; - } - - @JsOverlay - default boolean isUint8Array() { - return (Object) this instanceof Uint8Array; - } - } - - public static native void addModifiedRows(Builder builder, double modifiedRowsOffset); - - public static native double createBarrageModColumnMetadata( - Builder builder, double 
modifiedRowsOffset); - - @Deprecated - public static native double createModifiedRowsVector( - Builder builder, BarrageModColumnMetadata.CreateModifiedRowsVectorDataUnionType data); - - @JsOverlay - @Deprecated - public static final double createModifiedRowsVector(Builder builder, Int8Array data) { - return createModifiedRowsVector( - builder, - Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createModifiedRowsVector(Builder builder, JsArray data) { - return createModifiedRowsVector( - builder, - Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createModifiedRowsVector(Builder builder, Uint8Array data) { - return createModifiedRowsVector( - builder, - Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createModifiedRowsVector(Builder builder, double[] data) { - return createModifiedRowsVector(builder, Js.>uncheckedCast(data)); - } - - public static native double endBarrageModColumnMetadata(Builder builder); - - public static native BarrageModColumnMetadata getRootAsBarrageModColumnMetadata( - ByteBuffer bb, BarrageModColumnMetadata obj); - - public static native BarrageModColumnMetadata getRootAsBarrageModColumnMetadata(ByteBuffer bb); - - public static native BarrageModColumnMetadata getSizePrefixedRootAsBarrageModColumnMetadata( - ByteBuffer bb, BarrageModColumnMetadata obj); - - public static native BarrageModColumnMetadata getSizePrefixedRootAsBarrageModColumnMetadata( - ByteBuffer bb); - - public static native void startBarrageModColumnMetadata(Builder builder); - - public static native void startModifiedRowsVector(Builder builder, double numElems); - - public ByteBuffer bb; - public double bb_pos; - - public native BarrageModColumnMetadata __init(double i, ByteBuffer bb); - - public native double modifiedRows(double index); - - public native Int8Array modifiedRowsArray(); - - public native double modifiedRowsLength(); -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarragePublicationOptions.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarragePublicationOptions.java deleted file mode 100644 index 0726110f013..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarragePublicationOptions.java +++ /dev/null @@ -1,42 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf; - -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer; -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; - -@JsType( - isNative = true, - name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.BarragePublicationOptions", - namespace = JsPackage.GLOBAL) -public class BarragePublicationOptions { - public static native void addUseDeephavenNulls(Builder builder, boolean useDeephavenNulls); - - public static native double createBarragePublicationOptions( - Builder builder, boolean useDeephavenNulls); - - public static native double 
endBarragePublicationOptions(Builder builder); - - public static native BarragePublicationOptions getRootAsBarragePublicationOptions( - ByteBuffer bb, BarragePublicationOptions obj); - - public static native BarragePublicationOptions getRootAsBarragePublicationOptions(ByteBuffer bb); - - public static native BarragePublicationOptions getSizePrefixedRootAsBarragePublicationOptions( - ByteBuffer bb, BarragePublicationOptions obj); - - public static native BarragePublicationOptions getSizePrefixedRootAsBarragePublicationOptions( - ByteBuffer bb); - - public static native void startBarragePublicationOptions(Builder builder); - - public ByteBuffer bb; - public double bb_pos; - - public native BarragePublicationOptions __init(double i, ByteBuffer bb); - - public native boolean useDeephavenNulls(); -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarragePublicationRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarragePublicationRequest.java deleted file mode 100644 index ee5fbe08fcc..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarragePublicationRequest.java +++ /dev/null @@ -1,125 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf; - -import elemental2.core.Int8Array; -import elemental2.core.JsArray; -import elemental2.core.Uint8Array; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer; -import jsinterop.annotations.JsOverlay; -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; -import jsinterop.base.Js; - -@JsType( - isNative = true, - name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.BarragePublicationRequest", - namespace = JsPackage.GLOBAL) -public class BarragePublicationRequest { - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface CreateTicketVectorDataUnionType { - @JsOverlay - static BarragePublicationRequest.CreateTicketVectorDataUnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default Int8Array asInt8Array() { - return Js.cast(this); - } - - @JsOverlay - default JsArray asJsArray() { - return Js.cast(this); - } - - @JsOverlay - default Uint8Array asUint8Array() { - return Js.cast(this); - } - - @JsOverlay - default boolean isInt8Array() { - return (Object) this instanceof Int8Array; - } - - @JsOverlay - default boolean isJsArray() { - return (Object) this instanceof JsArray; - } - - @JsOverlay - default boolean isUint8Array() { - return (Object) this instanceof Uint8Array; - } - } - - public static native void addPublishOptions(Builder builder, double publishOptionsOffset); - - public static native void addTicket(Builder builder, double ticketOffset); - - @Deprecated - public static native double createTicketVector( - Builder builder, BarragePublicationRequest.CreateTicketVectorDataUnionType data); - - @JsOverlay - @Deprecated - public static final double createTicketVector(Builder builder, Int8Array data) { - return createTicketVector( - builder, 
Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createTicketVector(Builder builder, JsArray data) { - return createTicketVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createTicketVector(Builder builder, Uint8Array data) { - return createTicketVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createTicketVector(Builder builder, double[] data) { - return createTicketVector(builder, Js.>uncheckedCast(data)); - } - - public static native double endBarragePublicationRequest(Builder builder); - - public static native BarragePublicationRequest getRootAsBarragePublicationRequest( - ByteBuffer bb, BarragePublicationRequest obj); - - public static native BarragePublicationRequest getRootAsBarragePublicationRequest(ByteBuffer bb); - - public static native BarragePublicationRequest getSizePrefixedRootAsBarragePublicationRequest( - ByteBuffer bb, BarragePublicationRequest obj); - - public static native BarragePublicationRequest getSizePrefixedRootAsBarragePublicationRequest( - ByteBuffer bb); - - public static native void startBarragePublicationRequest(Builder builder); - - public static native void startTicketVector(Builder builder, double numElems); - - public ByteBuffer bb; - public double bb_pos; - - public native BarragePublicationRequest __init(double i, ByteBuffer bb); - - public native BarragePublicationOptions publishOptions(); - - public native BarragePublicationOptions publishOptions(BarragePublicationOptions obj); - - public native double ticket(double index); - - public native Int8Array ticketArray(); - - public native double ticketLength(); -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageSnapshotOptions.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageSnapshotOptions.java deleted file mode 100644 index efedac5d190..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageSnapshotOptions.java +++ /dev/null @@ -1,59 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf; - -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer; -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; - -@JsType( - isNative = true, - name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.BarrageSnapshotOptions", - namespace = JsPackage.GLOBAL) -public class BarrageSnapshotOptions { - public static native void addBatchSize(Builder builder, double batchSize); - - public static native void addColumnConversionMode( - Builder builder, int columnConversionMode); - - public static native void addMaxMessageSize(Builder builder, double maxMessageSize); - - public static native void addUseDeephavenNulls(Builder builder, boolean useDeephavenNulls); - - public static native double createBarrageSnapshotOptions( - Builder builder, - int columnConversionMode, - boolean useDeephavenNulls, - double batchSize, - double 
maxMessageSize); - - public static native double endBarrageSnapshotOptions(Builder builder); - - public static native BarrageSnapshotOptions getRootAsBarrageSnapshotOptions( - ByteBuffer bb, BarrageSnapshotOptions obj); - - public static native BarrageSnapshotOptions getRootAsBarrageSnapshotOptions(ByteBuffer bb); - - public static native BarrageSnapshotOptions getSizePrefixedRootAsBarrageSnapshotOptions( - ByteBuffer bb, BarrageSnapshotOptions obj); - - public static native BarrageSnapshotOptions getSizePrefixedRootAsBarrageSnapshotOptions( - ByteBuffer bb); - - public static native void startBarrageSnapshotOptions(Builder builder); - - public ByteBuffer bb; - public double bb_pos; - - public native BarrageSnapshotOptions __init(double i, ByteBuffer bb); - - public native double batchSize(); - - public native int columnConversionMode(); - - public native double maxMessageSize(); - - public native boolean useDeephavenNulls(); -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageSnapshotRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageSnapshotRequest.java deleted file mode 100644 index 8e8d43e6c58..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageSnapshotRequest.java +++ /dev/null @@ -1,287 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf; - -import elemental2.core.Int8Array; -import elemental2.core.JsArray; -import elemental2.core.Uint8Array; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer; -import jsinterop.annotations.JsOverlay; -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; -import jsinterop.base.Js; - -@JsType( - isNative = true, - name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.BarrageSnapshotRequest", - namespace = JsPackage.GLOBAL) -public class BarrageSnapshotRequest { - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface CreateColumnsVectorDataUnionType { - @JsOverlay - static BarrageSnapshotRequest.CreateColumnsVectorDataUnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default Int8Array asInt8Array() { - return Js.cast(this); - } - - @JsOverlay - default JsArray asJsArray() { - return Js.cast(this); - } - - @JsOverlay - default Uint8Array asUint8Array() { - return Js.cast(this); - } - - @JsOverlay - default boolean isInt8Array() { - return (Object) this instanceof Int8Array; - } - - @JsOverlay - default boolean isJsArray() { - return (Object) this instanceof JsArray; - } - - @JsOverlay - default boolean isUint8Array() { - return (Object) this instanceof Uint8Array; - } - } - - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface CreateTicketVectorDataUnionType { - @JsOverlay - static BarrageSnapshotRequest.CreateTicketVectorDataUnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default Int8Array asInt8Array() { - return Js.cast(this); - } - - @JsOverlay - default JsArray asJsArray() { - 
return Js.cast(this); - } - - @JsOverlay - default Uint8Array asUint8Array() { - return Js.cast(this); - } - - @JsOverlay - default boolean isInt8Array() { - return (Object) this instanceof Int8Array; - } - - @JsOverlay - default boolean isJsArray() { - return (Object) this instanceof JsArray; - } - - @JsOverlay - default boolean isUint8Array() { - return (Object) this instanceof Uint8Array; - } - } - - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface CreateViewportVectorDataUnionType { - @JsOverlay - static BarrageSnapshotRequest.CreateViewportVectorDataUnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default Int8Array asInt8Array() { - return Js.cast(this); - } - - @JsOverlay - default JsArray asJsArray() { - return Js.cast(this); - } - - @JsOverlay - default Uint8Array asUint8Array() { - return Js.cast(this); - } - - @JsOverlay - default boolean isInt8Array() { - return (Object) this instanceof Int8Array; - } - - @JsOverlay - default boolean isJsArray() { - return (Object) this instanceof JsArray; - } - - @JsOverlay - default boolean isUint8Array() { - return (Object) this instanceof Uint8Array; - } - } - - public static native void addColumns(Builder builder, double columnsOffset); - - public static native void addReverseViewport(Builder builder, boolean reverseViewport); - - public static native void addSnapshotOptions(Builder builder, double snapshotOptionsOffset); - - public static native void addTicket(Builder builder, double ticketOffset); - - public static native void addViewport(Builder builder, double viewportOffset); - - @Deprecated - public static native double createColumnsVector( - Builder builder, BarrageSnapshotRequest.CreateColumnsVectorDataUnionType data); - - @JsOverlay - @Deprecated - public static final double createColumnsVector(Builder builder, Int8Array data) { - return createColumnsVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createColumnsVector(Builder builder, JsArray data) { - return createColumnsVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createColumnsVector(Builder builder, Uint8Array data) { - return createColumnsVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createColumnsVector(Builder builder, double[] data) { - return createColumnsVector(builder, Js.>uncheckedCast(data)); - } - - @Deprecated - public static native double createTicketVector( - Builder builder, BarrageSnapshotRequest.CreateTicketVectorDataUnionType data); - - @JsOverlay - @Deprecated - public static final double createTicketVector(Builder builder, Int8Array data) { - return createTicketVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createTicketVector(Builder builder, JsArray data) { - return createTicketVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createTicketVector(Builder builder, Uint8Array data) { - return createTicketVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createTicketVector(Builder builder, double[] data) { - return createTicketVector(builder, Js.>uncheckedCast(data)); - } - - @Deprecated - public static native double createViewportVector( - Builder builder, BarrageSnapshotRequest.CreateViewportVectorDataUnionType data); - - @JsOverlay - @Deprecated - public static 
final double createViewportVector(Builder builder, Int8Array data) { - return createViewportVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createViewportVector(Builder builder, JsArray data) { - return createViewportVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createViewportVector(Builder builder, Uint8Array data) { - return createViewportVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createViewportVector(Builder builder, double[] data) { - return createViewportVector(builder, Js.>uncheckedCast(data)); - } - - public static native double endBarrageSnapshotRequest(Builder builder); - - public static native BarrageSnapshotRequest getRootAsBarrageSnapshotRequest( - ByteBuffer bb, BarrageSnapshotRequest obj); - - public static native BarrageSnapshotRequest getRootAsBarrageSnapshotRequest(ByteBuffer bb); - - public static native BarrageSnapshotRequest getSizePrefixedRootAsBarrageSnapshotRequest( - ByteBuffer bb, BarrageSnapshotRequest obj); - - public static native BarrageSnapshotRequest getSizePrefixedRootAsBarrageSnapshotRequest( - ByteBuffer bb); - - public static native void startBarrageSnapshotRequest(Builder builder); - - public static native void startColumnsVector(Builder builder, double numElems); - - public static native void startTicketVector(Builder builder, double numElems); - - public static native void startViewportVector(Builder builder, double numElems); - - public ByteBuffer bb; - public double bb_pos; - - public native BarrageSnapshotRequest __init(double i, ByteBuffer bb); - - public native double columns(double index); - - public native Int8Array columnsArray(); - - public native double columnsLength(); - - public native boolean reverseViewport(); - - public native BarrageSnapshotOptions snapshotOptions(); - - public native BarrageSnapshotOptions snapshotOptions(BarrageSnapshotOptions obj); - - public native double ticket(double index); - - public native Int8Array ticketArray(); - - public native double ticketLength(); - - public native double viewport(double index); - - public native Int8Array viewportArray(); - - public native double viewportLength(); -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageSubscriptionOptions.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageSubscriptionOptions.java deleted file mode 100644 index 9b6d39b53ec..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageSubscriptionOptions.java +++ /dev/null @@ -1,65 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf; - -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer; -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; - -@JsType( - isNative = true, - name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.BarrageSubscriptionOptions", - namespace = 
JsPackage.GLOBAL) -public class BarrageSubscriptionOptions { - public static native void addBatchSize(Builder builder, double batchSize); - - public static native void addColumnConversionMode( - Builder builder, int columnConversionMode); - - public static native void addMaxMessageSize(Builder builder, double maxMessageSize); - - public static native void addMinUpdateIntervalMs(Builder builder, double minUpdateIntervalMs); - - public static native void addUseDeephavenNulls(Builder builder, boolean useDeephavenNulls); - - public static native double createBarrageSubscriptionOptions( - Builder builder, - int columnConversionMode, - boolean useDeephavenNulls, - double minUpdateIntervalMs, - double batchSize, - double maxMessageSize); - - public static native double endBarrageSubscriptionOptions(Builder builder); - - public static native BarrageSubscriptionOptions getRootAsBarrageSubscriptionOptions( - ByteBuffer bb, BarrageSubscriptionOptions obj); - - public static native BarrageSubscriptionOptions getRootAsBarrageSubscriptionOptions( - ByteBuffer bb); - - public static native BarrageSubscriptionOptions getSizePrefixedRootAsBarrageSubscriptionOptions( - ByteBuffer bb, BarrageSubscriptionOptions obj); - - public static native BarrageSubscriptionOptions getSizePrefixedRootAsBarrageSubscriptionOptions( - ByteBuffer bb); - - public static native void startBarrageSubscriptionOptions(Builder builder); - - public ByteBuffer bb; - public double bb_pos; - - public native BarrageSubscriptionOptions __init(double i, ByteBuffer bb); - - public native double batchSize(); - - public native int columnConversionMode(); - - public native double maxMessageSize(); - - public native double minUpdateIntervalMs(); - - public native boolean useDeephavenNulls(); -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageSubscriptionRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageSubscriptionRequest.java deleted file mode 100644 index 807184ee34c..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageSubscriptionRequest.java +++ /dev/null @@ -1,298 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf; - -import elemental2.core.Int8Array; -import elemental2.core.JsArray; -import elemental2.core.Uint8Array; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer; -import jsinterop.annotations.JsOverlay; -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; -import jsinterop.base.Js; - -@JsType( - isNative = true, - name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.BarrageSubscriptionRequest", - namespace = JsPackage.GLOBAL) -public class BarrageSubscriptionRequest { - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface CreateColumnsVectorDataUnionType { - @JsOverlay - static BarrageSubscriptionRequest.CreateColumnsVectorDataUnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default Int8Array asInt8Array() 
{ - return Js.cast(this); - } - - @JsOverlay - default JsArray asJsArray() { - return Js.cast(this); - } - - @JsOverlay - default Uint8Array asUint8Array() { - return Js.cast(this); - } - - @JsOverlay - default boolean isInt8Array() { - return (Object) this instanceof Int8Array; - } - - @JsOverlay - default boolean isJsArray() { - return (Object) this instanceof JsArray; - } - - @JsOverlay - default boolean isUint8Array() { - return (Object) this instanceof Uint8Array; - } - } - - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface CreateTicketVectorDataUnionType { - @JsOverlay - static BarrageSubscriptionRequest.CreateTicketVectorDataUnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default Int8Array asInt8Array() { - return Js.cast(this); - } - - @JsOverlay - default JsArray asJsArray() { - return Js.cast(this); - } - - @JsOverlay - default Uint8Array asUint8Array() { - return Js.cast(this); - } - - @JsOverlay - default boolean isInt8Array() { - return (Object) this instanceof Int8Array; - } - - @JsOverlay - default boolean isJsArray() { - return (Object) this instanceof JsArray; - } - - @JsOverlay - default boolean isUint8Array() { - return (Object) this instanceof Uint8Array; - } - } - - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface CreateViewportVectorDataUnionType { - @JsOverlay - static BarrageSubscriptionRequest.CreateViewportVectorDataUnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default Int8Array asInt8Array() { - return Js.cast(this); - } - - @JsOverlay - default JsArray asJsArray() { - return Js.cast(this); - } - - @JsOverlay - default Uint8Array asUint8Array() { - return Js.cast(this); - } - - @JsOverlay - default boolean isInt8Array() { - return (Object) this instanceof Int8Array; - } - - @JsOverlay - default boolean isJsArray() { - return (Object) this instanceof JsArray; - } - - @JsOverlay - default boolean isUint8Array() { - return (Object) this instanceof Uint8Array; - } - } - - public static native void addColumns(Builder builder, double columnsOffset); - - public static native void addReverseViewport(Builder builder, boolean reverseViewport); - - public static native void addSubscriptionOptions( - Builder builder, double subscriptionOptionsOffset); - - public static native void addTicket(Builder builder, double ticketOffset); - - public static native void addViewport(Builder builder, double viewportOffset); - - @Deprecated - public static native double createColumnsVector( - Builder builder, BarrageSubscriptionRequest.CreateColumnsVectorDataUnionType data); - - @JsOverlay - @Deprecated - public static final double createColumnsVector(Builder builder, Int8Array data) { - return createColumnsVector( - builder, - Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createColumnsVector(Builder builder, JsArray data) { - return createColumnsVector( - builder, - Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createColumnsVector(Builder builder, Uint8Array data) { - return createColumnsVector( - builder, - Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createColumnsVector(Builder builder, double[] data) { - return createColumnsVector(builder, Js.>uncheckedCast(data)); - } - - @Deprecated - public static native double createTicketVector( - Builder builder, BarrageSubscriptionRequest.CreateTicketVectorDataUnionType data); - - @JsOverlay - @Deprecated - 
public static final double createTicketVector(Builder builder, Int8Array data) { - return createTicketVector( - builder, - Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createTicketVector(Builder builder, JsArray data) { - return createTicketVector( - builder, - Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createTicketVector(Builder builder, Uint8Array data) { - return createTicketVector( - builder, - Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createTicketVector(Builder builder, double[] data) { - return createTicketVector(builder, Js.>uncheckedCast(data)); - } - - @Deprecated - public static native double createViewportVector( - Builder builder, BarrageSubscriptionRequest.CreateViewportVectorDataUnionType data); - - @JsOverlay - @Deprecated - public static final double createViewportVector(Builder builder, Int8Array data) { - return createViewportVector( - builder, - Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createViewportVector(Builder builder, JsArray data) { - return createViewportVector( - builder, - Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createViewportVector(Builder builder, Uint8Array data) { - return createViewportVector( - builder, - Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createViewportVector(Builder builder, double[] data) { - return createViewportVector(builder, Js.>uncheckedCast(data)); - } - - public static native double endBarrageSubscriptionRequest(Builder builder); - - public static native BarrageSubscriptionRequest getRootAsBarrageSubscriptionRequest( - ByteBuffer bb, BarrageSubscriptionRequest obj); - - public static native BarrageSubscriptionRequest getRootAsBarrageSubscriptionRequest( - ByteBuffer bb); - - public static native BarrageSubscriptionRequest getSizePrefixedRootAsBarrageSubscriptionRequest( - ByteBuffer bb, BarrageSubscriptionRequest obj); - - public static native BarrageSubscriptionRequest getSizePrefixedRootAsBarrageSubscriptionRequest( - ByteBuffer bb); - - public static native void startBarrageSubscriptionRequest(Builder builder); - - public static native void startColumnsVector(Builder builder, double numElems); - - public static native void startTicketVector(Builder builder, double numElems); - - public static native void startViewportVector(Builder builder, double numElems); - - public ByteBuffer bb; - public double bb_pos; - - public native BarrageSubscriptionRequest __init(double i, ByteBuffer bb); - - public native double columns(double index); - - public native Int8Array columnsArray(); - - public native double columnsLength(); - - public native boolean reverseViewport(); - - public native BarrageSubscriptionOptions subscriptionOptions(); - - public native BarrageSubscriptionOptions subscriptionOptions(BarrageSubscriptionOptions obj); - - public native double ticket(double index); - - public native Int8Array ticketArray(); - - public native double ticketLength(); - - public native double viewport(double index); - - public native Int8Array viewportArray(); - - public native double viewportLength(); -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageUpdateMetadata.java 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageUpdateMetadata.java deleted file mode 100644 index 3d9f4bd51df..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageUpdateMetadata.java +++ /dev/null @@ -1,575 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf; - -import elemental2.core.Int8Array; -import elemental2.core.JsArray; -import elemental2.core.Uint8Array; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Long; -import jsinterop.annotations.JsOverlay; -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; -import jsinterop.base.Js; - -@JsType( - isNative = true, - name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.BarrageUpdateMetadata", - namespace = JsPackage.GLOBAL) -public class BarrageUpdateMetadata { - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface CreateAddedRowsIncludedVectorDataUnionType { - @JsOverlay - static BarrageUpdateMetadata.CreateAddedRowsIncludedVectorDataUnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default Int8Array asInt8Array() { - return Js.cast(this); - } - - @JsOverlay - default JsArray asJsArray() { - return Js.cast(this); - } - - @JsOverlay - default Uint8Array asUint8Array() { - return Js.cast(this); - } - - @JsOverlay - default boolean isInt8Array() { - return (Object) this instanceof Int8Array; - } - - @JsOverlay - default boolean isJsArray() { - return (Object) this instanceof JsArray; - } - - @JsOverlay - default boolean isUint8Array() { - return (Object) this instanceof Uint8Array; - } - } - - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface CreateAddedRowsVectorDataUnionType { - @JsOverlay - static BarrageUpdateMetadata.CreateAddedRowsVectorDataUnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default Int8Array asInt8Array() { - return Js.cast(this); - } - - @JsOverlay - default JsArray asJsArray() { - return Js.cast(this); - } - - @JsOverlay - default Uint8Array asUint8Array() { - return Js.cast(this); - } - - @JsOverlay - default boolean isInt8Array() { - return (Object) this instanceof Int8Array; - } - - @JsOverlay - default boolean isJsArray() { - return (Object) this instanceof JsArray; - } - - @JsOverlay - default boolean isUint8Array() { - return (Object) this instanceof Uint8Array; - } - } - - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface CreateEffectiveColumnSetVectorDataUnionType { - @JsOverlay - static BarrageUpdateMetadata.CreateEffectiveColumnSetVectorDataUnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default Int8Array asInt8Array() { - return Js.cast(this); - } - - @JsOverlay - default JsArray asJsArray() { - return Js.cast(this); - } - - @JsOverlay - default Uint8Array asUint8Array() { - return Js.cast(this); - } - - @JsOverlay - default boolean isInt8Array() { - return (Object) this instanceof Int8Array; - } - - @JsOverlay - default boolean isJsArray() { - 
return (Object) this instanceof JsArray; - } - - @JsOverlay - default boolean isUint8Array() { - return (Object) this instanceof Uint8Array; - } - } - - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface CreateEffectiveViewportVectorDataUnionType { - @JsOverlay - static BarrageUpdateMetadata.CreateEffectiveViewportVectorDataUnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default Int8Array asInt8Array() { - return Js.cast(this); - } - - @JsOverlay - default JsArray asJsArray() { - return Js.cast(this); - } - - @JsOverlay - default Uint8Array asUint8Array() { - return Js.cast(this); - } - - @JsOverlay - default boolean isInt8Array() { - return (Object) this instanceof Int8Array; - } - - @JsOverlay - default boolean isJsArray() { - return (Object) this instanceof JsArray; - } - - @JsOverlay - default boolean isUint8Array() { - return (Object) this instanceof Uint8Array; - } - } - - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface CreateRemovedRowsVectorDataUnionType { - @JsOverlay - static BarrageUpdateMetadata.CreateRemovedRowsVectorDataUnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default Int8Array asInt8Array() { - return Js.cast(this); - } - - @JsOverlay - default JsArray asJsArray() { - return Js.cast(this); - } - - @JsOverlay - default Uint8Array asUint8Array() { - return Js.cast(this); - } - - @JsOverlay - default boolean isInt8Array() { - return (Object) this instanceof Int8Array; - } - - @JsOverlay - default boolean isJsArray() { - return (Object) this instanceof JsArray; - } - - @JsOverlay - default boolean isUint8Array() { - return (Object) this instanceof Uint8Array; - } - } - - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface CreateShiftDataVectorDataUnionType { - @JsOverlay - static BarrageUpdateMetadata.CreateShiftDataVectorDataUnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default Int8Array asInt8Array() { - return Js.cast(this); - } - - @JsOverlay - default JsArray asJsArray() { - return Js.cast(this); - } - - @JsOverlay - default Uint8Array asUint8Array() { - return Js.cast(this); - } - - @JsOverlay - default boolean isInt8Array() { - return (Object) this instanceof Int8Array; - } - - @JsOverlay - default boolean isJsArray() { - return (Object) this instanceof JsArray; - } - - @JsOverlay - default boolean isUint8Array() { - return (Object) this instanceof Uint8Array; - } - } - - public static native void addAddedRows(Builder builder, double addedRowsOffset); - - public static native void addAddedRowsIncluded(Builder builder, double addedRowsIncludedOffset); - - public static native void addEffectiveColumnSet(Builder builder, double effectiveColumnSetOffset); - - public static native void addEffectiveReverseViewport( - Builder builder, boolean effectiveReverseViewport); - - public static native void addEffectiveViewport(Builder builder, double effectiveViewportOffset); - - public static native void addFirstSeq(Builder builder, Long firstSeq); - - public static native void addIsSnapshot(Builder builder, boolean isSnapshot); - - public static native void addLastSeq(Builder builder, Long lastSeq); - - public static native void addModColumnNodes(Builder builder, double modColumnNodesOffset); - - public static native void addRemovedRows(Builder builder, double removedRowsOffset); - - public static native void addShiftData(Builder builder, double shiftDataOffset); - - @Deprecated - public static native double 
createAddedRowsIncludedVector( - Builder builder, BarrageUpdateMetadata.CreateAddedRowsIncludedVectorDataUnionType data); - - @JsOverlay - @Deprecated - public static final double createAddedRowsIncludedVector(Builder builder, Int8Array data) { - return createAddedRowsIncludedVector( - builder, - Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createAddedRowsIncludedVector(Builder builder, JsArray data) { - return createAddedRowsIncludedVector( - builder, - Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createAddedRowsIncludedVector(Builder builder, Uint8Array data) { - return createAddedRowsIncludedVector( - builder, - Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createAddedRowsIncludedVector(Builder builder, double[] data) { - return createAddedRowsIncludedVector(builder, Js.>uncheckedCast(data)); - } - - @Deprecated - public static native double createAddedRowsVector( - Builder builder, BarrageUpdateMetadata.CreateAddedRowsVectorDataUnionType data); - - @JsOverlay - @Deprecated - public static final double createAddedRowsVector(Builder builder, Int8Array data) { - return createAddedRowsVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createAddedRowsVector(Builder builder, JsArray data) { - return createAddedRowsVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createAddedRowsVector(Builder builder, Uint8Array data) { - return createAddedRowsVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createAddedRowsVector(Builder builder, double[] data) { - return createAddedRowsVector(builder, Js.>uncheckedCast(data)); - } - - public static native double createBarrageUpdateMetadata( - Builder builder, - Long firstSeq, - Long lastSeq, - boolean isSnapshot, - double effectiveViewportOffset, - boolean effectiveReverseViewport, - double effectiveColumnSetOffset, - double addedRowsOffset, - double removedRowsOffset, - double shiftDataOffset, - double addedRowsIncludedOffset, - double modColumnNodesOffset); - - @Deprecated - public static native double createEffectiveColumnSetVector( - Builder builder, BarrageUpdateMetadata.CreateEffectiveColumnSetVectorDataUnionType data); - - @JsOverlay - @Deprecated - public static final double createEffectiveColumnSetVector(Builder builder, Int8Array data) { - return createEffectiveColumnSetVector( - builder, - Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createEffectiveColumnSetVector(Builder builder, JsArray data) { - return createEffectiveColumnSetVector( - builder, - Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createEffectiveColumnSetVector(Builder builder, Uint8Array data) { - return createEffectiveColumnSetVector( - builder, - Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createEffectiveColumnSetVector(Builder builder, double[] data) { - return createEffectiveColumnSetVector(builder, Js.>uncheckedCast(data)); - } - - @Deprecated - public static native double createEffectiveViewportVector( - Builder builder, BarrageUpdateMetadata.CreateEffectiveViewportVectorDataUnionType data); - - @JsOverlay - @Deprecated - public static final double createEffectiveViewportVector(Builder builder, Int8Array data) { - return createEffectiveViewportVector( - builder, - 
Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createEffectiveViewportVector(Builder builder, JsArray data) { - return createEffectiveViewportVector( - builder, - Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createEffectiveViewportVector(Builder builder, Uint8Array data) { - return createEffectiveViewportVector( - builder, - Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createEffectiveViewportVector(Builder builder, double[] data) { - return createEffectiveViewportVector(builder, Js.>uncheckedCast(data)); - } - - public static native double createModColumnNodesVector(Builder builder, JsArray data); - - @JsOverlay - public static final double createModColumnNodesVector(Builder builder, double[] data) { - return createModColumnNodesVector(builder, Js.>uncheckedCast(data)); - } - - @Deprecated - public static native double createRemovedRowsVector( - Builder builder, BarrageUpdateMetadata.CreateRemovedRowsVectorDataUnionType data); - - @JsOverlay - @Deprecated - public static final double createRemovedRowsVector(Builder builder, Int8Array data) { - return createRemovedRowsVector( - builder, - Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createRemovedRowsVector(Builder builder, JsArray data) { - return createRemovedRowsVector( - builder, - Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createRemovedRowsVector(Builder builder, Uint8Array data) { - return createRemovedRowsVector( - builder, - Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createRemovedRowsVector(Builder builder, double[] data) { - return createRemovedRowsVector(builder, Js.>uncheckedCast(data)); - } - - @Deprecated - public static native double createShiftDataVector( - Builder builder, BarrageUpdateMetadata.CreateShiftDataVectorDataUnionType data); - - @JsOverlay - @Deprecated - public static final double createShiftDataVector(Builder builder, Int8Array data) { - return createShiftDataVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createShiftDataVector(Builder builder, JsArray data) { - return createShiftDataVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createShiftDataVector(Builder builder, Uint8Array data) { - return createShiftDataVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createShiftDataVector(Builder builder, double[] data) { - return createShiftDataVector(builder, Js.>uncheckedCast(data)); - } - - public static native double endBarrageUpdateMetadata(Builder builder); - - public static native BarrageUpdateMetadata getRootAsBarrageUpdateMetadata( - ByteBuffer bb, BarrageUpdateMetadata obj); - - public static native BarrageUpdateMetadata getRootAsBarrageUpdateMetadata(ByteBuffer bb); - - public static native BarrageUpdateMetadata getSizePrefixedRootAsBarrageUpdateMetadata( - ByteBuffer bb, BarrageUpdateMetadata obj); - - public static native BarrageUpdateMetadata getSizePrefixedRootAsBarrageUpdateMetadata( - ByteBuffer bb); - - public static native void startAddedRowsIncludedVector(Builder builder, double numElems); - - public static native void startAddedRowsVector(Builder builder, double numElems); - - public static native void startBarrageUpdateMetadata(Builder builder); - - public static native void 
startEffectiveColumnSetVector(Builder builder, double numElems); - - public static native void startEffectiveViewportVector(Builder builder, double numElems); - - public static native void startModColumnNodesVector(Builder builder, double numElems); - - public static native void startRemovedRowsVector(Builder builder, double numElems); - - public static native void startShiftDataVector(Builder builder, double numElems); - - public ByteBuffer bb; - public double bb_pos; - - public native BarrageUpdateMetadata __init(double i, ByteBuffer bb); - - public native double addedRows(double index); - - public native Int8Array addedRowsArray(); - - public native double addedRowsIncluded(double index); - - public native Int8Array addedRowsIncludedArray(); - - public native double addedRowsIncludedLength(); - - public native double addedRowsLength(); - - public native double effectiveColumnSet(double index); - - public native Int8Array effectiveColumnSetArray(); - - public native double effectiveColumnSetLength(); - - public native boolean effectiveReverseViewport(); - - public native double effectiveViewport(double index); - - public native Int8Array effectiveViewportArray(); - - public native double effectiveViewportLength(); - - public native Long firstSeq(); - - public native boolean isSnapshot(); - - public native Long lastSeq(); - - public native BarrageModColumnMetadata modColumnNodes(double index, BarrageModColumnMetadata obj); - - public native BarrageModColumnMetadata modColumnNodes(double index); - - public native double modColumnNodesLength(); - - public native double removedRows(double index); - - public native Int8Array removedRowsArray(); - - public native double removedRowsLength(); - - public native double shiftData(double index); - - public native Int8Array shiftDataArray(); - - public native double shiftDataLength(); -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/ColumnConversionMode.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/ColumnConversionMode.java deleted file mode 100644 index 30a750f44a6..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/ColumnConversionMode.java +++ /dev/null @@ -1,17 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf; - -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; - -@JsType( - isNative = true, - name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.ColumnConversionMode", - namespace = JsPackage.GLOBAL) -public class ColumnConversionMode { - public static int JavaSerialization, - Stringify, - ThrowError; -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/NewSessionRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/NewSessionRequest.java deleted file mode 100644 index a1f002c434a..00000000000 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/NewSessionRequest.java +++ /dev/null @@ -1,125 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf; - -import elemental2.core.Int8Array; -import elemental2.core.JsArray; -import elemental2.core.Uint8Array; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer; -import jsinterop.annotations.JsOverlay; -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; -import jsinterop.base.Js; - -@JsType( - isNative = true, - name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.NewSessionRequest", - namespace = JsPackage.GLOBAL) -public class NewSessionRequest { - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface CreatePayloadVectorDataUnionType { - @JsOverlay - static NewSessionRequest.CreatePayloadVectorDataUnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default Int8Array asInt8Array() { - return Js.cast(this); - } - - @JsOverlay - default JsArray asJsArray() { - return Js.cast(this); - } - - @JsOverlay - default Uint8Array asUint8Array() { - return Js.cast(this); - } - - @JsOverlay - default boolean isInt8Array() { - return (Object) this instanceof Int8Array; - } - - @JsOverlay - default boolean isJsArray() { - return (Object) this instanceof JsArray; - } - - @JsOverlay - default boolean isUint8Array() { - return (Object) this instanceof Uint8Array; - } - } - - public static native void addPayload(Builder builder, double payloadOffset); - - public static native void addProtocolVersion(Builder builder, double protocolVersion); - - public static native double createNewSessionRequest( - Builder builder, double protocolVersion, double payloadOffset); - - @Deprecated - public static native double createPayloadVector( - Builder builder, NewSessionRequest.CreatePayloadVectorDataUnionType data); - - @JsOverlay - @Deprecated - public static final double createPayloadVector(Builder builder, Int8Array data) { - return createPayloadVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createPayloadVector(Builder builder, JsArray data) { - return createPayloadVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createPayloadVector(Builder builder, Uint8Array data) { - return createPayloadVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createPayloadVector(Builder builder, double[] data) { - return createPayloadVector(builder, Js.>uncheckedCast(data)); - } - - public static native double endNewSessionRequest(Builder builder); - - public static native NewSessionRequest getRootAsNewSessionRequest( - ByteBuffer bb, NewSessionRequest obj); - - public static native NewSessionRequest getRootAsNewSessionRequest(ByteBuffer bb); - - public static native NewSessionRequest getSizePrefixedRootAsNewSessionRequest( - ByteBuffer bb, NewSessionRequest obj); - - public static native NewSessionRequest getSizePrefixedRootAsNewSessionRequest(ByteBuffer bb); - - public static native void startNewSessionRequest(Builder builder); - - public static native void 
startPayloadVector(Builder builder, double numElems); - - public ByteBuffer bb; - public double bb_pos; - - public native NewSessionRequest __init(double i, ByteBuffer bb); - - public native double payload(double index); - - public native Int8Array payloadArray(); - - public native double payloadLength(); - - public native double protocolVersion(); -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/RefreshSessionRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/RefreshSessionRequest.java deleted file mode 100644 index 3fcc2501206..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/RefreshSessionRequest.java +++ /dev/null @@ -1,121 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf; - -import elemental2.core.Int8Array; -import elemental2.core.JsArray; -import elemental2.core.Uint8Array; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer; -import jsinterop.annotations.JsOverlay; -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; -import jsinterop.base.Js; - -@JsType( - isNative = true, - name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.RefreshSessionRequest", - namespace = JsPackage.GLOBAL) -public class RefreshSessionRequest { - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface CreateSessionVectorDataUnionType { - @JsOverlay - static RefreshSessionRequest.CreateSessionVectorDataUnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default Int8Array asInt8Array() { - return Js.cast(this); - } - - @JsOverlay - default JsArray asJsArray() { - return Js.cast(this); - } - - @JsOverlay - default Uint8Array asUint8Array() { - return Js.cast(this); - } - - @JsOverlay - default boolean isInt8Array() { - return (Object) this instanceof Int8Array; - } - - @JsOverlay - default boolean isJsArray() { - return (Object) this instanceof JsArray; - } - - @JsOverlay - default boolean isUint8Array() { - return (Object) this instanceof Uint8Array; - } - } - - public static native void addSession(Builder builder, double sessionOffset); - - public static native double createRefreshSessionRequest(Builder builder, double sessionOffset); - - @Deprecated - public static native double createSessionVector( - Builder builder, RefreshSessionRequest.CreateSessionVectorDataUnionType data); - - @JsOverlay - @Deprecated - public static final double createSessionVector(Builder builder, Int8Array data) { - return createSessionVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createSessionVector(Builder builder, JsArray data) { - return createSessionVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createSessionVector(Builder builder, Uint8Array data) { - return createSessionVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createSessionVector(Builder 
builder, double[] data) { - return createSessionVector(builder, Js.>uncheckedCast(data)); - } - - public static native double endRefreshSessionRequest(Builder builder); - - public static native RefreshSessionRequest getRootAsRefreshSessionRequest( - ByteBuffer bb, RefreshSessionRequest obj); - - public static native RefreshSessionRequest getRootAsRefreshSessionRequest(ByteBuffer bb); - - public static native RefreshSessionRequest getSizePrefixedRootAsRefreshSessionRequest( - ByteBuffer bb, RefreshSessionRequest obj); - - public static native RefreshSessionRequest getSizePrefixedRootAsRefreshSessionRequest( - ByteBuffer bb); - - public static native void startRefreshSessionRequest(Builder builder); - - public static native void startSessionVector(Builder builder, double numElems); - - public ByteBuffer bb; - public double bb_pos; - - public native RefreshSessionRequest __init(double i, ByteBuffer bb); - - public native double session(double index); - - public native Int8Array sessionArray(); - - public native double sessionLength(); -} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/SessionInfoResponse.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/SessionInfoResponse.java deleted file mode 100644 index 39658a0f27b..00000000000 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/SessionInfoResponse.java +++ /dev/null @@ -1,211 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.javascript.proto.dhinternal.io.deephaven.barrage.flatbuf.barrage_generated.io.deephaven.barrage.flatbuf; - -import elemental2.core.Int8Array; -import elemental2.core.JsArray; -import elemental2.core.Uint8Array; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Long; -import jsinterop.annotations.JsOverlay; -import jsinterop.annotations.JsPackage; -import jsinterop.annotations.JsType; -import jsinterop.base.Js; - -@JsType( - isNative = true, - name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.SessionInfoResponse", - namespace = JsPackage.GLOBAL) -public class SessionInfoResponse { - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface CreateMetadataHeaderVectorDataUnionType { - @JsOverlay - static SessionInfoResponse.CreateMetadataHeaderVectorDataUnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default Int8Array asInt8Array() { - return Js.cast(this); - } - - @JsOverlay - default JsArray asJsArray() { - return Js.cast(this); - } - - @JsOverlay - default Uint8Array asUint8Array() { - return Js.cast(this); - } - - @JsOverlay - default boolean isInt8Array() { - return (Object) this instanceof Int8Array; - } - - @JsOverlay - default boolean isJsArray() { - return (Object) this instanceof JsArray; - } - - @JsOverlay - default boolean isUint8Array() { - return (Object) this instanceof Uint8Array; - } - } - - @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) - public interface CreateSessionTokenVectorDataUnionType { - @JsOverlay - static 
SessionInfoResponse.CreateSessionTokenVectorDataUnionType of(Object o) { - return Js.cast(o); - } - - @JsOverlay - default Int8Array asInt8Array() { - return Js.cast(this); - } - - @JsOverlay - default JsArray asJsArray() { - return Js.cast(this); - } - - @JsOverlay - default Uint8Array asUint8Array() { - return Js.cast(this); - } - - @JsOverlay - default boolean isInt8Array() { - return (Object) this instanceof Int8Array; - } - - @JsOverlay - default boolean isJsArray() { - return (Object) this instanceof JsArray; - } - - @JsOverlay - default boolean isUint8Array() { - return (Object) this instanceof Uint8Array; - } - } - - public static native void addMetadataHeader(Builder builder, double metadataHeaderOffset); - - public static native void addSessionToken(Builder builder, double sessionTokenOffset); - - public static native void addTokenRefreshDeadlineMs(Builder builder, Long tokenRefreshDeadlineMs); - - @Deprecated - public static native double createMetadataHeaderVector( - Builder builder, SessionInfoResponse.CreateMetadataHeaderVectorDataUnionType data); - - @JsOverlay - @Deprecated - public static final double createMetadataHeaderVector(Builder builder, Int8Array data) { - return createMetadataHeaderVector( - builder, - Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createMetadataHeaderVector(Builder builder, JsArray data) { - return createMetadataHeaderVector( - builder, - Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createMetadataHeaderVector(Builder builder, Uint8Array data) { - return createMetadataHeaderVector( - builder, - Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createMetadataHeaderVector(Builder builder, double[] data) { - return createMetadataHeaderVector(builder, Js.>uncheckedCast(data)); - } - - public static native double createSessionInfoResponse( - Builder builder, - double metadataHeaderOffset, - double sessionTokenOffset, - Long tokenRefreshDeadlineMs); - - @Deprecated - public static native double createSessionTokenVector( - Builder builder, SessionInfoResponse.CreateSessionTokenVectorDataUnionType data); - - @JsOverlay - @Deprecated - public static final double createSessionTokenVector(Builder builder, Int8Array data) { - return createSessionTokenVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createSessionTokenVector(Builder builder, JsArray data) { - return createSessionTokenVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createSessionTokenVector(Builder builder, Uint8Array data) { - return createSessionTokenVector( - builder, Js.uncheckedCast(data)); - } - - @JsOverlay - @Deprecated - public static final double createSessionTokenVector(Builder builder, double[] data) { - return createSessionTokenVector(builder, Js.>uncheckedCast(data)); - } - - public static native double endSessionInfoResponse(Builder builder); - - public static native SessionInfoResponse getRootAsSessionInfoResponse( - ByteBuffer bb, SessionInfoResponse obj); - - public static native SessionInfoResponse getRootAsSessionInfoResponse(ByteBuffer bb); - - public static native SessionInfoResponse getSizePrefixedRootAsSessionInfoResponse( - ByteBuffer bb, SessionInfoResponse obj); - - public static native SessionInfoResponse getSizePrefixedRootAsSessionInfoResponse(ByteBuffer bb); - - public static native void startMetadataHeaderVector(Builder builder, double 
numElems);
-
-    public static native void startSessionInfoResponse(Builder builder);
-
-    public static native void startSessionTokenVector(Builder builder, double numElems);
-
-    public ByteBuffer bb;
-    public double bb_pos;
-
-    public native SessionInfoResponse __init(double i, ByteBuffer bb);
-
-    public native double metadataHeader(double index);
-
-    public native Int8Array metadataHeaderArray();
-
-    public native double metadataHeaderLength();
-
-    public native double sessionToken(double index);
-
-    public native Int8Array sessionTokenArray();
-
-    public native double sessionTokenLength();
-
-    public native Long tokenRefreshDeadlineMs();
-}

From cf9aee566bc502d0f3fb28a8b49b5c7a50c61f24 Mon Sep 17 00:00:00 2001
From: Colin Alworth
Date: Tue, 9 Jan 2024 13:47:08 -0600
Subject: [PATCH 004/219] Add a note for follow up

---
 .../io/deephaven/web/client/api/barrage/WebBarrageUtils.java | 1 +
 1 file changed, 1 insertion(+)

diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java
index a099b4c34a7..5b5f9a0b6c4 100644
--- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java
+++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java
@@ -220,6 +220,7 @@ public static ByteBuffer serializeRanges(Set<RangeSet> rangeSets) {
 
     public static ByteBuffer typedArrayToAlignedLittleEndianByteBuffer(TypedArray data) {
         // Slice before wrapping to align contents
+        // TODO: potentially only do this if not already aligned
        ByteBuffer bb = TypedArrayHelper.wrap(data.slice());
         bb.order(ByteOrder.LITTLE_ENDIAN);
         return bb;

From de5870605cab3a503b0f9b1e8138a7018bfcbd3b Mon Sep 17 00:00:00 2001
From: Colin Alworth
Date: Thu, 11 Jan 2024 12:32:50 -0600
Subject: [PATCH 005/219] Draft to get Chunks working in js

---
 .../java/io/deephaven/base/verify/Assert.java |  26 +----
 .../io/deephaven/base/verify/Require.java     |  42 ++------
 .../base/verify/RequirementFailure.java       |   5 +
 .../io/deephaven/util/MultiException.java     |   2 +
 .../deephaven/util/type/ArrayTypeUtils.java   |  10 +-
 .../io/deephaven/util/type/TypeUtils.java     |  33 ++++--
 .../main/resources/io/deephaven/Util.gwt.xml  |   9 ++
 .../io/deephaven/chunk/Chunk.gwt.xml          |   5 +
 .../io/deephaven/util/QueryConstants.java     |   3 +
 web/client-api/client-api.gradle              |   3 +
 .../io/deephaven/web/DeephavenApi.gwt.xml     |   4 +
 .../chunk/util/pools/MultiChunkPool.java      | 100 ++++++++++++++++++
 .../jetbrains/annotations/Annotations.gwt.xml |   3 +
 13 files changed, 181 insertions(+), 64 deletions(-)
 create mode 100644 Util/src/main/resources/io/deephaven/Util.gwt.xml
 create mode 100644 engine/chunk/src/main/resources/io/deephaven/chunk/Chunk.gwt.xml
 create mode 100644 web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/chunk/util/pools/MultiChunkPool.java
 create mode 100644 web/client-api/src/main/resources/org/jetbrains/annotations/Annotations.gwt.xml

diff --git a/Base/src/main/java/io/deephaven/base/verify/Assert.java b/Base/src/main/java/io/deephaven/base/verify/Assert.java
index 4e01086ae14..3377b129dc3 100644
--- a/Base/src/main/java/io/deephaven/base/verify/Assert.java
+++ b/Base/src/main/java/io/deephaven/base/verify/Assert.java
@@ -3,7 +3,6 @@
 //
 package io.deephaven.base.verify;
 
-import java.awt.EventQueue;
 import java.util.function.Consumer;
 
 // --------------------------------------------------------------------
@@ -322,6 +321,7 @@ public static AssertionFailure valueNeverOccurs(double d, String name) {
 
     // ----------------------------------------------------------------
     /** assert (o != null && (current thread holds o's lock)) */
+    @GwtIncompatible
     public static void holdsLock(Object o, String name) {
         neqNull(o, "o");
         if (!Thread.holdsLock(o)) {
@@ -331,6 +331,7 @@ public static void holdsLock(Object o, String name) {
 
     // ----------------------------------------------------------------
     /** assert (o != null && !(current thread holds o's lock)) */
+    @GwtIncompatible
     public static void notHoldsLock(Object o, String name) {
         neqNull(o, "o");
         if (Thread.holdsLock(o)) {
@@ -340,9 +341,10 @@ public static void notHoldsLock(Object o, String name) {
 
     // ################################################################
     // instanceOf, notInstanceOf
-
+@interface GwtIncompatible {}
     // ----------------------------------------------------------------
     /** assert (o instanceof type) */
+    @GwtIncompatible
     public static void instanceOf(Object o, String name, Class<?> type) {
         if (!type.isInstance(o)) {
             fail(name + " instanceof " + type, null == o ? ExceptionMessageUtil.valueAndName(o, name)
@@ -352,6 +354,7 @@ public static void instanceOf(Object o, String name, Class<?> type) {
 
     // ----------------------------------------------------------------
     /** assert !(o instanceof type) */
+    @GwtIncompatible
     public static void notInstanceOf(Object o, String name, Class<?> type) {
         if (type.isInstance(o)) {
             fail("!(" + name + " instanceof " + type + ")",
@@ -359,25 +362,6 @@ public static void notInstanceOf(Object o, String name, Class<?> type) {
         }
     }
 
-    // ################################################################
-    // isAWTThread, isNotAWTThread
-
-    // ----------------------------------------------------------------
-    /** assert (current thread is AWT Event Dispatch Thread) */
-    public static void isAWTThread() {
-        if (!EventQueue.isDispatchThread()) {
-            fail("\"" + Thread.currentThread().getName() + "\".isAWTThread()");
-        }
-    }
-
-    // ----------------------------------------------------------------
-    /** assert (current thread is AWT Event Dispatch Thread) */
-    public static void isNotAWTThread() {
-        if (EventQueue.isDispatchThread()) {
-            fail("!\"" + Thread.currentThread().getName() + "\".isAWTThread()");
-        }
-    }
-
     // ################################################################
     // eq (primitiveValue == primitiveValue)
 
diff --git a/Base/src/main/java/io/deephaven/base/verify/Require.java b/Base/src/main/java/io/deephaven/base/verify/Require.java
index 8b5dbef0961..6e564904a77 100644
--- a/Base/src/main/java/io/deephaven/base/verify/Require.java
+++ b/Base/src/main/java/io/deephaven/base/verify/Require.java
@@ -5,9 +5,7 @@
 
 import org.jetbrains.annotations.NotNull;
 
-import java.awt.EventQueue;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.Map;
 import java.util.Objects;
 import java.util.function.Consumer;
@@ -520,6 +518,7 @@ public static RequirementFailure valueNeverOccurs(double d, String name) {
     /**
      * require (o != null && (current thread holds o's lock))
      */
+    @GwtIncompatible
     public static void holdsLock(Object o, String name, int numCallsBelowRequirer) {
         neqNull(o, "o");
         if (!Thread.holdsLock(o)) {
@@ -527,6 +526,7 @@ public static void holdsLock(Object o, String name, int numCallsBelowRequirer) {
         }
     }
 
+    @GwtIncompatible
     public static void holdsLock(Object o, String name) {
         holdsLock(o, name, 1);
     }
@@ -535,6 +535,7 @@ public static void holdsLock(Object o, String name) {
     /**
      * require (o != null && !(current thread holds o's lock))
      */
+    @GwtIncompatible
    public static void notHoldsLock(Object o, String name, int numCallsBelowRequirer) {
        neqNull(o, "o");
        if (Thread.holdsLock(o)) {
@@ -542,10 +543,12 @@ public static void notHoldsLock(Object o, String name, int numCallsBelowRequirer
        }
    }
 
+    @GwtIncompatible
    public static void notHoldsLock(Object o, String name) {
        notHoldsLock(o, name, 1);
    }
 
+    @interface GwtIncompatible {}
     // ################################################################
     // instanceOf, notInstanceOf
 
@@ -554,6 +557,7 @@ public static void notHoldsLock(Object o, String name) {
     /**
      * require (o instanceof type)
      */
+    @GwtIncompatible
     public static void instanceOf(Object o, String name, Class<?> type, int numCallsBelowRequirer) {
         if (!type.isInstance(o)) {
             fail(name + " instanceof " + type, null == o ? ExceptionMessageUtil.valueAndName(o, name)
@@ -562,6 +566,7 @@ public static void instanceOf(Object o, String name, Class<?> type, int numC
         }
     }
 
+    @GwtIncompatible
     public static void instanceOf(Object o, String name, Class<?> type) {
         instanceOf(o, name, type, 1);
     }
@@ -570,6 +575,7 @@ public static void instanceOf(Object o, String name, Class<?> type) {
     /**
      * require !(o instanceof type)
      */
+    @GwtIncompatible
     public static void notInstanceOf(Object o, String name, Class<?> type, int numCallsBelowRequirer) {
         if (type.isInstance(o)) {
             fail("!(" + name + " instanceof " + type + ")",
@@ -578,41 +584,11 @@ public static void notInstanceOf(Object o, String name, Class<?> type, int n
         }
     }
 
+    @GwtIncompatible
     public static void notInstanceOf(Object o, String name, Class<?> type) {
         notInstanceOf(o, name, type, 1);
     }
 
-    // ################################################################
-    // isAWTThread, isNotAWTThread
-
-    // ----------------------------------------------------------------
-    /**
-     * require (current thread is AWT Event Dispatch Thread)
-     */
-    public static void isAWTThread() {
-        isAWTThread(1);
-    }
-
-    public static void isAWTThread(int numCallsBelowRequirer) {
-        if (!EventQueue.isDispatchThread()) {
-            fail("\"" + Thread.currentThread().getName() + "\".isAWTThread()", numCallsBelowRequirer + 1);
-        }
-    }
-
-    // ----------------------------------------------------------------
-    /**
-     * require (current thread is AWT Event Dispatch Thread)
-     */
-    public static void isNotAWTThread() {
-        isNotAWTThread(1);
-    }
-
-    public static void isNotAWTThread(int numCallsBelowRequirer) {
-        if (EventQueue.isDispatchThread()) {
-            fail("!\"" + Thread.currentThread().getName() + "\".isAWTThread()", numCallsBelowRequirer + 1);
-        }
-    }
-
     // ################################################################
     // eq (primitiveValue == primitiveValue)
 
diff --git a/Base/src/main/java/io/deephaven/base/verify/RequirementFailure.java b/Base/src/main/java/io/deephaven/base/verify/RequirementFailure.java
index bfdb2caf4ce..21d34bf5dc6 100644
--- a/Base/src/main/java/io/deephaven/base/verify/RequirementFailure.java
+++ b/Base/src/main/java/io/deephaven/base/verify/RequirementFailure.java
@@ -48,12 +48,15 @@ public void printStackTrace() {
 
     // ----------------------------------------------------------------
     @Override
+    @GwtIncompatible
     public void printStackTrace(PrintStream s) {
         s.print(getFixedStackTrace());
     }
 
+    @interface GwtIncompatible {}
     // ----------------------------------------------------------------
     @Override
+    @GwtIncompatible
     public void printStackTrace(PrintWriter s) {
         s.print(getFixedStackTrace());
     }
@@ -62,6 +65,7 @@ public void printStackTrace(PrintWriter s) {
 
     /**
      * Gets a stack trace with a line added identifying the offending stack frame.
      */
+    @GwtIncompatible
    private StringBuffer getFixedStackTrace() {
        StringBuffer sb = getOriginalStackTrace();
@@ -80,6 +84,7 @@ private StringBuffer getFixedStackTrace() {
    /**
     * Gets the unmodified stack trace, instead of the one with the culprit identified.
     */
+    @GwtIncompatible
    public StringBuffer getOriginalStackTrace() {
        StringWriter stringWriter = new StringWriter();
        PrintWriter printWriter = new PrintWriter(stringWriter);
diff --git a/Util/src/main/java/io/deephaven/util/MultiException.java b/Util/src/main/java/io/deephaven/util/MultiException.java
index f2dcc13a90c..0b5d479cd83 100644
--- a/Util/src/main/java/io/deephaven/util/MultiException.java
+++ b/Util/src/main/java/io/deephaven/util/MultiException.java
@@ -84,7 +84,9 @@ public Throwable[] getCauses() {
         return causes;
     }
 
+    @interface GwtIncompatible {}
     @Override
+    @GwtIncompatible
     public void printStackTrace(PrintWriter s) {
         super.printStackTrace(s);
         for (int i = 0; i < causes.length; i++) {
diff --git a/Util/src/main/java/io/deephaven/util/type/ArrayTypeUtils.java b/Util/src/main/java/io/deephaven/util/type/ArrayTypeUtils.java
index c2d03f75a04..7f0e57c9211 100644
--- a/Util/src/main/java/io/deephaven/util/type/ArrayTypeUtils.java
+++ b/Util/src/main/java/io/deephaven/util/type/ArrayTypeUtils.java
@@ -53,6 +53,7 @@ public static ArrayAccessor<?> getArrayAccessor(Object array) {
         }
     }
 
+    @GwtIncompatible
     public static ArrayAccessor<?> createArrayAccessor(Object element, int size) {
         if (element == null) {
             return new ObjectArrayAccessor(new Object[size]);
@@ -127,6 +128,7 @@ public static short[] shortNullArray(int size) {
         return result;
     }
 
+    @GwtIncompatible
     public static Object toArray(Collection<?> objects, Class<?> elementType) {
         if (elementType == boolean.class) {
             elementType = Boolean.class;
@@ -140,6 +142,7 @@ public static Object toArray(Collection<?> objects, Class<?> elementType) {
         return result;
     }
 
+    @GwtIncompatible
     public static Object boxedToPrimitive(Set<?> objects, Class<?> type) {
         Iterator<?> it = objects.iterator();
         if (objects.isEmpty()) {
@@ -161,6 +164,7 @@ public static Object boxedToPrimitive(Set<?> objects, Class<?> type) {
         return resultAccessor.getArray();
     }
 
+    @GwtIncompatible
     public static ArrayAccessor<?> getArrayAccessorFromArray(Object arrayPrototype, int size) {
         final Class<?> c = arrayPrototype.getClass();
         if (c.equals(boolean[].class)) {
@@ -184,6 +188,7 @@ public static ArrayAccessor<?> getArrayAccessorFromArray(Object arrayPrototype, int
         }
     }
 
+    @GwtIncompatible
     public static Object toArray(Collection<?> objects) {
         if (objects.size() == 0) {
             return toArray(objects, Object.class);
@@ -197,6 +202,7 @@ public static Object toArray(Collection<?> objects) {
         return toArray(objects, (ubType == null ? prototype.getClass() : ubType));
     }
 
+    @GwtIncompatible
     public static ArrayAccessor<?> getAccessorForElementType(Class<?> componentType, int size) {
         if (componentType.equals(boolean.class) || componentType.equals(Boolean.class)) {
             return new BooleanArrayAccessor(booleanNullArray(size));
@@ -446,7 +452,8 @@ public static Object[] getBoxedArray(Object value) {
             return (Object[]) value;
         }
     }
-
+    @interface GwtIncompatible {}
+    @GwtIncompatible
     public static boolean equals(Object actualValue, Object expectedValue) {
         final Class<?> ct = actualValue.getClass().getComponentType();
         if (Object.class.isAssignableFrom(ct)) {
@@ -469,6 +476,7 @@ public static boolean equals(Object actualValue, Object expectedValue) {
         return false;
     }
 
+    @GwtIncompatible
     public static String toString(Object actualValue) {
         final Class<?> ct = actualValue.getClass().getComponentType();
         if (Object.class.isAssignableFrom(ct)) {
diff --git a/Util/src/main/java/io/deephaven/util/type/TypeUtils.java b/Util/src/main/java/io/deephaven/util/type/TypeUtils.java
index 89cfd4e152c..4b4b9251b08 100644
--- a/Util/src/main/java/io/deephaven/util/type/TypeUtils.java
+++ b/Util/src/main/java/io/deephaven/util/type/TypeUtils.java
@@ -389,6 +389,7 @@ public static boolean isPrimitiveNumeric(@NotNull final Class<?> c) {
      * @param c class
      * @return true if Number.class is assignable from {@code c}, false otherwise
      */
+    @GwtIncompatible
     public static boolean isBoxedNumeric(@NotNull final Class<?> c) {
         return Number.class.isAssignableFrom(c);
     }
@@ -410,7 +411,7 @@ public static boolean isPrimitiveChar(@NotNull final Class<?> c) {
      * @return true if Character.class is assignable from {@code c}, false otherwise
      */
     public static boolean isBoxedChar(@NotNull final Class<?> c) {
-        return Character.class.isAssignableFrom(c);
+        return Character.class.equals(c);
     }
 
     /**
@@ -420,7 +421,7 @@ public static boolean isBoxedChar(@NotNull final Class<?> c) {
      * @return true if Integer.class is assignable from {@code c}, false otherwise
      */
     public static boolean isBoxedInteger(@NotNull final Class<?> c) {
-        return Integer.class.isAssignableFrom(c);
+        return Integer.class.equals(c);
     }
 
     /**
@@ -430,7 +431,7 @@ public static boolean isBoxedInteger(@NotNull final Class<?> c) {
      * @return true if Long.class is assignable from {@code c}, false otherwise
      */
     public static boolean isBoxedLong(@NotNull final Class<?> c) {
-        return Long.class.isAssignableFrom(c);
+        return Long.class.equals(c);
     }
 
     /**
@@ -440,7 +441,7 @@ public static boolean isBoxedLong(@NotNull final Class<?> c) {
      * @return true if Short.class is assignable from {@code c}, false otherwise
      */
     public static boolean isBoxedShort(@NotNull final Class<?> c) {
-        return Short.class.isAssignableFrom(c);
+        return Short.class.equals(c);
     }
 
     /**
@@ -450,7 +451,7 @@ public static boolean isBoxedShort(@NotNull final Class<?> c) {
      * @return true if Float.class is assignable from {@code c}, false otherwise
      */
     public static boolean isBoxedFloat(@NotNull final Class<?> c) {
-        return Float.class.isAssignableFrom(c);
+        return Float.class.equals(c);
     }
 
     /**
@@ -460,7 +461,7 @@ public static boolean isBoxedFloat(@NotNull final Class<?> c) {
      * @return true if Double.class is assignable from {@code c}, false otherwise
      */
     public static boolean isBoxedDouble(@NotNull final Class<?> c) {
-        return Double.class.isAssignableFrom(c);
+        return Double.class.equals(c);
     }
 
     /**
@@ -470,7 +471,7 @@ public static boolean isBoxedDouble(@NotNull final Class<?> c) {
      * @return true if Byte.class is assignable from {@code c}, false otherwise
      */
     public static boolean isBoxedByte(@NotNull final Class<?> c) {
Byte.class.isAssignableFrom(c); + return Byte.class.equals(c); } /** @@ -490,7 +491,7 @@ public static boolean isBoxedArithmetic(@NotNull final Class c) { * @return true if Boolean.class is assignable from {@code c}, false otherwise */ public static boolean isBoxedBoolean(@NotNull final Class c) { - return Boolean.class.isAssignableFrom(c); + return Boolean.class.equals(c); } /** @@ -499,6 +500,7 @@ public static boolean isBoxedBoolean(@NotNull final Class c) { * @param c class * @return true if {@code c} is numeric, false otherwise */ + @GwtIncompatible public static boolean isNumeric(@NotNull final Class c) { return isPrimitiveNumeric(c) || isBoxedNumeric(c); } @@ -519,6 +521,7 @@ public static boolean isCharacter(@NotNull final Class c) { * @param type The class. * @return true if the type is a DateTime, {@link java.time.ZonedDateTime} or {@link Instant}. */ + @GwtIncompatible public static boolean isDateTime(Class type) { return Instant.class.isAssignableFrom(type) || ZonedDateTime.class.isAssignableFrom(type) @@ -532,7 +535,7 @@ public static boolean isDateTime(Class type) { * @return true if the type is a String, false otherwise */ public static boolean isString(Class type) { - return String.class.isAssignableFrom(type); + return String.class.equals(type); } /** @@ -541,6 +544,7 @@ public static boolean isString(Class type) { * @param type the class * @return true if the type is BigInteger or BigDecimal, false otherwise */ + @GwtIncompatible public static boolean isBigNumeric(Class type) { return BigInteger.class.isAssignableFrom(type) || BigDecimal.class.isAssignableFrom(type); } @@ -564,6 +568,7 @@ public static boolean isFloatType(Class type) { * @return a String representation of the object, null if it cannot be converted * @throws IOException if an IO error occurs during conversion */ + @GwtIncompatible public static String objectToString(Object o) throws IOException { if (o == null) { return null; @@ -592,6 +597,7 @@ public static String objectToString(Object o) throws IOException { * @throws RuntimeException if the string fails to parse * @throws IOException if an IO error occurs during conversion */ + @GwtIncompatible public static Optional fromString(String string, String typeString) throws IOException { final Class type; try { @@ -613,6 +619,7 @@ public static Optional fromString(String string, String typeString) thro * @throws RuntimeException if the string fails to parse * @throws IOException if an IO error occurs during conversion */ + @GwtIncompatible public static Object fromString(String string, Class type) throws IOException { final Class boxedType = getBoxedType(type); try { @@ -658,6 +665,7 @@ public static Object fromString(String string, Class type) throws IOException * @return the base64 encoded string * @throws IOException if the string cannot be encoded */ + @GwtIncompatible public static String encode64Serializable(Serializable serializable) throws IOException { try (ByteArrayOutputStream bos = new ByteArrayOutputStream(); ObjectOutputStream os = new ObjectOutputStream(bos)) { @@ -674,6 +682,7 @@ public static String encode64Serializable(Serializable serializable) throws IOEx * @throws IOException if the string cannot be decoded * @throws ClassNotFoundException if the Object type is unknown */ + @GwtIncompatible public static Object decode64Serializable(String string) throws IOException, ClassNotFoundException { try (ObjectInputStream is = new ObjectInputStream(new ByteArrayInputStream(Base64.getDecoder().decode(string)))) { @@ -684,6 +693,7 @@ public static 
Object decode64Serializable(String string) throws IOException, Cla /** * Determine the Class from the Type. */ + @GwtIncompatible public static Class getErasedType(Type paramType) { if (paramType instanceof Class) { return (Class) paramType; @@ -718,6 +728,7 @@ public static Class getErasedType(Type paramType) { } } + @interface GwtIncompatible {} /** * Determine the weakest parent of the two provided Classes. * @@ -725,6 +736,7 @@ public static Class getErasedType(Type paramType) { * @param two the other class to compare * @return the weakest parent Class */ + @GwtIncompatible private static Class getWeakest(Class one, Class two) { if (one.isAssignableFrom(two)) { return one; @@ -749,6 +761,7 @@ private static Class getWeakest(Class one, Class two) { return strongest; } + @GwtIncompatible private static Set> getFlattenedInterfaces(Class cls) { final Set> set = new HashSet<>(); while (cls != null && cls != Object.class) { @@ -760,6 +773,7 @@ private static Set> getFlattenedInterfaces(Class cls) { return set; } + @GwtIncompatible private static void collectInterfaces(final Collection> into, final Class cls) { if (into.add(cls)) { for (final Class iface : cls.getInterfaces()) { @@ -771,6 +785,7 @@ private static void collectInterfaces(final Collection> into, final Cla } + @GwtIncompatible public static Class classForName(String className) throws ClassNotFoundException { Class result = primitiveClassNameToClass.get(className); if (result == null) { diff --git a/Util/src/main/resources/io/deephaven/Util.gwt.xml b/Util/src/main/resources/io/deephaven/Util.gwt.xml new file mode 100644 index 00000000000..b7c6a6bbfd8 --- /dev/null +++ b/Util/src/main/resources/io/deephaven/Util.gwt.xml @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/engine/chunk/src/main/resources/io/deephaven/chunk/Chunk.gwt.xml b/engine/chunk/src/main/resources/io/deephaven/chunk/Chunk.gwt.xml new file mode 100644 index 00000000000..d786d2e3976 --- /dev/null +++ b/engine/chunk/src/main/resources/io/deephaven/chunk/Chunk.gwt.xml @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/engine/query-constants/src/main/java/io/deephaven/util/QueryConstants.java b/engine/query-constants/src/main/java/io/deephaven/util/QueryConstants.java index fc25c64d5c1..0dcb1cd4a6a 100644 --- a/engine/query-constants/src/main/java/io/deephaven/util/QueryConstants.java +++ b/engine/query-constants/src/main/java/io/deephaven/util/QueryConstants.java @@ -187,6 +187,8 @@ private QueryConstants() {} /** * Minimum finite value of type float. */ + @interface GwtIncompatible {} + @GwtIncompatible public static final float MIN_FINITE_FLOAT = Math.nextUp(-Float.MAX_VALUE); /** @@ -241,6 +243,7 @@ private QueryConstants() {} /** * Minimum finite value of type double. 
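+ * <p>
+ * (As with MIN_FINITE_FLOAT above, the initializer is kept out of the GWT client build: the GWT compiler
+ * skips any member annotated with an annotation whose simple name is {@code GwtIncompatible}, regardless
+ * of its package, which is presumably why this patch declares local empty {@code @interface} markers
+ * instead of adding a dependency.)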
*/ + @GwtIncompatible public static final double MIN_FINITE_DOUBLE = Math.nextUp(-Double.MAX_VALUE); /** diff --git a/web/client-api/client-api.gradle b/web/client-api/client-api.gradle index 2a28a71bc3f..dad6ccb6cea 100644 --- a/web/client-api/client-api.gradle +++ b/web/client-api/client-api.gradle @@ -22,6 +22,9 @@ repositories { mavenLocal() } dependencies { + implementation project(':engine-chunk') + implementation 'org.jetbrains:annotations:13.0' +// implementation 'org.jetbrains:annotations:13.0:sources' implementation project(':web-shared-beans') implementation project(':web-client-backplane') diff --git a/web/client-api/src/main/java/io/deephaven/web/DeephavenApi.gwt.xml b/web/client-api/src/main/java/io/deephaven/web/DeephavenApi.gwt.xml index 307ea03bb6a..a066490dbd5 100644 --- a/web/client-api/src/main/java/io/deephaven/web/DeephavenApi.gwt.xml +++ b/web/client-api/src/main/java/io/deephaven/web/DeephavenApi.gwt.xml @@ -6,6 +6,10 @@ + + + + diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/chunk/util/pools/MultiChunkPool.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/chunk/util/pools/MultiChunkPool.java new file mode 100644 index 00000000000..0469a3e8f47 --- /dev/null +++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/chunk/util/pools/MultiChunkPool.java @@ -0,0 +1,100 @@ +/** + * Copyright (c) 2016-2022 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.chunk.util.pools; + +import io.deephaven.chunk.ChunkType; +import org.jetbrains.annotations.NotNull; + +import java.util.Collections; +import java.util.EnumMap; +import java.util.Map; + +/** + * Provides a set of per-type {@link ChunkPool}s. Normally accessed via a {@link ThreadLocal}, to allow some threads to + * share a common pool and others to allocate their own. 
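+ * <p>
+ * A minimal usage sketch (hypothetical caller code, not part of this patch):
+ *
+ * <pre>
+ * // a thread that wants an isolated pool opts in once, before taking any chunks
+ * MultiChunkPool.enableDedicatedPoolForThisThread();
+ * // every other thread resolves to the shared pool through the ThreadLocal
+ * IntChunkPool ints = MultiChunkPool.forThisThread().getIntChunkPool();
+ * </pre>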
+ */ +public final class MultiChunkPool { + + private static final MultiChunkPool SHARED_POOL = new MultiChunkPool(); + private static final ThreadLocal POOL_THREAD_LOCAL = ThreadLocal.withInitial(() -> SHARED_POOL); + + public static void enableDedicatedPoolForThisThread() { + if (POOL_THREAD_LOCAL.get() == SHARED_POOL) { + POOL_THREAD_LOCAL.set(new MultiChunkPool()); + } + } + + public static MultiChunkPool forThisThread() { + return POOL_THREAD_LOCAL.get(); + } + + private final BooleanChunkPool booleanChunkPool = new BooleanChunkPool(); + private final CharChunkPool charChunkPool = new CharChunkPool(); + private final ByteChunkPool byteChunkPool = new ByteChunkPool(); + private final ShortChunkPool shortChunkPool = new ShortChunkPool(); + private final IntChunkPool intChunkPool = new IntChunkPool(); + private final LongChunkPool longChunkPool = new LongChunkPool(); + private final FloatChunkPool floatChunkPool = new FloatChunkPool(); + private final DoubleChunkPool doubleChunkPool = new DoubleChunkPool(); + private final ObjectChunkPool objectChunkPool = new ObjectChunkPool(); + + private final Map pools; + + { + final EnumMap tempPools = new EnumMap<>(ChunkType.class); + tempPools.put(ChunkType.Boolean, booleanChunkPool); + tempPools.put(ChunkType.Char, charChunkPool); + tempPools.put(ChunkType.Byte, byteChunkPool); + tempPools.put(ChunkType.Short, shortChunkPool); + tempPools.put(ChunkType.Int, intChunkPool); + tempPools.put(ChunkType.Long, longChunkPool); + tempPools.put(ChunkType.Float, floatChunkPool); + tempPools.put(ChunkType.Double, doubleChunkPool); + tempPools.put(ChunkType.Object, objectChunkPool); + pools = Collections.unmodifiableMap(tempPools); + } + + private MultiChunkPool() {} + + @SuppressWarnings("unused") + public ChunkPool getChunkPool(@NotNull final ChunkType chunkType) { + return pools.get(chunkType); + } + + public BooleanChunkPool getBooleanChunkPool() { + return booleanChunkPool; + } + + public CharChunkPool getCharChunkPool() { + return charChunkPool; + } + + public ByteChunkPool getByteChunkPool() { + return byteChunkPool; + } + + public ShortChunkPool getShortChunkPool() { + return shortChunkPool; + } + + public IntChunkPool getIntChunkPool() { + return intChunkPool; + } + + public LongChunkPool getLongChunkPool() { + return longChunkPool; + } + + public FloatChunkPool getFloatChunkPool() { + return floatChunkPool; + } + + public DoubleChunkPool getDoubleChunkPool() { + return doubleChunkPool; + } + + public ObjectChunkPool getObjectChunkPool() { + return objectChunkPool; + } +} diff --git a/web/client-api/src/main/resources/org/jetbrains/annotations/Annotations.gwt.xml b/web/client-api/src/main/resources/org/jetbrains/annotations/Annotations.gwt.xml new file mode 100644 index 00000000000..ec945398599 --- /dev/null +++ b/web/client-api/src/main/resources/org/jetbrains/annotations/Annotations.gwt.xml @@ -0,0 +1,3 @@ + + + \ No newline at end of file From 0252984801919f68a8e183218da8c2dd5aecdde8 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 12 Jan 2024 10:01:36 -0600 Subject: [PATCH 006/219] Start adapting BSR for web --- .../client/api/barrage/WebBarrageMessage.java | 46 ++ .../api/barrage/WebBarrageStreamReader.java | 441 ++++++++++++++++++ .../client/api/barrage/WebBarrageUtils.java | 6 +- 3 files changed, 490 insertions(+), 3 deletions(-) create mode 100644 web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageMessage.java create mode 100644 
web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageMessage.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageMessage.java new file mode 100644 index 00000000000..efadd81aa5d --- /dev/null +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageMessage.java @@ -0,0 +1,46 @@ +package io.deephaven.web.client.api.barrage; + +import io.deephaven.chunk.Chunk; +import io.deephaven.chunk.ChunkType; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.web.shared.data.RangeSet; +import io.deephaven.web.shared.data.ShiftedRange; + +import java.util.ArrayList; +import java.util.BitSet; + +public class WebBarrageMessage { + public static class ModColumnData { + public RangeSet rowsModified; + public Class type; + public Class componentType; + public ArrayList> data; + public ChunkType chunkType; + } + public static class AddColumnData { + public Class type; + public Class componentType; + public ArrayList> data; + public ChunkType chunkType; + } + + public long firstSeq = -1; + public long lastSeq = -1; + public long step = -1; + + public boolean isSnapshot; + public RangeSet snapshotRowSet; + public boolean snapshotRowSetIsReversed; + public BitSet snapshotColumns; + + public RangeSet rowsAdded; + public RangeSet rowsIncluded; + public RangeSet rowsRemoved; + public ShiftedRange[] shifted; + + public AddColumnData[] addColumnData; + public ModColumnData[] modColumnData; + + // Underlying RecordBatch.length, visible for reading snapshots + public long length; +} diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java new file mode 100644 index 00000000000..4bf52878cd3 --- /dev/null +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java @@ -0,0 +1,441 @@ +package io.deephaven.web.client.api.barrage; + +import io.deephaven.barrage.flatbuf.BarrageMessageType; +import io.deephaven.barrage.flatbuf.BarrageMessageWrapper; +import io.deephaven.barrage.flatbuf.BarrageModColumnMetadata; +import io.deephaven.barrage.flatbuf.BarrageUpdateMetadata; +import io.deephaven.chunk.ChunkType; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb.FlightData; +import io.deephaven.util.datastructures.LongSizedDataStructure; +import io.deephaven.util.type.TypeUtils; +import io.deephaven.web.shared.data.RangeSet; +import io.deephaven.web.shared.data.ShiftedRange; +import org.apache.arrow.flatbuf.Message; +import org.apache.arrow.flatbuf.MessageHeader; +import org.apache.arrow.flatbuf.RecordBatch; +import org.gwtproject.nio.TypedArrayHelper; + +import java.io.DataInput; +import java.io.IOException; +import java.math.BigDecimal; +import java.math.BigInteger; +import java.nio.ByteBuffer; +import java.time.Instant; +import java.time.ZonedDateTime; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.BitSet; +import java.util.Iterator; + +/** + * Consumes FlightData fields from Flight/Barrage producers and builds + * browser-compatible WebBarrageMessage payloads that can be used to + * maintain table data. 
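+ * <p>
+ * Intended call pattern, as a sketch (the caller-side names here are assumptions, not part of this patch):
+ *
+ * <pre>
+ * WebBarrageStreamReader reader = new WebBarrageStreamReader();
+ * // feed each arriving FlightData frame; a null result means the update is incomplete, keep feeding
+ * WebBarrageMessage msg = reader.parseFrom(expectedColumns, chunkTypes, columnTypes, componentTypes, frame);
+ * if (msg != null) {
+ *     // a whole update was assembled: apply rowsAdded/rowsRemoved/shifted and the column chunks
+ * }
+ * </pre>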
+ */ +public class WebBarrageStreamReader { + private static final int MAX_CHUNK_SIZE = Integer.MAX_VALUE - 8; + + + // record progress in reading + private long numAddRowsRead = 0; + private long numAddRowsTotal = 0; + private long numModRowsRead = 0; + private long numModRowsTotal = 0; + + // hold in-progress messages that aren't finished being built + private WebBarrageMessage msg; + + public WebBarrageMessage parseFrom(BitSet expectedColumns, ChunkType[] columnChunkTypes, Class[] columnTypes, Class[] componentTypes, + FlightData flightData) { + ByteBuffer headerAsBB = TypedArrayHelper.wrap(flightData.getDataHeader_asU8()); + Message header = headerAsBB.hasRemaining() ? Message.getRootAsMessage(headerAsBB) : null; + + ByteBuffer msgAsBB = TypedArrayHelper.wrap(flightData.getAppMetadata_asU8()); + if (msgAsBB.hasRemaining()) { + BarrageMessageWrapper wrapper = + BarrageMessageWrapper.getRootAsBarrageMessageWrapper(msgAsBB); + if (wrapper.magic() != WebBarrageUtils.FLATBUFFER_MAGIC) { + //TODO warn + } else if (wrapper.msgType() == BarrageMessageType.BarrageUpdateMetadata) { + if (msg != null) { + throw new IllegalStateException( + "Previous message was not complete; pending " + (numAddRowsTotal - numAddRowsRead) + + " add rows and " + (numModRowsTotal - numModRowsRead) + " mod rows"); + } + + final BarrageUpdateMetadata metadata = + BarrageUpdateMetadata.getRootAsBarrageUpdateMetadata(wrapper.msgPayloadAsByteBuffer()); + + msg = new WebBarrageMessage(); + + msg.isSnapshot = metadata.isSnapshot(); + msg.snapshotRowSetIsReversed = metadata.effectiveReverseViewport(); + + numAddRowsRead = 0; + numModRowsRead = 0; + + if (msg.isSnapshot) { + final ByteBuffer effectiveViewport = metadata.effectiveViewportAsByteBuffer(); + if (effectiveViewport != null) { + msg.snapshotRowSet = extractIndex(effectiveViewport); + } + final ByteBuffer effectiveSnapshotColumns = metadata.effectiveColumnSetAsByteBuffer(); + if (effectiveSnapshotColumns != null) { + msg.snapshotColumns = extractBitSet(effectiveSnapshotColumns); + } + } + + msg.firstSeq = metadata.firstSeq(); + msg.lastSeq = metadata.lastSeq(); + msg.rowsAdded = extractIndex(metadata.addedRowsAsByteBuffer()); + msg.rowsRemoved = extractIndex(metadata.removedRowsAsByteBuffer()); + msg.shifted = extractIndexShiftData(metadata.shiftDataAsByteBuffer()); + + final ByteBuffer rowsIncluded = metadata.addedRowsIncludedAsByteBuffer(); + msg.rowsIncluded = rowsIncluded != null ? extractIndex(rowsIncluded) : msg.rowsAdded; + msg.addColumnData = new WebBarrageMessage.AddColumnData[columnTypes.length]; + for (int ci = 0; ci < msg.addColumnData.length; ++ci) { + msg.addColumnData[ci] = new WebBarrageMessage.AddColumnData(); + msg.addColumnData[ci].type = columnTypes[ci]; + msg.addColumnData[ci].componentType = componentTypes[ci]; + msg.addColumnData[ci].data = new ArrayList<>(); + + // create an initial chunk of the correct size + final int chunkSize = (int) (Math.min(msg.rowsIncluded.size(), MAX_CHUNK_SIZE)); + final WritableChunk chunk = columnChunkTypes[ci].makeWritableChunk(chunkSize); + chunk.setSize(0); + msg.addColumnData[ci].data.add(chunk); + } + numAddRowsTotal = msg.rowsIncluded.size(); + + // if this message is a snapshot response (vs. 
subscription) then mod columns may be empty + numModRowsTotal = 0; + msg.modColumnData = new WebBarrageMessage.ModColumnData[metadata.modColumnNodesLength()]; + for (int ci = 0; ci < msg.modColumnData.length; ++ci) { + msg.modColumnData[ci] = new WebBarrageMessage.ModColumnData(); + msg.modColumnData[ci].type = columnTypes[ci]; + msg.modColumnData[ci].componentType = componentTypes[ci]; + msg.modColumnData[ci].data = new ArrayList<>(); + + final BarrageModColumnMetadata mcd = metadata.modColumnNodes(ci); + msg.modColumnData[ci].rowsModified = extractIndex(mcd.modifiedRowsAsByteBuffer()); + + // create an initial chunk of the correct size + final int chunkSize = (int) (Math.min(msg.modColumnData[ci].rowsModified.size(), + MAX_CHUNK_SIZE)); + final WritableChunk chunk = columnChunkTypes[ci].makeWritableChunk(chunkSize); + chunk.setSize(0); + msg.modColumnData[ci].data.add(chunk); + + numModRowsTotal = Math.max(numModRowsTotal, msg.modColumnData[ci].rowsModified.size()); + } + } + } + ByteBuffer body = TypedArrayHelper.wrap(flightData.getDataBody_asU8()); + if (!body.hasRemaining()) { + throw new IllegalStateException("Missing body tag"); + } + if (header == null) { + throw new IllegalStateException("Missing metadata header; cannot decode body"); + } + + if (header.headerType() != MessageHeader.RecordBatch) { + throw new IllegalStateException("Only know how to decode Schema/BarrageRecordBatch messages"); + } + + // throw an error when no app metadata (snapshots now provide by default) + if (msg == null) { + throw new IllegalStateException( + "Missing app metadata tag; cannot decode using BarrageStreamReader"); + } + + final RecordBatch batch = (RecordBatch) header.header(new RecordBatch()); + msg.length = batch.length(); + + final Iterator fieldNodeIter = + new FlatBufferIteratorAdapter<>(batch.nodesLength(), + i -> new ChunkInputStreamGenerator.FieldNodeInfo(batch.nodes(i))); + + final TLongArrayList bufferInfo = new TLongArrayList(batch.buffersLength()); + for (int bi = 0; bi < batch.buffersLength(); ++bi) { + int offset = LongSizedDataStructure.intSize("BufferInfo", batch.buffers(bi).offset()); + int length = LongSizedDataStructure.intSize("BufferInfo", batch.buffers(bi).length()); + if (bi < batch.buffersLength() - 1) { + final int nextOffset = + LongSizedDataStructure.intSize("BufferInfo", batch.buffers(bi + 1).offset()); + // our parsers handle overhanging buffers + length += Math.max(0, nextOffset - offset - length); + } + bufferInfo.add(length); + } + final TLongIterator bufferInfoIter = bufferInfo.iterator(); + + + // add and mod rows are never combined in a batch. all added rows must be received before the first + // mod rows will be received. 
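+ // In other words, each RecordBatch contributes its batch.length() rows entirely to the add phase or
+ // entirely to the mod phase, so numAddRowsRead/numModRowsRead below advance by whole batches until
+ // they reach the totals captured from the BarrageUpdateMetadata.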
+ if (numAddRowsRead < numAddRowsTotal) { + for (int ci = 0; ci < msg.addColumnData.length; ++ci) { + final WebBarrageMessage.AddColumnData acd = msg.addColumnData[ci]; + + final long remaining = numAddRowsTotal - numAddRowsRead; + if (batch.length() > remaining) { + throw new IllegalStateException( + "Batch length exceeded the expected number of rows from app metadata"); + } + + // select the current chunk size and read the size + int lastChunkIndex = acd.data.size() - 1; + WritableChunk chunk = (WritableChunk) acd.data.get(lastChunkIndex); + + if (batch.length() > chunk.capacity() - chunk.size()) { + // reading the rows from this batch will overflow the existing chunk; create a new one + final int chunkSize = (int) (Math.min(remaining, MAX_CHUNK_SIZE)); + chunk = columnChunkTypes[ci].makeWritableChunk(chunkSize); + acd.data.add(chunk); + + chunk.setSize(0); + ++lastChunkIndex; + } + + // fill the chunk with data and assign back into the array + acd.data.set(lastChunkIndex, + ChunkInputStreamGenerator.extractChunkFromInputStream(options, columnChunkTypes[ci], + columnTypes[ci], componentTypes[ci], fieldNodeIter, bufferInfoIter, ois, + chunk, chunk.size(), (int) batch.length())); + chunk.setSize(chunk.size() + (int) batch.length()); + } + numAddRowsRead += batch.length(); + } else { + for (int ci = 0; ci < msg.modColumnData.length; ++ci) { + final WebBarrageMessage.ModColumnData mcd = msg.modColumnData[ci]; + + // another column may be larger than this column + long remaining = Math.max(0, mcd.rowsModified.size() - numModRowsRead); + + // need to add the batch row data to the column chunks + int lastChunkIndex = mcd.data.size() - 1; + WritableChunk chunk = (WritableChunk) mcd.data.get(lastChunkIndex); + + final int numRowsToRead = LongSizedDataStructure.intSize("BarrageStreamReader", + Math.min(remaining, batch.length())); + if (numRowsToRead > chunk.capacity() - chunk.size()) { + // reading the rows from this batch will overflow the existing chunk; create a new one + final int chunkSize = (int) (Math.min(remaining, MAX_CHUNK_SIZE)); + chunk = columnChunkTypes[ci].makeWritableChunk(chunkSize); + mcd.data.add(chunk); + + chunk.setSize(0); + ++lastChunkIndex; + } + + // fill the chunk with data and assign back into the array + mcd.data.set(lastChunkIndex, + ChunkInputStreamGenerator.extractChunkFromInputStream(options, columnChunkTypes[ci], + columnTypes[ci], componentTypes[ci], fieldNodeIter, bufferInfoIter, ois, + chunk, chunk.size(), numRowsToRead)); + chunk.setSize(chunk.size() + numRowsToRead); + } + numModRowsRead += batch.length(); + } + + + if (header.headerType() == MessageHeader.Schema) { + // there is no body and our clients do not want to see schema messages + return null; + } + + if (numAddRowsRead == numAddRowsTotal && numModRowsRead == numModRowsTotal) { + final WebBarrageMessage retval = msg; + msg = null; + return retval; + } + + // otherwise, must wait for more data + return null; + } + + static WritableChunk extractChunkFromInputStream( + final StreamReaderOptions options, + final ChunkType chunkType, final Class type, final Class componentType, + final Iterator fieldNodeIter, + final TLongIterator bufferInfoIter, + final DataInput is, + final WritableChunk outChunk, final int offset, final int totalRows) throws IOException { + return extractChunkFromInputStream(options, 1, chunkType, type, componentType, fieldNodeIter, bufferInfoIter, is, + outChunk, offset, totalRows); + } + + static WritableChunk extractChunkFromInputStream( + final StreamReaderOptions options, + final 
int factor, + final ChunkType chunkType, final Class type, final Class componentType, + final Iterator fieldNodeIter, + final TLongIterator bufferInfoIter, + final DataInput is, + final WritableChunk outChunk, final int outOffset, final int totalRows) throws IOException { + switch (chunkType) { + case Boolean: + throw new UnsupportedOperationException("Booleans are reinterpreted as bytes"); + case Char: + return CharChunkInputStreamGenerator.extractChunkFromInputStream( + Character.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + case Byte: + if (type == Boolean.class || type == boolean.class) { + return BooleanChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + return ByteChunkInputStreamGenerator.extractChunkFromInputStream( + Byte.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + case Short: + return ShortChunkInputStreamGenerator.extractChunkFromInputStream( + Short.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + case Int: + return IntChunkInputStreamGenerator.extractChunkFromInputStream( + Integer.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + case Long: + if (factor == 1) { + return LongChunkInputStreamGenerator.extractChunkFromInputStream( + Long.BYTES, options, + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithConversion( + Long.BYTES, options, + (long v) -> (v*factor), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + case Float: + return FloatChunkInputStreamGenerator.extractChunkFromInputStream( + Float.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + case Double: + return DoubleChunkInputStreamGenerator.extractChunkFromInputStream( + Double.BYTES, options,fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + case Object: + if (type.isArray()) { + if (componentType == byte.class) { + return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + is, + fieldNodeIter, + bufferInfoIter, + (buf, off, len) -> Arrays.copyOfRange(buf, off, off + len), + outChunk, outOffset, totalRows + ); + } else { + return VarListChunkInputStreamGenerator.extractChunkFromInputStream( + options, type, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + } + if (Vector.class.isAssignableFrom(type)) { + //noinspection unchecked + return VectorChunkInputStreamGenerator.extractChunkFromInputStream( + options, (Class>)type, componentType, fieldNodeIter, bufferInfoIter, is, + outChunk, outOffset, totalRows); + } + if (type == BigInteger.class) { + return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + is, + fieldNodeIter, + bufferInfoIter, + BigInteger::new, + outChunk, outOffset, totalRows + ); + } + if (type == BigDecimal.class) { + return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + is, + fieldNodeIter, + bufferInfoIter, + (final byte[] buf, final int offset, final int length) -> { + // read the int scale value as little endian, arrow's endianness. 
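+ // i.e. scale = b1 | b2<<8 | b3<<16 | b4<<24; the remaining length-4 bytes are the
+ // big-endian two's-complement unscaled value handed to BigInteger below.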
+ final byte b1 = buf[offset]; + final byte b2 = buf[offset + 1]; + final byte b3 = buf[offset + 2]; + final byte b4 = buf[offset + 3]; + final int scale = b4 << 24 | (b3 & 0xFF) << 16 | (b2 & 0xFF) << 8 | (b1 & 0xFF); + return new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale); + }, + outChunk, outOffset, totalRows + ); + } + if (type == Instant.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Long.BYTES, options, io -> DateTimeUtils.epochNanosToInstant(io.readLong()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows + ); + } + if (type == ZonedDateTime.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Long.BYTES, options, io -> DateTimeUtils.epochNanosToZonedDateTime(io.readLong(), DateTimeUtils.timeZone()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows + ); + } + if (type == Byte.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Byte.BYTES, options, io -> TypeUtils.box(io.readByte()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows + ); + } + if (type == Character.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Character.BYTES, options, io -> TypeUtils.box(io.readChar()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows + ); + } + if (type == Double.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Double.BYTES, options, io -> TypeUtils.box(io.readDouble()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows + ); + } + if (type == Float.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Float.BYTES, options, io -> TypeUtils.box(io.readFloat()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows + ); + } + if (type == Integer.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Integer.BYTES, options, io -> TypeUtils.box(io.readInt()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows + ); + } + if (type == Long.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Long.BYTES, options, io -> TypeUtils.box(io.readLong()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows + ); + } + if (type == Short.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Short.BYTES, options, io -> TypeUtils.box(io.readShort()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows + ); + } + if (type == String.class || + options.columnConversionMode().equals(ColumnConversionMode.Stringify)) { + return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream(is, fieldNodeIter, bufferInfoIter, + (buf, off, len) -> new String(buf, off, len, Charsets.UTF_8), outChunk, outOffset, totalRows); + } + throw new UnsupportedOperationException("Do not yet support column conversion mode: " + options.columnConversionMode()); + default: + throw new UnsupportedOperationException(); + } + } + + private static RangeSet extractIndex(final ByteBuffer bb) { + if (bb == null) { + return RangeSet.empty(); + } + return new CompressedRangeSetReader().read(bb); + } + + private static BitSet extractBitSet(final ByteBuffer bb) { + byte[] array = new byte[bb.remaining()]; + bb.get(array); + 
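+ // BitSet.valueOf reads the bytes little-endian (array[0] holds bits 0-7), the same layout that
+ // BitSet.toByteArray presumably produced on the server side.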
return BitSet.valueOf(array); + } + + private static ShiftedRange[] extractIndexShiftData(final ByteBuffer bb) { + return new ShiftedRangeReader().read(bb); + } +} diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java index 5b5f9a0b6c4..fad1712f87c 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java @@ -43,14 +43,14 @@ * Utility to read barrage record batches. */ public class WebBarrageUtils { - private static final int MAGIC = 0x6E687064; + public static final int FLATBUFFER_MAGIC = 0x6E687064; public static Uint8Array wrapMessage(FlatBufferBuilder innerBuilder, byte messageType) { // TODO this doesnt look right, probably we can append the message rather than copying? FlatBufferBuilder outerBuilder = new FlatBufferBuilder(1024); int messageOffset = BarrageMessageWrapper.createMsgPayloadVector(outerBuilder, innerBuilder.dataBuffer()); int offset = - BarrageMessageWrapper.createBarrageMessageWrapper(outerBuilder, MAGIC, messageType, messageOffset); + BarrageMessageWrapper.createBarrageMessageWrapper(outerBuilder, FLATBUFFER_MAGIC, messageType, messageOffset); outerBuilder.finish(offset); ByteBuffer byteBuffer = outerBuilder.dataBuffer(); return bbToUint8ArrayView(byteBuffer); @@ -63,7 +63,7 @@ public static Uint8Array bbToUint8ArrayView(ByteBuffer byteBuffer) { public static Uint8Array emptyMessage() { FlatBufferBuilder builder = new FlatBufferBuilder(1024); - int offset = BarrageMessageWrapper.createBarrageMessageWrapper(builder, MAGIC, BarrageMessageType.None, 0); + int offset = BarrageMessageWrapper.createBarrageMessageWrapper(builder, FLATBUFFER_MAGIC, BarrageMessageType.None, 0); builder.finish(offset); return bbToUint8ArrayView(builder.dataBuffer()); } From 38b24b3e50767bddd55cdac58ea157adf56e875f Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Wed, 17 Jan 2024 20:38:34 -0600 Subject: [PATCH 007/219] Terrible hack and slash commit, but the client compiles! 
2.4MB for dh-core.js --- .../java/io/deephaven/base/verify/Assert.java | 4 +- .../io/deephaven/base/verify/Require.java | 3 +- .../base/verify/RequirementFailure.java | 4 +- .../datastructures/util/CollectionUtil.java | 6 +- .../io/streams/ByteBufferInputStream.java | 131 ++++-------- .../main/resources/io/deephaven/io/IO.gwt.xml | 3 + .../io/deephaven/util/MultiException.java | 4 +- .../deephaven/util/type/ArrayTypeUtils.java | 5 +- .../io/deephaven/util/type/TypeUtils.java | 4 +- .../main/resources/io/deephaven/Util.gwt.xml | 5 +- .../main/java/io/deephaven/chunk/Chunk.java | 3 +- .../java/io/deephaven/chunk/ChunkBase.java | 3 +- .../java/io/deephaven/chunk/ChunkHelpers.java | 8 +- .../chunk/ResettableBooleanChunk.java | 6 +- .../deephaven/chunk/ResettableByteChunk.java | 6 +- .../deephaven/chunk/ResettableCharChunk.java | 6 +- .../chunk/ResettableDoubleChunk.java | 6 +- .../deephaven/chunk/ResettableFloatChunk.java | 6 +- .../deephaven/chunk/ResettableIntChunk.java | 6 +- .../deephaven/chunk/ResettableLongChunk.java | 6 +- .../chunk/ResettableObjectChunk.java | 6 +- .../deephaven/chunk/ResettableShortChunk.java | 6 +- .../chunk/ResettableWritableBooleanChunk.java | 6 +- .../chunk/ResettableWritableByteChunk.java | 6 +- .../chunk/ResettableWritableCharChunk.java | 6 +- .../chunk/ResettableWritableDoubleChunk.java | 6 +- .../chunk/ResettableWritableFloatChunk.java | 6 +- .../chunk/ResettableWritableIntChunk.java | 6 +- .../chunk/ResettableWritableLongChunk.java | 6 +- .../chunk/ResettableWritableObjectChunk.java | 6 +- .../chunk/ResettableWritableShortChunk.java | 6 +- .../io/deephaven/chunk/Chunk.gwt.xml | 4 +- .../io/deephaven/util/QueryConstants.java | 4 +- .../barrage/BarrageStreamGeneratorImpl.java | 2 +- .../chunk/BaseChunkInputStreamGenerator.java | 13 +- .../BooleanChunkInputStreamGenerator.java | 3 + .../chunk/ByteChunkInputStreamGenerator.java | 3 + .../chunk/CharChunkInputStreamGenerator.java | 3 + .../chunk/ChunkInputStreamGenerator.java | 149 ++++++------- .../DoubleChunkInputStreamGenerator.java | 3 + .../FixedWidthChunkInputStreamGenerator.java | 2 +- .../chunk/FloatChunkInputStreamGenerator.java | 3 + .../chunk/IntChunkInputStreamGenerator.java | 3 + .../chunk/LongChunkInputStreamGenerator.java | 3 + .../chunk/ShortChunkInputStreamGenerator.java | 3 + ...ElementListHeaderInputStreamGenerator.java | 9 +- .../VarBinaryChunkInputStreamGenerator.java | 4 + .../VarListChunkInputStreamGenerator.java | 4 + .../VectorChunkInputStreamGenerator.java | 3 + .../array/ObjectArrayExpansionKernel.java | 2 +- .../barrage/util/BarrageProtoUtil.java | 7 - .../barrage/util/DefensiveCapture.java | 4 +- .../util/ExposedByteArrayOutputStream.java | 12 ++ .../barrage/util/TableToArrowConverter.java | 4 +- .../extensions/barrage/Barrage.gwt.xml | 8 + .../chunk/BarrageColumnRoundTripTest.java | 18 +- .../util/UnaryInputStreamMarshaller.java | 6 +- .../barrage/BarrageMessageRoundTripTest.java | 6 +- web/client-api/client-api.gradle | 8 +- .../io/deephaven/web/DeephavenApi.gwt.xml | 5 + .../web/client/api/WorkerConnection.java | 12 ++ .../client/api/barrage/WebBarrageMessage.java | 3 + .../api/barrage/WebBarrageStreamReader.java | 202 ++---------------- .../client/api/barrage/WebBarrageUtils.java | 8 +- .../io/LittleEndianDataInputStream.java | 198 +++++++++++++++++ .../io/LittleEndianDataOutputStream.java | 167 +++++++++++++++ .../chunk/util/pools/MultiChunkPool.java | 164 +++++++------- .../web/super/java/io/DataInput.java | 19 ++ .../web/super/java/io/DataOutput.java | 18 ++ 
.../web/super/java/io/DataOutputStream.java | 104 +++++++++ .../web/super/java/io/EOFException.java | 10 + .../src/main/resources/io/grpc/Grpc.gwt.xml | 3 + .../org/immutables/value/Immutables.gwt.xml | 3 + 73 files changed, 955 insertions(+), 545 deletions(-) create mode 100644 IO/src/main/resources/io/deephaven/io/IO.gwt.xml create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ExposedByteArrayOutputStream.java create mode 100644 extensions/barrage/src/main/resources/io/deephaven/extensions/barrage/Barrage.gwt.xml create mode 100644 web/client-api/src/main/resources/io/deephaven/web/super/com/google/common/io/LittleEndianDataInputStream.java create mode 100644 web/client-api/src/main/resources/io/deephaven/web/super/com/google/common/io/LittleEndianDataOutputStream.java create mode 100644 web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataInput.java create mode 100644 web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataOutput.java create mode 100644 web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataOutputStream.java create mode 100644 web/client-api/src/main/resources/io/deephaven/web/super/java/io/EOFException.java create mode 100644 web/client-api/src/main/resources/io/grpc/Grpc.gwt.xml create mode 100644 web/client-api/src/main/resources/org/immutables/value/Immutables.gwt.xml diff --git a/Base/src/main/java/io/deephaven/base/verify/Assert.java b/Base/src/main/java/io/deephaven/base/verify/Assert.java index 3377b129dc3..8d7b97e1081 100644 --- a/Base/src/main/java/io/deephaven/base/verify/Assert.java +++ b/Base/src/main/java/io/deephaven/base/verify/Assert.java @@ -341,7 +341,9 @@ public static void notHoldsLock(Object o, String name) { // ################################################################ // instanceOf, notInstanceOf -@interface GwtIncompatible {} + @interface GwtIncompatible { + } + // ---------------------------------------------------------------- /** assert (o instanceof type) */ @GwtIncompatible diff --git a/Base/src/main/java/io/deephaven/base/verify/Require.java b/Base/src/main/java/io/deephaven/base/verify/Require.java index 6e564904a77..ba1a16d68e8 100644 --- a/Base/src/main/java/io/deephaven/base/verify/Require.java +++ b/Base/src/main/java/io/deephaven/base/verify/Require.java @@ -548,7 +548,8 @@ public static void notHoldsLock(Object o, String name) { notHoldsLock(o, name, 1); } - @interface GwtIncompatible {} + @interface GwtIncompatible { + } // ################################################################ // instanceOf, notInstanceOf diff --git a/Base/src/main/java/io/deephaven/base/verify/RequirementFailure.java b/Base/src/main/java/io/deephaven/base/verify/RequirementFailure.java index 21d34bf5dc6..93e3bf025ff 100644 --- a/Base/src/main/java/io/deephaven/base/verify/RequirementFailure.java +++ b/Base/src/main/java/io/deephaven/base/verify/RequirementFailure.java @@ -53,7 +53,9 @@ public void printStackTrace(PrintStream s) { s.print(getFixedStackTrace()); } - @interface GwtIncompatible {} + @interface GwtIncompatible { + } + // ---------------------------------------------------------------- @Override @GwtIncompatible diff --git a/DataStructures/src/main/java/io/deephaven/datastructures/util/CollectionUtil.java b/DataStructures/src/main/java/io/deephaven/datastructures/util/CollectionUtil.java index a6b3f6b99df..f4ec05aa861 100644 --- a/DataStructures/src/main/java/io/deephaven/datastructures/util/CollectionUtil.java +++ 
b/DataStructures/src/main/java/io/deephaven/datastructures/util/CollectionUtil.java @@ -71,7 +71,7 @@ public static Map mapFromArray(Class typeK, Class typeV, fina for (int nIndex = 0; nIndex < data.length; nIndex += 2) { Object key = data[nIndex]; if (null != key) { - Require.instanceOf(key, "key", typeK); + // Require.instanceOf(key, "key", typeK); } if (!allowDuplicateKeys) { Require.requirement(false == map.containsKey(key), "false==map.containsKey(data[nIndex])", key, @@ -79,7 +79,7 @@ public static Map mapFromArray(Class typeK, Class typeV, fina } Object value = data[nIndex + 1]; if (null != value) { - Require.instanceOf(value, "value", typeV); + // Require.instanceOf(value, "value", typeV); } map.put((K) key, (V) value); } @@ -113,7 +113,7 @@ public static Set setFromArray(E... data) { public static Set setFromArray(@NotNull final Class type, @NotNull final Object... data) { final Set set = newSizedLinkedHashSet(data.length); for (final Object elem : data) { - Require.requirement(elem == null || type.isInstance(elem), "elem == null || type.isInstance(elem)"); + // Require.requirement(elem == null || type.isInstance(elem), "elem == null || type.isInstance(elem)"); // noinspection unchecked Require.requirement(set.add((TYPE) elem), "set.add((TYPE)elem)"); } diff --git a/IO/src/main/java/io/deephaven/io/streams/ByteBufferInputStream.java b/IO/src/main/java/io/deephaven/io/streams/ByteBufferInputStream.java index 7522906028b..7b78bc802a2 100644 --- a/IO/src/main/java/io/deephaven/io/streams/ByteBufferInputStream.java +++ b/IO/src/main/java/io/deephaven/io/streams/ByteBufferInputStream.java @@ -6,41 +6,30 @@ import java.io.DataInput; import java.io.EOFException; import java.io.IOException; +import java.io.InputStream; import java.io.UTFDataFormatException; import java.nio.BufferUnderflowException; import java.nio.ByteBuffer; -import io.deephaven.base.string.cache.CharSequenceAdapterBuilder; -import io.deephaven.base.string.cache.StringCache; -import org.jetbrains.annotations.NotNull; - /** * This is an InputStream implementation which reads from a java.nio.ByteBuffer. If a read operation crosses the end of * the buffer, the BufferUnderflowException is converted to an EOFException. - * - * The stream contains no state other than that in in the buffer itself, so the buffer can be exchanged at will with the + *
<br>
+ * The stream contains no state other than that in the buffer itself, so the buffer can be exchanged at will with the * setBuffer() method. + *
<br>
+ * Endianness is determined by the provided buffer itself. */ -public class ByteBufferInputStream extends java.io.InputStream implements DataInput { +public class ByteBufferInputStream extends InputStream implements DataInput { /** the buffer from which we read */ protected ByteBuffer buf; - private char[] utfChars; - - /** - * The DataOutput interface always writes bytes in big-endian order, while ByteBuffer allows the order to be big- or - * little-endian. Set this flag true to assume that the buffer is bid-endian, or false to check the buffer's order - * at each write. - */ - // protected static final boolean ASSUME_BIG_ENDIAN = true; - /** * Construct a new stream which reads from a byte buffer/ */ public ByteBufferInputStream(ByteBuffer buf) { this.buf = buf; - this.utfChars = new char[0]; } /** @@ -307,82 +296,40 @@ public String readLine() throws IOException { return new String(chars); } + @Override public String readUTF() throws IOException { - int length = 0; - int total = readUnsignedShort(); - - final char[] chars = new char[total]; - - while (total > 0) { - final int b1 = buf.get(); - if ((b1 & 0x80) == 0) { - chars[length++] = (char) (b1 & 0xff); - total--; - } else if ((b1 & 0xe0) == 0xc0) { - final int b2 = buf.get(); - if ((b2 & 0xc0) != 0x80) { - throw new UTFDataFormatException("malformed second byte " + b2); - } - chars[length++] = (char) (((b1 & 0x1F) << 6) | (b2 & 0x3F)); - total -= 2; - } else if ((b1 & 0xf0) == 0xe0) { - final int b2 = buf.get(); - final int b3 = buf.get(); - if ((b2 & 0xc0) != 0x80 || (b3 & 0xc0) != 0x80) { - throw new UTFDataFormatException("malformed second byte " + b2 + " or third byte " + b3); - } - chars[length++] = (char) (((b1 & 0x0F) << 12) | ((b2 & 0x3F) << 6) | (b3 & 0x3F)); - total -= 3; - } else { - throw new UTFDataFormatException("malformed first byte " + b1); - } - } - - return new String(chars, 0, length); - } - - public String readUTF(@NotNull final CharSequenceAdapterBuilder output, - @NotNull final StringCache cache) throws IOException { - readUTF(output); - return cache.getCachedString(output); - } - - public void readUTF(@NotNull final CharSequenceAdapterBuilder output) throws IOException { - int total = readUnsignedShort(); - - output.clear().reserveCapacity(total); - - while (total > 0) { - final int b1 = buf.get(); - if ((b1 & 0x80) == 0) { - output.append((char) (b1 & 0xff)); - total--; - } else if ((b1 & 0xe0) == 0xc0) { - final int b2 = buf.get(); - if ((b2 & 0xc0) != 0x80) { - throw new UTFDataFormatException("malformed second byte " + b2); - } - output.append((char) (((b1 & 0x1F) << 6) | (b2 & 0x3F))); - total -= 2; - } else if ((b1 & 0xf0) == 0xe0) { - final int b2 = buf.get(); - final int b3 = buf.get(); - if ((b2 & 0xc0) != 0x80 || (b3 & 0xc0) != 0x80) { - throw new UTFDataFormatException("malformed second byte " + b2 + " or third byte " + b3); - } - output.append((char) (((b1 & 0x0F) << 12) | ((b2 & 0x3F) << 6) | (b3 & 0x3F))); - total -= 3; - } else { - throw new UTFDataFormatException("malformed first byte " + b1); - } - } - } - - public void read(ByteBuffer dest, int length) { - final int sourceLimit = buf.limit(); - buf.limit(buf.position() + length); // Constrain buf.remaining() to length - dest.put(buf); - buf.limit(sourceLimit); + throw new UnsupportedOperationException("readUTF"); + // int length = 0; + // int total = readUnsignedShort(); + // + // final char[] chars = new char[total]; + // + // while (total > 0) { + // final int b1 = buf.get(); + // if ((b1 & 0x80) == 0) { + // chars[length++] = (char) (b1 
& 0xff); + // total--; + // } else if ((b1 & 0xe0) == 0xc0) { + // final int b2 = buf.get(); + // if ((b2 & 0xc0) != 0x80) { + // throw new UTFDataFormatException("malformed second byte " + b2); + // } + // chars[length++] = (char) (((b1 & 0x1F) << 6) | (b2 & 0x3F)); + // total -= 2; + // } else if ((b1 & 0xf0) == 0xe0) { + // final int b2 = buf.get(); + // final int b3 = buf.get(); + // if ((b2 & 0xc0) != 0x80 || (b3 & 0xc0) != 0x80) { + // throw new UTFDataFormatException("malformed second byte " + b2 + " or third byte " + b3); + // } + // chars[length++] = (char) (((b1 & 0x0F) << 12) | ((b2 & 0x3F) << 6) | (b3 & 0x3F)); + // total -= 3; + // } else { + // throw new UTFDataFormatException("malformed first byte " + b1); + // } + // } + // + // return new String(chars, 0, length); } } diff --git a/IO/src/main/resources/io/deephaven/io/IO.gwt.xml b/IO/src/main/resources/io/deephaven/io/IO.gwt.xml new file mode 100644 index 00000000000..268e5cadc63 --- /dev/null +++ b/IO/src/main/resources/io/deephaven/io/IO.gwt.xml @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/Util/src/main/java/io/deephaven/util/MultiException.java b/Util/src/main/java/io/deephaven/util/MultiException.java index 0b5d479cd83..60046ca2059 100644 --- a/Util/src/main/java/io/deephaven/util/MultiException.java +++ b/Util/src/main/java/io/deephaven/util/MultiException.java @@ -84,7 +84,9 @@ public Throwable[] getCauses() { return causes; } - @interface GwtIncompatible {} + @interface GwtIncompatible { + } + @Override @GwtIncompatible public void printStackTrace(PrintWriter s) { diff --git a/Util/src/main/java/io/deephaven/util/type/ArrayTypeUtils.java b/Util/src/main/java/io/deephaven/util/type/ArrayTypeUtils.java index 7f0e57c9211..e5915ad4794 100644 --- a/Util/src/main/java/io/deephaven/util/type/ArrayTypeUtils.java +++ b/Util/src/main/java/io/deephaven/util/type/ArrayTypeUtils.java @@ -452,7 +452,10 @@ public static Object[] getBoxedArray(Object value) { return (Object[]) value; } } - @interface GwtIncompatible {} + + @interface GwtIncompatible { + } + @GwtIncompatible public static boolean equals(Object actualValue, Object expectedValue) { final Class ct = actualValue.getClass().getComponentType(); diff --git a/Util/src/main/java/io/deephaven/util/type/TypeUtils.java b/Util/src/main/java/io/deephaven/util/type/TypeUtils.java index 4b4b9251b08..abc9043ccab 100644 --- a/Util/src/main/java/io/deephaven/util/type/TypeUtils.java +++ b/Util/src/main/java/io/deephaven/util/type/TypeUtils.java @@ -728,7 +728,9 @@ public static Class getErasedType(Type paramType) { } } - @interface GwtIncompatible {} + @interface GwtIncompatible { + } + /** * Determine the weakest parent of the two provided Classes. 
* diff --git a/Util/src/main/resources/io/deephaven/Util.gwt.xml b/Util/src/main/resources/io/deephaven/Util.gwt.xml index b7c6a6bbfd8..c8b4989cfa6 100644 --- a/Util/src/main/resources/io/deephaven/Util.gwt.xml +++ b/Util/src/main/resources/io/deephaven/Util.gwt.xml @@ -1,9 +1,10 @@ - + - + + \ No newline at end of file diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/Chunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/Chunk.java index 7de68e126f9..35e152dcd0f 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/Chunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/Chunk.java @@ -117,8 +117,7 @@ default void copyToBuffer(int srcOffset, @NotNull Buffer destBuffer, int destOff default void checkChunkType(ChunkType expected) { final ChunkType actual = getChunkType(); if (actual != expected) { - throw new IllegalArgumentException( - String.format("Expected chunk type '%s', but is '%s'.", expected, actual)); + throw new IllegalArgumentException("Expected chunk type '" + expected + "', but is '" + actual + "'."); } } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ChunkBase.java b/engine/chunk/src/main/java/io/deephaven/chunk/ChunkBase.java index 05bcfbd3b06..9d885f26720 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ChunkBase.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ChunkBase.java @@ -44,8 +44,7 @@ public final void internalSetSize(int newSize, long password) { "DO NOT CALL THIS INTERNAL METHOD. Instead call WritableChunk.setSize()"); } if (newSize < 0 || newSize > capacity) { - throw new IllegalArgumentException( - String.format("size %d is incompatible with capacity %d", newSize, capacity)); + throw new IllegalArgumentException("size " + newSize + " is incompatible with capacity " + capacity); } this.size = newSize; diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ChunkHelpers.java b/engine/chunk/src/main/java/io/deephaven/chunk/ChunkHelpers.java index a212837abad..b18e9f59082 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ChunkHelpers.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ChunkHelpers.java @@ -11,16 +11,14 @@ public class ChunkHelpers { static void checkSliceArgs(int size, int offset, int capacity) { if (offset < 0 || offset > size || capacity < 0 || capacity > size - offset) { throw new IllegalArgumentException( - String.format("New slice offset %d, capacity %d is incompatible with size %d", - offset, capacity, size)); + "New slice offset " + offset + ", capacity " + capacity + " is incompatible with size " + size); } } static void checkArrayArgs(int arrayLength, int offset, int capacity) { if (offset < 0 || capacity < 0 || capacity > arrayLength - offset) { - throw new IllegalArgumentException( - String.format("offset %d, capacity %d is incompatible with array of length %d", - offset, capacity, arrayLength)); + throw new IllegalArgumentException("offset " + offset + ", capacity " + capacity + + " is incompatible with array of length " + arrayLength); } } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableBooleanChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableBooleanChunk.java index c61f51cb6e5..a0ea3d0cef8 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableBooleanChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableBooleanChunk.java @@ -21,9 +21,9 @@ public class ResettableBooleanChunk implements ResettableReadOnlyChunk { public static ResettableBooleanChunk makeResettableChunk() { - if 
(POOL_RESETTABLE_CHUNKS) { - return MultiChunkPool.forThisThread().takeResettableBooleanChunk(); - } + // if (POOL_RESETTABLE_CHUNKS) { + // return MultiChunkPool.forThisThread().takeResettableBooleanChunk(); + // } return new ResettableBooleanChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableByteChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableByteChunk.java index ee6cac130fb..047ab43e77e 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableByteChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableByteChunk.java @@ -21,9 +21,9 @@ public class ResettableByteChunk implements ResettableReadOnlyChunk { public static ResettableByteChunk makeResettableChunk() { - if (POOL_RESETTABLE_CHUNKS) { - return MultiChunkPool.forThisThread().takeResettableByteChunk(); - } + // if (POOL_RESETTABLE_CHUNKS) { + // return MultiChunkPool.forThisThread().takeResettableByteChunk(); + // } return new ResettableByteChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableCharChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableCharChunk.java index c18d43cd8b0..6a6106e498c 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableCharChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableCharChunk.java @@ -17,9 +17,9 @@ public class ResettableCharChunk implements ResettableReadOnlyChunk { public static ResettableCharChunk makeResettableChunk() { - if (POOL_RESETTABLE_CHUNKS) { - return MultiChunkPool.forThisThread().takeResettableCharChunk(); - } + // if (POOL_RESETTABLE_CHUNKS) { + // return MultiChunkPool.forThisThread().takeResettableCharChunk(); + // } return new ResettableCharChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableDoubleChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableDoubleChunk.java index db9118a2e97..5c3a5c556ff 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableDoubleChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableDoubleChunk.java @@ -21,9 +21,9 @@ public class ResettableDoubleChunk implements ResettableReadOnlyChunk { public static ResettableDoubleChunk makeResettableChunk() { - if (POOL_RESETTABLE_CHUNKS) { - return MultiChunkPool.forThisThread().takeResettableDoubleChunk(); - } + // if (POOL_RESETTABLE_CHUNKS) { + // return MultiChunkPool.forThisThread().takeResettableDoubleChunk(); + // } return new ResettableDoubleChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableFloatChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableFloatChunk.java index e739e35364f..eeefb014eb5 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableFloatChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableFloatChunk.java @@ -21,9 +21,9 @@ public class ResettableFloatChunk implements ResettableReadOnlyChunk { public static ResettableFloatChunk makeResettableChunk() { - if (POOL_RESETTABLE_CHUNKS) { - return MultiChunkPool.forThisThread().takeResettableFloatChunk(); - } + // if (POOL_RESETTABLE_CHUNKS) { + // return MultiChunkPool.forThisThread().takeResettableFloatChunk(); + // } return new ResettableFloatChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableIntChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableIntChunk.java index 06af943913b..d1f71e47918 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableIntChunk.java +++ 
b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableIntChunk.java @@ -21,9 +21,9 @@ public class ResettableIntChunk implements ResettableReadOnlyChunk { public static ResettableIntChunk makeResettableChunk() { - if (POOL_RESETTABLE_CHUNKS) { - return MultiChunkPool.forThisThread().takeResettableIntChunk(); - } + // if (POOL_RESETTABLE_CHUNKS) { + // return MultiChunkPool.forThisThread().takeResettableIntChunk(); + // } return new ResettableIntChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableLongChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableLongChunk.java index 20967a341d7..91b70991084 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableLongChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableLongChunk.java @@ -21,9 +21,9 @@ public class ResettableLongChunk implements ResettableReadOnlyChunk { public static ResettableLongChunk makeResettableChunk() { - if (POOL_RESETTABLE_CHUNKS) { - return MultiChunkPool.forThisThread().takeResettableLongChunk(); - } + // if (POOL_RESETTABLE_CHUNKS) { + // return MultiChunkPool.forThisThread().takeResettableLongChunk(); + // } return new ResettableLongChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableObjectChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableObjectChunk.java index 94988281898..181ce7a240a 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableObjectChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableObjectChunk.java @@ -21,9 +21,9 @@ public class ResettableObjectChunk implements ResettableReadOnlyChunk { public static ResettableObjectChunk makeResettableChunk() { - if (POOL_RESETTABLE_CHUNKS) { - return MultiChunkPool.forThisThread().takeResettableObjectChunk(); - } + // if (POOL_RESETTABLE_CHUNKS) { + // return MultiChunkPool.forThisThread().takeResettableObjectChunk(); + // } return new ResettableObjectChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableShortChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableShortChunk.java index 3d15e10884a..773e37c3c2e 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableShortChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableShortChunk.java @@ -21,9 +21,9 @@ public class ResettableShortChunk implements ResettableReadOnlyChunk { public static ResettableShortChunk makeResettableChunk() { - if (POOL_RESETTABLE_CHUNKS) { - return MultiChunkPool.forThisThread().takeResettableShortChunk(); - } + // if (POOL_RESETTABLE_CHUNKS) { + // return MultiChunkPool.forThisThread().takeResettableShortChunk(); + // } return new ResettableShortChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableBooleanChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableBooleanChunk.java index f895aed46fa..d622ee69cc1 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableBooleanChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableBooleanChunk.java @@ -21,9 +21,9 @@ public class ResettableWritableBooleanChunk implements ResettableWritableChunk { public static ResettableWritableBooleanChunk makeResettableChunk() { - if (POOL_RESETTABLE_CHUNKS) { - return MultiChunkPool.forThisThread().takeResettableWritableBooleanChunk(); - } + // if (POOL_RESETTABLE_CHUNKS) { + // return MultiChunkPool.forThisThread().takeResettableWritableBooleanChunk(); + // } return new 
ResettableWritableBooleanChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableByteChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableByteChunk.java index 3636270fb79..bd0102d8fa5 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableByteChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableByteChunk.java @@ -21,9 +21,9 @@ public class ResettableWritableByteChunk implements ResettableWritableChunk { public static ResettableWritableByteChunk makeResettableChunk() { - if (POOL_RESETTABLE_CHUNKS) { - return MultiChunkPool.forThisThread().takeResettableWritableByteChunk(); - } + // if (POOL_RESETTABLE_CHUNKS) { + // return MultiChunkPool.forThisThread().takeResettableWritableByteChunk(); + // } return new ResettableWritableByteChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableCharChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableCharChunk.java index dd47b435998..36132fe46be 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableCharChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableCharChunk.java @@ -17,9 +17,9 @@ public class ResettableWritableCharChunk implements ResettableWritableChunk { public static ResettableWritableCharChunk makeResettableChunk() { - if (POOL_RESETTABLE_CHUNKS) { - return MultiChunkPool.forThisThread().takeResettableWritableCharChunk(); - } + // if (POOL_RESETTABLE_CHUNKS) { + // return MultiChunkPool.forThisThread().takeResettableWritableCharChunk(); + // } return new ResettableWritableCharChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableDoubleChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableDoubleChunk.java index 00f7a879c75..f6a17b77916 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableDoubleChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableDoubleChunk.java @@ -21,9 +21,9 @@ public class ResettableWritableDoubleChunk implements ResettableWritableChunk { public static ResettableWritableDoubleChunk makeResettableChunk() { - if (POOL_RESETTABLE_CHUNKS) { - return MultiChunkPool.forThisThread().takeResettableWritableDoubleChunk(); - } + // if (POOL_RESETTABLE_CHUNKS) { + // return MultiChunkPool.forThisThread().takeResettableWritableDoubleChunk(); + // } return new ResettableWritableDoubleChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableFloatChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableFloatChunk.java index 003165f905c..cfb93fbfe85 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableFloatChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableFloatChunk.java @@ -21,9 +21,9 @@ public class ResettableWritableFloatChunk implements ResettableWritableChunk { public static ResettableWritableFloatChunk makeResettableChunk() { - if (POOL_RESETTABLE_CHUNKS) { - return MultiChunkPool.forThisThread().takeResettableWritableFloatChunk(); - } + // if (POOL_RESETTABLE_CHUNKS) { + // return MultiChunkPool.forThisThread().takeResettableWritableFloatChunk(); + // } return new ResettableWritableFloatChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableIntChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableIntChunk.java index 170cf47e4df..a456bbf49e6 100644 
--- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableIntChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableIntChunk.java @@ -21,9 +21,9 @@ public class ResettableWritableIntChunk implements ResettableWritableChunk { public static ResettableWritableIntChunk makeResettableChunk() { - if (POOL_RESETTABLE_CHUNKS) { - return MultiChunkPool.forThisThread().takeResettableWritableIntChunk(); - } + // if (POOL_RESETTABLE_CHUNKS) { + // return MultiChunkPool.forThisThread().takeResettableWritableIntChunk(); + // } return new ResettableWritableIntChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableLongChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableLongChunk.java index 7635704ce27..e160012ccc2 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableLongChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableLongChunk.java @@ -21,9 +21,9 @@ public class ResettableWritableLongChunk implements ResettableWritableChunk { public static ResettableWritableLongChunk makeResettableChunk() { - if (POOL_RESETTABLE_CHUNKS) { - return MultiChunkPool.forThisThread().takeResettableWritableLongChunk(); - } + // if (POOL_RESETTABLE_CHUNKS) { + // return MultiChunkPool.forThisThread().takeResettableWritableLongChunk(); + // } return new ResettableWritableLongChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableObjectChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableObjectChunk.java index 9062ac29e15..9e1f64be430 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableObjectChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableObjectChunk.java @@ -21,9 +21,9 @@ public class ResettableWritableObjectChunk implements ResettableWritableChunk { public static ResettableWritableObjectChunk makeResettableChunk() { - if (POOL_RESETTABLE_CHUNKS) { - return MultiChunkPool.forThisThread().takeResettableWritableObjectChunk(); - } + // if (POOL_RESETTABLE_CHUNKS) { + // return MultiChunkPool.forThisThread().takeResettableWritableObjectChunk(); + // } return new ResettableWritableObjectChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableShortChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableShortChunk.java index af93738f02e..29e64adb82d 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableShortChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableShortChunk.java @@ -21,9 +21,9 @@ public class ResettableWritableShortChunk implements ResettableWritableChunk { public static ResettableWritableShortChunk makeResettableChunk() { - if (POOL_RESETTABLE_CHUNKS) { - return MultiChunkPool.forThisThread().takeResettableWritableShortChunk(); - } + // if (POOL_RESETTABLE_CHUNKS) { + // return MultiChunkPool.forThisThread().takeResettableWritableShortChunk(); + // } return new ResettableWritableShortChunk<>(); } diff --git a/engine/chunk/src/main/resources/io/deephaven/chunk/Chunk.gwt.xml b/engine/chunk/src/main/resources/io/deephaven/chunk/Chunk.gwt.xml index d786d2e3976..1b8c6331dff 100644 --- a/engine/chunk/src/main/resources/io/deephaven/chunk/Chunk.gwt.xml +++ b/engine/chunk/src/main/resources/io/deephaven/chunk/Chunk.gwt.xml @@ -1,5 +1,7 @@ - + + + \ No newline at end of file diff --git a/engine/query-constants/src/main/java/io/deephaven/util/QueryConstants.java 
b/engine/query-constants/src/main/java/io/deephaven/util/QueryConstants.java index 0dcb1cd4a6a..703ae560ab7 100644 --- a/engine/query-constants/src/main/java/io/deephaven/util/QueryConstants.java +++ b/engine/query-constants/src/main/java/io/deephaven/util/QueryConstants.java @@ -187,7 +187,9 @@ private QueryConstants() {} /** * Minimum finite value of type float. */ - @interface GwtIncompatible {} + @interface GwtIncompatible { + } + @GwtIncompatible public static final float MIN_FINITE_FLOAT = Math.nextUp(-Float.MAX_VALUE); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java index 975cbe8d3f4..38d9d6816d8 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java @@ -27,7 +27,7 @@ import io.deephaven.engine.table.impl.util.BarrageMessage; import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator; import io.deephaven.extensions.barrage.chunk.SingleElementListHeaderInputStreamGenerator; -import io.deephaven.extensions.barrage.util.BarrageProtoUtil.ExposedByteArrayOutputStream; +import io.deephaven.extensions.barrage.util.ExposedByteArrayOutputStream; import io.deephaven.extensions.barrage.util.BarrageUtil; import io.deephaven.extensions.barrage.util.DefensiveDrainable; import io.deephaven.extensions.barrage.util.StreamReaderOptions; diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BaseChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BaseChunkInputStreamGenerator.java index ab4c5bd20b8..f220d7acff7 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BaseChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BaseChunkInputStreamGenerator.java @@ -3,6 +3,7 @@ // package io.deephaven.extensions.barrage.chunk; +import com.google.common.annotations.GwtIncompatible; import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.RowSequenceFactory; @@ -25,6 +26,7 @@ public abstract class BaseChunkInputStreamGenerator> imp // Field updater for refCount, so we can avoid creating an {@link java.util.concurrent.atomic.AtomicInteger} for // each instance. 
@SuppressWarnings("rawtypes") + @GwtIncompatible protected static final AtomicIntegerFieldUpdater REFERENCE_COUNT_UPDATER = AtomicIntegerFieldUpdater.newUpdater(BaseChunkInputStreamGenerator.class, "refCount"); @@ -51,11 +53,11 @@ public long getLastRowOffset() { @Override public void close() { - if (REFERENCE_COUNT_UPDATER.decrementAndGet(this) == 0) { - if (chunk instanceof PoolableChunk) { - ((PoolableChunk) chunk).close(); - } - } + // if (REFERENCE_COUNT_UPDATER.decrementAndGet(this) == 0) { + // if (chunk instanceof PoolableChunk) { + // ((PoolableChunk) chunk).close(); + // } + // } } /** @@ -78,6 +80,7 @@ protected static int getNumLongsForBitPackOfSize(final int numElements) { return ((numElements + 63) / 64); } + @GwtIncompatible abstract class BaseChunkInputStream extends DrainableColumn { protected final StreamReaderOptions options; protected final RowSequence subset; diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkInputStreamGenerator.java index 8c1503b67f8..88cc47b4135 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkInputStreamGenerator.java @@ -3,6 +3,7 @@ // package io.deephaven.extensions.barrage.chunk; +import com.google.common.annotations.GwtIncompatible; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.attributes.Values; @@ -49,10 +50,12 @@ public static BooleanChunkInputStreamGenerator convertBoxed( } @Override + @GwtIncompatible public DrainableColumn getInputStream(final StreamReaderOptions options, @Nullable final RowSet subset) { return new BooleanChunkInputStream(options, subset); } + @GwtIncompatible private class BooleanChunkInputStream extends BaseChunkInputStream { private BooleanChunkInputStream(final StreamReaderOptions options, final RowSet subset) { super(chunk, options, subset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java index 66f6e1550e5..6530a1731d1 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java @@ -7,6 +7,7 @@ // @formatter:off package io.deephaven.extensions.barrage.chunk; +import com.google.common.annotations.GwtIncompatible; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.attributes.Values; @@ -52,10 +53,12 @@ public static ByteChunkInputStreamGenerator convertBoxed( } @Override + @GwtIncompatible public DrainableColumn getInputStream(final StreamReaderOptions options, @Nullable final RowSet subset) { return new ByteChunkInputStream(options, subset); } + @GwtIncompatible private class ByteChunkInputStream extends BaseChunkInputStream { private ByteChunkInputStream(final StreamReaderOptions options, final RowSet subset) { super(chunk, options, subset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java 
b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java index 75a3c81d2fe..32c579f4952 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java @@ -3,6 +3,7 @@ // package io.deephaven.extensions.barrage.chunk; +import com.google.common.annotations.GwtIncompatible; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.attributes.Values; @@ -48,10 +49,12 @@ public static CharChunkInputStreamGenerator convertBoxed( } @Override + @GwtIncompatible public DrainableColumn getInputStream(final StreamReaderOptions options, @Nullable final RowSet subset) { return new CharChunkInputStream(options, subset); } + @GwtIncompatible private class CharChunkInputStream extends BaseChunkInputStream { private CharChunkInputStream(final StreamReaderOptions options, final RowSet subset) { super(chunk, options, subset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java index 2eed122689a..6449607d499 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java @@ -4,6 +4,7 @@ package io.deephaven.extensions.barrage.chunk; import com.google.common.base.Charsets; +import com.google.common.annotations.GwtIncompatible; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.WritableLongChunk; @@ -26,6 +27,7 @@ import java.io.IOException; import java.math.BigDecimal; import java.math.BigInteger; +import java.nio.charset.StandardCharsets; import java.time.Instant; import java.time.ZonedDateTime; import java.util.Arrays; @@ -70,14 +72,14 @@ static ChunkInputStreamGenerator makeInputStreamGenerator( return new VarListChunkInputStreamGenerator<>(type, chunk.asObjectChunk(), rowOffset); } } - if (Vector.class.isAssignableFrom(type)) { - // noinspection unchecked - return new VectorChunkInputStreamGenerator( - (Class>) type, componentType, chunk.asObjectChunk(), rowOffset); - } + // if (Vector.class.isAssignableFrom(type)) { + // // noinspection unchecked + // return new VectorChunkInputStreamGenerator( + // (Class>) type, componentType, chunk.asObjectChunk(), rowOffset); + // } if (type == String.class) { return new VarBinaryChunkInputStreamGenerator(chunk.asObjectChunk(), rowOffset, - (out, str) -> out.write(str.getBytes(Charsets.UTF_8))); + (out, str) -> out.write(str.getBytes(StandardCharsets.UTF_8))); } if (type == BigInteger.class) { return new VarBinaryChunkInputStreamGenerator(chunk.asObjectChunk(), rowOffset, @@ -96,30 +98,30 @@ static ChunkInputStreamGenerator makeInputStreamGenerator( out.write(normal.unscaledValue().toByteArray()); }); } - if (type == Instant.class) { - // This code path is utilized for arrays and vectors of Instant, which cannot be reinterpreted. 
- ObjectChunk objChunk = chunk.asObjectChunk(); - WritableLongChunk outChunk = WritableLongChunk.makeWritableChunk(objChunk.size()); - for (int i = 0; i < objChunk.size(); ++i) { - outChunk.set(i, DateTimeUtils.epochNanos(objChunk.get(i))); - } - if (chunk instanceof PoolableChunk) { - ((PoolableChunk) chunk).close(); - } - return new LongChunkInputStreamGenerator(outChunk, Long.BYTES, rowOffset); - } - if (type == ZonedDateTime.class) { - // This code path is utilized for arrays and vectors of Instant, which cannot be reinterpreted. - ObjectChunk objChunk = chunk.asObjectChunk(); - WritableLongChunk outChunk = WritableLongChunk.makeWritableChunk(objChunk.size()); - for (int i = 0; i < objChunk.size(); ++i) { - outChunk.set(i, DateTimeUtils.epochNanos(objChunk.get(i))); - } - if (chunk instanceof PoolableChunk) { - ((PoolableChunk) chunk).close(); - } - return new LongChunkInputStreamGenerator(outChunk, Long.BYTES, rowOffset); - } + // if (type == Instant.class) { + // // This code path is utilized for arrays and vectors of Instant, which cannot be reinterpreted. + // ObjectChunk objChunk = chunk.asObjectChunk(); + // WritableLongChunk outChunk = WritableLongChunk.makeWritableChunk(objChunk.size()); + // for (int i = 0; i < objChunk.size(); ++i) { + // outChunk.set(i, DateTimeUtils.epochNanos(objChunk.get(i))); + // } + // if (chunk instanceof PoolableChunk) { + // ((PoolableChunk) chunk).close(); + // } + // return new LongChunkInputStreamGenerator(outChunk, Long.BYTES, rowOffset); + // } + // if (type == ZonedDateTime.class) { + // // This code path is utilized for arrays and vectors of Instant, which cannot be reinterpreted. + // ObjectChunk objChunk = chunk.asObjectChunk(); + // WritableLongChunk outChunk = WritableLongChunk.makeWritableChunk(objChunk.size()); + // for (int i = 0; i < objChunk.size(); ++i) { + // outChunk.set(i, DateTimeUtils.epochNanos(objChunk.get(i))); + // } + // if (chunk instanceof PoolableChunk) { + // ((PoolableChunk) chunk).close(); + // } + // return new LongChunkInputStreamGenerator(outChunk, Long.BYTES, rowOffset); + // } if (type == Boolean.class) { return BooleanChunkInputStreamGenerator.convertBoxed(chunk.asObjectChunk(), rowOffset); } @@ -147,7 +149,7 @@ static ChunkInputStreamGenerator makeInputStreamGenerator( // TODO (core#936): support column conversion modes return new VarBinaryChunkInputStreamGenerator<>(chunk.asObjectChunk(), rowOffset, - (out, item) -> out.write(item.toString().getBytes(Charsets.UTF_8))); + (out, item) -> out.write(item.toString().getBytes(StandardCharsets.UTF_8))); default: throw new UnsupportedOperationException(); } @@ -222,47 +224,47 @@ static WritableChunk extractChunkFromInputStream( options, type, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } } - if (Vector.class.isAssignableFrom(type)) { - // noinspection unchecked - return VectorChunkInputStreamGenerator.extractChunkFromInputStream( - options, (Class>) type, componentType, fieldNodeIter, bufferInfoIter, is, - outChunk, outOffset, totalRows); - } - if (type == BigInteger.class) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - is, - fieldNodeIter, - bufferInfoIter, - BigInteger::new, - outChunk, outOffset, totalRows); - } - if (type == BigDecimal.class) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - is, - fieldNodeIter, - bufferInfoIter, - (final byte[] buf, final int offset, final int length) -> { - // read the int scale value as little endian, arrow's endianness. 
- final byte b1 = buf[offset]; - final byte b2 = buf[offset + 1]; - final byte b3 = buf[offset + 2]; - final byte b4 = buf[offset + 3]; - final int scale = b4 << 24 | (b3 & 0xFF) << 16 | (b2 & 0xFF) << 8 | (b1 & 0xFF); - return new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale); - }, - outChunk, outOffset, totalRows); - } - if (type == Instant.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Long.BYTES, options, io -> DateTimeUtils.epochNanosToInstant(io.readLong()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == ZonedDateTime.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Long.BYTES, options, - io -> DateTimeUtils.epochNanosToZonedDateTime(io.readLong(), DateTimeUtils.timeZone()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } + // if (Vector.class.isAssignableFrom(type)) { + // // noinspection unchecked + // return VectorChunkInputStreamGenerator.extractChunkFromInputStream( + // options, (Class>) type, componentType, fieldNodeIter, bufferInfoIter, is, + // outChunk, outOffset, totalRows); + // } + // if (type == BigInteger.class) { + // return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + // is, + // fieldNodeIter, + // bufferInfoIter, + // BigInteger::new, + // outChunk, outOffset, totalRows); + // } + // if (type == BigDecimal.class) { + // return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + // is, + // fieldNodeIter, + // bufferInfoIter, + // (final byte[] buf, final int offset, final int length) -> { + // // read the int scale value as little endian, arrow's endianness. + // final byte b1 = buf[offset]; + // final byte b2 = buf[offset + 1]; + // final byte b3 = buf[offset + 2]; + // final byte b4 = buf[offset + 3]; + // final int scale = b4 << 24 | (b3 & 0xFF) << 16 | (b2 & 0xFF) << 8 | (b1 & 0xFF); + // return new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale); + // }, + // outChunk, outOffset, totalRows); + // } + // if (type == Instant.class) { + // return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + // Long.BYTES, options, io -> DateTimeUtils.epochNanosToInstant(io.readLong()), + // fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + // } + // if (type == ZonedDateTime.class) { + // return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + // Long.BYTES, options, + // io -> DateTimeUtils.epochNanosToZonedDateTime(io.readLong(), DateTimeUtils.timeZone()), + // fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + // } if (type == Byte.class) { return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( Byte.BYTES, options, io -> TypeUtils.box(io.readByte()), @@ -302,7 +304,7 @@ static WritableChunk extractChunkFromInputStream( options.columnConversionMode().equals(ColumnConversionMode.Stringify)) { return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream(is, fieldNodeIter, bufferInfoIter, - (buf, off, len) -> new String(buf, off, len, Charsets.UTF_8), outChunk, outOffset, + (buf, off, len) -> new String(buf, off, len, StandardCharsets.UTF_8), outChunk, outOffset, totalRows); } throw new UnsupportedOperationException( @@ -329,6 +331,7 @@ static WritableChunk extractChunkFromInputStream( * @param subset if provided, is a position-space filter of source data * @return a single-use 
DrainableColumn ready to be drained via grpc */ + @GwtIncompatible DrainableColumn getInputStream(final StreamReaderOptions options, @Nullable final RowSet subset) throws IOException; final class FieldNodeInfo { diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java index edd1a886374..38f6bbc0772 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java @@ -7,6 +7,7 @@ // @formatter:off package io.deephaven.extensions.barrage.chunk; +import com.google.common.annotations.GwtIncompatible; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.attributes.Values; @@ -52,10 +53,12 @@ public static DoubleChunkInputStreamGenerator convertBoxed( } @Override + @GwtIncompatible public DrainableColumn getInputStream(final StreamReaderOptions options, @Nullable final RowSet subset) { return new DoubleChunkInputStream(options, subset); } + @GwtIncompatible private class DoubleChunkInputStream extends BaseChunkInputStream { private DoubleChunkInputStream(final StreamReaderOptions options, final RowSet subset) { super(chunk, options, subset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FixedWidthChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FixedWidthChunkInputStreamGenerator.java index 46541c8ec1d..6154735d0ae 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FixedWidthChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FixedWidthChunkInputStreamGenerator.java @@ -25,7 +25,7 @@ public interface TypeConversion { /** * Generic input stream reading from arrow's buffer and convert directly to java type. - * + * <p>
* If useDeephavenNulls is enabled, then the conversion method must properly return a null value. * * @param elementSize the number of bytes per element (element size is fixed) diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java index c5fe9d4567c..4efd43da795 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java @@ -7,6 +7,7 @@ // @formatter:off package io.deephaven.extensions.barrage.chunk; +import com.google.common.annotations.GwtIncompatible; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.attributes.Values; @@ -52,10 +53,12 @@ public static FloatChunkInputStreamGenerator convertBoxed( } @Override + @GwtIncompatible public DrainableColumn getInputStream(final StreamReaderOptions options, @Nullable final RowSet subset) { return new FloatChunkInputStream(options, subset); } + @GwtIncompatible private class FloatChunkInputStream extends BaseChunkInputStream { private FloatChunkInputStream(final StreamReaderOptions options, final RowSet subset) { super(chunk, options, subset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java index 76b885df4a0..68d3675e471 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java @@ -7,6 +7,7 @@ // @formatter:off package io.deephaven.extensions.barrage.chunk; +import com.google.common.annotations.GwtIncompatible; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.attributes.Values; @@ -52,10 +53,12 @@ public static IntChunkInputStreamGenerator convertBoxed( } @Override + @GwtIncompatible public DrainableColumn getInputStream(final StreamReaderOptions options, @Nullable final RowSet subset) { return new IntChunkInputStream(options, subset); } + @GwtIncompatible private class IntChunkInputStream extends BaseChunkInputStream { private IntChunkInputStream(final StreamReaderOptions options, final RowSet subset) { super(chunk, options, subset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java index 45a0a2612c2..605226c4076 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java @@ -7,6 +7,7 @@ // @formatter:off package io.deephaven.extensions.barrage.chunk; +import com.google.common.annotations.GwtIncompatible; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.attributes.Values; @@ -52,10 +53,12 @@ public static LongChunkInputStreamGenerator convertBoxed( } @Override + @GwtIncompatible public DrainableColumn getInputStream(final StreamReaderOptions options, 
@Nullable final RowSet subset) { return new LongChunkInputStream(options, subset); } + @GwtIncompatible private class LongChunkInputStream extends BaseChunkInputStream { private LongChunkInputStream(final StreamReaderOptions options, final RowSet subset) { super(chunk, options, subset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java index 21caf700c41..eac69513e2e 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java @@ -7,6 +7,7 @@ // @formatter:off package io.deephaven.extensions.barrage.chunk; +import com.google.common.annotations.GwtIncompatible; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.attributes.Values; @@ -52,10 +53,12 @@ public static ShortChunkInputStreamGenerator convertBoxed( } @Override + @GwtIncompatible public DrainableColumn getInputStream(final StreamReaderOptions options, @Nullable final RowSet subset) { return new ShortChunkInputStream(options, subset); } + @GwtIncompatible private class ShortChunkInputStream extends BaseChunkInputStream { private ShortChunkInputStream(final StreamReaderOptions options, final RowSet subset) { super(chunk, options, subset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/SingleElementListHeaderInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/SingleElementListHeaderInputStreamGenerator.java index f2a5cdc552d..3844405f814 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/SingleElementListHeaderInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/SingleElementListHeaderInputStreamGenerator.java @@ -41,10 +41,15 @@ public int nullCount() { return 0; } - @SuppressWarnings("UnstableApiUsage") + /** + * Unusually for an input stream, each call to this method drains the same data again, rather than draining + * nothing after the first call. + * + * @param outputStream the stream to write to.
+ * @return number of bytes drained + */ @Override public int drainTo(final OutputStream outputStream) throws IOException { - // allow this input stream to be re-read final LittleEndianDataOutputStream dos = new LittleEndianDataOutputStream(outputStream); // write offsets array diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarBinaryChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarBinaryChunkInputStreamGenerator.java index aab6fb81905..c5b7b59f417 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarBinaryChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarBinaryChunkInputStreamGenerator.java @@ -3,6 +3,7 @@ // package io.deephaven.extensions.barrage.chunk; +import com.google.common.annotations.GwtIncompatible; import com.google.common.io.LittleEndianDataOutputStream; import io.deephaven.UncheckedDeephavenException; import io.deephaven.base.verify.Assert; @@ -214,6 +215,7 @@ private synchronized void computePayload() throws IOException { } @Override + @GwtIncompatible public void close() { if (REFERENCE_COUNT_UPDATER.decrementAndGet(this) == 0) { if (chunk instanceof PoolableChunk) { @@ -226,12 +228,14 @@ public void close() { } @Override + @GwtIncompatible public DrainableColumn getInputStream(final StreamReaderOptions options, @Nullable final RowSet subset) throws IOException { computePayload(); return new ObjectChunkInputStream(options, subset); } + @GwtIncompatible private class ObjectChunkInputStream extends BaseChunkInputStream { private int cachedSize = -1; diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java index a6a0c872f7f..9c8144925a0 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java @@ -3,6 +3,7 @@ // package io.deephaven.extensions.barrage.chunk; +import com.google.common.annotations.GwtIncompatible; import com.google.common.io.LittleEndianDataOutputStream; import io.deephaven.UncheckedDeephavenException; import io.deephaven.chunk.attributes.ChunkPositions; @@ -63,6 +64,7 @@ private synchronized void computePayload() { } @Override + @GwtIncompatible public void close() { if (REFERENCE_COUNT_UPDATER.decrementAndGet(this) == 0) { if (chunk instanceof PoolableChunk) { @@ -78,12 +80,14 @@ public void close() { } @Override + @GwtIncompatible public DrainableColumn getInputStream(final StreamReaderOptions options, @Nullable final RowSet subset) throws IOException { computePayload(); return new VarListInputStream(options, subset); } + @GwtIncompatible private class VarListInputStream extends BaseChunkInputStream { private int cachedSize = -1; private final WritableIntChunk myOffsets; diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java index 5fbaf07448d..f55d821c231 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java +++ 
b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java @@ -3,6 +3,7 @@ // package io.deephaven.extensions.barrage.chunk; +import com.google.common.annotations.GwtIncompatible; import com.google.common.io.LittleEndianDataOutputStream; import io.deephaven.UncheckedDeephavenException; import io.deephaven.chunk.ChunkType; @@ -78,12 +79,14 @@ public void close() { } @Override + @GwtIncompatible public DrainableColumn getInputStream(final StreamReaderOptions options, @Nullable final RowSet subset) throws IOException { computePayload(); return new VarListInputStream(options, subset); } + @GwtIncompatible private class VarListInputStream extends BaseChunkInputStream { private int cachedSize = -1; private final WritableIntChunk myOffsets; diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/array/ObjectArrayExpansionKernel.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/array/ObjectArrayExpansionKernel.java index 3166473733c..becdd827030 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/array/ObjectArrayExpansionKernel.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/array/ObjectArrayExpansionKernel.java @@ -86,7 +86,7 @@ public WritableObjectChunk contract( if (rowLen == 0) { result.set(outOffset + i, CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); } else { - final Object[] row = (Object[]) Array.newInstance(componentType, rowLen); + final Object[] row = new Object[rowLen]; typedSource.copyToArray(lenRead, row, 0, rowLen); lenRead += rowLen; result.set(outOffset + i, row); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageProtoUtil.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageProtoUtil.java index 9bc4af6fb09..cf4a30f7b78 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageProtoUtil.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageProtoUtil.java @@ -22,7 +22,6 @@ import org.jetbrains.annotations.NotNull; import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.nio.ByteBuffer; @@ -65,12 +64,6 @@ public static RowSet toRowSet(final ByteBuffer string) { } } - public static class ExposedByteArrayOutputStream extends ByteArrayOutputStream { - public byte[] peekBuffer() { - return buf; - } - } - public static class ObjectInputStreamAdapter extends InputStream { private int sizeRemaining; diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/DefensiveCapture.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/DefensiveCapture.java index 5bd8148afc7..b8642d6e155 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/DefensiveCapture.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/DefensiveCapture.java @@ -18,11 +18,11 @@ public DefensiveCapture(DefensiveDrainable in) { this.in = Objects.requireNonNull(in); } - synchronized InputStream delegate() throws IOException { + private synchronized InputStream delegate() throws IOException { if (delegate != null) { return delegate; } - final BarrageProtoUtil.ExposedByteArrayOutputStream out = new BarrageProtoUtil.ExposedByteArrayOutputStream(); + final ExposedByteArrayOutputStream out = new ExposedByteArrayOutputStream(); final
int size = in.drainTo(out); in.close(); in = null; diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ExposedByteArrayOutputStream.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ExposedByteArrayOutputStream.java new file mode 100644 index 00000000000..5c071322a19 --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ExposedByteArrayOutputStream.java @@ -0,0 +1,13 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.extensions.barrage.util; + +import java.io.ByteArrayOutputStream; + +/** A {@link ByteArrayOutputStream} that exposes its internal buffer, avoiding the defensive copy made by {@code toByteArray()}. */ +public class ExposedByteArrayOutputStream extends ByteArrayOutputStream { + public byte[] peekBuffer() { + return buf; + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/TableToArrowConverter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/TableToArrowConverter.java index c3ccb4df4c3..917967297b2 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/TableToArrowConverter.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/TableToArrowConverter.java @@ -65,8 +65,8 @@ private static class ArrowBuilderObserver implements StreamObserver { - try (final BarrageProtoUtil.ExposedByteArrayOutputStream baos = - new BarrageProtoUtil.ExposedByteArrayOutputStream()) { + try (final ExposedByteArrayOutputStream baos = + new ExposedByteArrayOutputStream()) { ((Drainable) inputStream).drainTo(baos); batchMessages.add(baos.toByteArray()); inputStream.close(); diff --git a/extensions/barrage/src/main/resources/io/deephaven/extensions/barrage/Barrage.gwt.xml b/extensions/barrage/src/main/resources/io/deephaven/extensions/barrage/Barrage.gwt.xml new file mode 100644 index 00000000000..a482295bb09 --- /dev/null +++ b/extensions/barrage/src/main/resources/io/deephaven/extensions/barrage/Barrage.gwt.xml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java b/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java index 5bd3be47213..0aff2eac5d0 100644 --- a/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java +++ b/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java @@ -22,7 +22,7 @@ import io.deephaven.chunk.WritableLongChunk; import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.WritableShortChunk; -import io.deephaven.extensions.barrage.util.BarrageProtoUtil; +import io.deephaven.extensions.barrage.util.ExposedByteArrayOutputStream; import io.deephaven.util.BooleanUtils; import io.deephaven.util.QueryConstants; import io.deephaven.util.SafeCloseable; @@ -562,8 +562,8 @@ private static void testRoundTripSerialization( ChunkInputStreamGenerator generator = ChunkInputStreamGenerator.makeInputStreamGenerator( chunkType, type, type.getComponentType(), srcData, 0)) { // full sub logic - try (final BarrageProtoUtil.ExposedByteArrayOutputStream baos = - new BarrageProtoUtil.ExposedByteArrayOutputStream(); + try (final ExposedByteArrayOutputStream baos = + new ExposedByteArrayOutputStream(); final ChunkInputStreamGenerator.DrainableColumn column = generator.getInputStream(options, null)) { @@ -587,8 +587,8 @@ private static void testRoundTripSerialization( } // empty subset - try (final
BarrageProtoUtil.ExposedByteArrayOutputStream baos = - new BarrageProtoUtil.ExposedByteArrayOutputStream(); + try (final ExposedByteArrayOutputStream baos = + new ExposedByteArrayOutputStream(); final ChunkInputStreamGenerator.DrainableColumn column = generator.getInputStream(options, RowSetFactory.empty())) { @@ -617,8 +617,8 @@ private static void testRoundTripSerialization( builder.appendKey(i); } } - try (final BarrageProtoUtil.ExposedByteArrayOutputStream baos = - new BarrageProtoUtil.ExposedByteArrayOutputStream(); + try (final ExposedByteArrayOutputStream baos = + new ExposedByteArrayOutputStream(); final RowSet subset = builder.build(); final ChunkInputStreamGenerator.DrainableColumn column = generator.getInputStream(options, subset)) { @@ -642,8 +642,8 @@ private static void testRoundTripSerialization( } // test append to existing chunk logic - try (final BarrageProtoUtil.ExposedByteArrayOutputStream baos = - new BarrageProtoUtil.ExposedByteArrayOutputStream(); + try (final ExposedByteArrayOutputStream baos = + new ExposedByteArrayOutputStream(); final ChunkInputStreamGenerator.DrainableColumn column = generator.getInputStream(options, null)) { diff --git a/server/src/main/java/io/deephaven/server/util/UnaryInputStreamMarshaller.java b/server/src/main/java/io/deephaven/server/util/UnaryInputStreamMarshaller.java index 05e8258746d..324a5cd1625 100644 --- a/server/src/main/java/io/deephaven/server/util/UnaryInputStreamMarshaller.java +++ b/server/src/main/java/io/deephaven/server/util/UnaryInputStreamMarshaller.java @@ -3,7 +3,7 @@ // package io.deephaven.server.util; -import io.deephaven.extensions.barrage.util.BarrageProtoUtil; +import io.deephaven.extensions.barrage.util.ExposedByteArrayOutputStream; import io.grpc.MethodDescriptor; import java.io.ByteArrayInputStream; @@ -20,8 +20,8 @@ public InputStream stream(InputStream value) { @Override public InputStream parse(InputStream stream) { - try (final BarrageProtoUtil.ExposedByteArrayOutputStream baos = - new BarrageProtoUtil.ExposedByteArrayOutputStream()) { + try (final ExposedByteArrayOutputStream baos = + new ExposedByteArrayOutputStream()) { final byte[] buffer = new byte[4096]; while (stream.available() > 0) { int len = stream.read(buffer); diff --git a/server/src/test/java/io/deephaven/server/barrage/BarrageMessageRoundTripTest.java b/server/src/test/java/io/deephaven/server/barrage/BarrageMessageRoundTripTest.java index 4c4f3542851..dc22bf0316a 100644 --- a/server/src/test/java/io/deephaven/server/barrage/BarrageMessageRoundTripTest.java +++ b/server/src/test/java/io/deephaven/server/barrage/BarrageMessageRoundTripTest.java @@ -30,8 +30,8 @@ import io.deephaven.extensions.barrage.BarrageStreamGeneratorImpl; import io.deephaven.extensions.barrage.BarrageSubscriptionOptions; import io.deephaven.extensions.barrage.table.BarrageTable; -import io.deephaven.extensions.barrage.util.BarrageProtoUtil; import io.deephaven.extensions.barrage.util.BarrageStreamReader; +import io.deephaven.extensions.barrage.util.ExposedByteArrayOutputStream; import io.deephaven.extensions.barrage.util.BarrageUtil; import io.deephaven.server.arrow.ArrowModule; import io.deephaven.server.session.SessionService; @@ -1425,8 +1425,8 @@ public static class DummyObserver implements StreamObserver { - try (final BarrageProtoUtil.ExposedByteArrayOutputStream baos = - new BarrageProtoUtil.ExposedByteArrayOutputStream()) { + try (final ExposedByteArrayOutputStream baos = + new ExposedByteArrayOutputStream()) { ((Drainable) inputStream).drainTo(baos); 
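A minimal sketch of the drain-then-peek pattern that the relocated ExposedByteArrayOutputStream enables; the helper class DrainUtil and its DefensiveDrainable parameter are illustrative assumptions, not part of this patch. peekBuffer() hands back the stream's live internal array rather than the defensive copy made by toByteArray(), so only the first size bytes are meaningful:

    import io.deephaven.extensions.barrage.util.DefensiveDrainable;
    import io.deephaven.extensions.barrage.util.ExposedByteArrayOutputStream;
    import java.io.IOException;
    import java.nio.ByteBuffer;

    final class DrainUtil {
        static ByteBuffer drainToBuffer(final DefensiveDrainable source) throws IOException {
            try (final ExposedByteArrayOutputStream out = new ExposedByteArrayOutputStream()) {
                final int size = source.drainTo(out); // bytes written into out's internal buffer
                // Wrap only the valid prefix; peekBuffer() exposes the backing array without copying it.
                return ByteBuffer.wrap(out.peekBuffer(), 0, size);
            }
        }
    }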
inputStream.close(); final BarrageMessage message = diff --git a/web/client-api/client-api.gradle b/web/client-api/client-api.gradle index dad6ccb6cea..df46ff1decf 100644 --- a/web/client-api/client-api.gradle +++ b/web/client-api/client-api.gradle @@ -22,9 +22,15 @@ repositories { mavenLocal() } dependencies { + Classpaths.inheritGrpcPlatform(project) + implementation(group:'io.grpc', name:'grpc-api', classifier:'sources') + implementation project(':engine-chunk') + implementation project(':extensions-barrage') + implementation project(':DataStructures') implementation 'org.jetbrains:annotations:13.0' -// implementation 'org.jetbrains:annotations:13.0:sources' + implementation 'org.immutables:value-annotations:2.9.2' +// implementation 'org.immutables:value-annotations:2.9.2:sources' implementation project(':web-shared-beans') implementation project(':web-client-backplane') diff --git a/web/client-api/src/main/java/io/deephaven/web/DeephavenApi.gwt.xml b/web/client-api/src/main/java/io/deephaven/web/DeephavenApi.gwt.xml index a066490dbd5..9649d1d487c 100644 --- a/web/client-api/src/main/java/io/deephaven/web/DeephavenApi.gwt.xml +++ b/web/client-api/src/main/java/io/deephaven/web/DeephavenApi.gwt.xml @@ -8,9 +8,14 @@ + + + + + diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java b/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java index 2c781af0f3a..e5e79d06428 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java @@ -19,6 +19,8 @@ import io.deephaven.barrage.flatbuf.BarrageSubscriptionRequest; import io.deephaven.barrage.flatbuf.BarrageUpdateMetadata; import io.deephaven.barrage.flatbuf.ColumnConversionMode; +import io.deephaven.chunk.ChunkType; +import io.deephaven.extensions.barrage.BarrageSnapshotOptions; import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.browserflight_pb_service.BrowserFlightServiceClient; import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb.FlightData; import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb.HandshakeRequest; @@ -58,6 +60,8 @@ import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.table_pb_service.TableServiceClient; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.ticket_pb.Ticket; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.ticket_pb.TypedTicket; +import io.deephaven.web.client.api.barrage.WebBarrageMessage; +import io.deephaven.web.client.api.barrage.WebBarrageStreamReader; import io.deephaven.web.client.api.barrage.WebBarrageUtils; import io.deephaven.web.client.api.barrage.def.ColumnDefinition; import io.deephaven.web.client.api.barrage.def.InitialTableDefinition; @@ -106,6 +110,7 @@ import org.gwtproject.nio.TypedArrayHelper; import javax.annotation.Nullable; +import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; @@ -1459,6 +1464,13 @@ private void flush() { stream.onData(new JsConsumer() { @Override public void apply(FlightData data) { + try { + WebBarrageMessage webBarrageMessage = + new WebBarrageStreamReader().parseFrom(BarrageSnapshotOptions.builder().build(), + includedColumns, new ChunkType[0], new Class[0], new Class[0], data); + } catch (IOException e) { + throw new RuntimeException(e); + } ByteBuffer body = typedArrayToAlignedLittleEndianByteBuffer(data.getDataBody_asU8()); 
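A sketch of how the new reader is meant to be driven once this wiring is finished; the per-column metadata arrays below are placeholders (the call above passes empty arrays for now), and keeping a single reader per stream is an assumption based on the reader's in-progress message state:

    // One reader per Flight/Barrage stream: it accumulates partially received messages across calls.
    final WebBarrageStreamReader reader = new WebBarrageStreamReader();
    final WebBarrageMessage parsed = reader.parseFrom(
            BarrageSnapshotOptions.builder().build(), // StreamReaderOptions for this stream
            includedColumns, // BitSet of columns expected in the payload
            columnChunkTypes, columnTypes, componentTypes, // per-column type metadata
            data); // the FlightData frame just received
    if (parsed != null) {
        // A complete snapshot/update has been assembled; until then, parseFrom returns null.
    }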
Message headerMessage = Message .getRootAsMessage(TypedArrayHelper.wrap(data.getDataHeader_asU8())); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageMessage.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageMessage.java index efadd81aa5d..1b26f2ccadb 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageMessage.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageMessage.java @@ -1,3 +1,6 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// package io.deephaven.web.client.api.barrage; import io.deephaven.chunk.Chunk; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java index 4bf52878cd3..47a1587f107 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java @@ -1,5 +1,9 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// package io.deephaven.web.client.api.barrage; +import com.google.common.io.LittleEndianDataInputStream; import io.deephaven.barrage.flatbuf.BarrageMessageType; import io.deephaven.barrage.flatbuf.BarrageMessageWrapper; import io.deephaven.barrage.flatbuf.BarrageModColumnMetadata; @@ -7,9 +11,12 @@ import io.deephaven.chunk.ChunkType; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator; +import io.deephaven.extensions.barrage.util.FlatBufferIteratorAdapter; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.io.streams.ByteBufferInputStream; import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb.FlightData; import io.deephaven.util.datastructures.LongSizedDataStructure; -import io.deephaven.util.type.TypeUtils; import io.deephaven.web.shared.data.RangeSet; import io.deephaven.web.shared.data.ShiftedRange; import org.apache.arrow.flatbuf.Message; @@ -17,22 +24,17 @@ import org.apache.arrow.flatbuf.RecordBatch; import org.gwtproject.nio.TypedArrayHelper; -import java.io.DataInput; import java.io.IOException; -import java.math.BigDecimal; -import java.math.BigInteger; import java.nio.ByteBuffer; -import java.time.Instant; -import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.BitSet; import java.util.Iterator; +import java.util.PrimitiveIterator; /** - * Consumes FlightData fields from Flight/Barrage producers and builds - * browser-compatible WebBarrageMessage payloads that can be used to - * maintain table data. + * Consumes FlightData fields from Flight/Barrage producers and builds browser-compatible WebBarrageMessage payloads + * that can be used to maintain table data. 
*/ public class WebBarrageStreamReader { private static final int MAX_CHUNK_SIZE = Integer.MAX_VALUE - 8; @@ -47,8 +49,9 @@ public class WebBarrageStreamReader { // hold in-progress messages that aren't finished being built private WebBarrageMessage msg; - public WebBarrageMessage parseFrom(BitSet expectedColumns, ChunkType[] columnChunkTypes, Class[] columnTypes, Class[] componentTypes, - FlightData flightData) { + public WebBarrageMessage parseFrom(final StreamReaderOptions options, BitSet expectedColumns, + ChunkType[] columnChunkTypes, Class[] columnTypes, Class[] componentTypes, + FlightData flightData) throws IOException { ByteBuffer headerAsBB = TypedArrayHelper.wrap(flightData.getDataHeader_asU8()); Message header = headerAsBB.hasRemaining() ? Message.getRootAsMessage(headerAsBB) : null; @@ -57,7 +60,7 @@ public WebBarrageMessage parseFrom(BitSet expectedColumns, ChunkType[] columnChu BarrageMessageWrapper wrapper = BarrageMessageWrapper.getRootAsBarrageMessageWrapper(msgAsBB); if (wrapper.magic() != WebBarrageUtils.FLATBUFFER_MAGIC) { - //TODO warn + // TODO warn } else if (wrapper.msgType() == BarrageMessageType.BarrageUpdateMetadata) { if (msg != null) { throw new IllegalStateException( @@ -153,12 +156,13 @@ public WebBarrageMessage parseFrom(BitSet expectedColumns, ChunkType[] columnChu final RecordBatch batch = (RecordBatch) header.header(new RecordBatch()); msg.length = batch.length(); - + final LittleEndianDataInputStream ois = + new LittleEndianDataInputStream(new ByteBufferInputStream(body)); final Iterator fieldNodeIter = new FlatBufferIteratorAdapter<>(batch.nodesLength(), i -> new ChunkInputStreamGenerator.FieldNodeInfo(batch.nodes(i))); - final TLongArrayList bufferInfo = new TLongArrayList(batch.buffersLength()); + final long[] bufferInfo = new long[batch.buffersLength()]; for (int bi = 0; bi < batch.buffersLength(); ++bi) { int offset = LongSizedDataStructure.intSize("BufferInfo", batch.buffers(bi).offset()); int length = LongSizedDataStructure.intSize("BufferInfo", batch.buffers(bi).length()); @@ -168,9 +172,9 @@ public WebBarrageMessage parseFrom(BitSet expectedColumns, ChunkType[] columnChu // our parsers handle overhanging buffers length += Math.max(0, nextOffset - offset - length); } - bufferInfo.add(length); + bufferInfo[bi] = length; } - final TLongIterator bufferInfoIter = bufferInfo.iterator(); + final PrimitiveIterator.OfLong bufferInfoIter = Arrays.stream(bufferInfo).iterator(); // add and mod rows are never combined in a batch. 
all added rows must be received before the first @@ -256,172 +260,6 @@ public WebBarrageMessage parseFrom(BitSet expectedColumns, ChunkType[] columnChu return null; } - static WritableChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final ChunkType chunkType, final Class type, final Class componentType, - final Iterator fieldNodeIter, - final TLongIterator bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, final int offset, final int totalRows) throws IOException { - return extractChunkFromInputStream(options, 1, chunkType, type, componentType, fieldNodeIter, bufferInfoIter, is, - outChunk, offset, totalRows); - } - - static WritableChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final int factor, - final ChunkType chunkType, final Class type, final Class componentType, - final Iterator fieldNodeIter, - final TLongIterator bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, final int outOffset, final int totalRows) throws IOException { - switch (chunkType) { - case Boolean: - throw new UnsupportedOperationException("Booleans are reinterpreted as bytes"); - case Char: - return CharChunkInputStreamGenerator.extractChunkFromInputStream( - Character.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Byte: - if (type == Boolean.class || type == boolean.class) { - return BooleanChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - return ByteChunkInputStreamGenerator.extractChunkFromInputStream( - Byte.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Short: - return ShortChunkInputStreamGenerator.extractChunkFromInputStream( - Short.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Int: - return IntChunkInputStreamGenerator.extractChunkFromInputStream( - Integer.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Long: - if (factor == 1) { - return LongChunkInputStreamGenerator.extractChunkFromInputStream( - Long.BYTES, options, - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithConversion( - Long.BYTES, options, - (long v) -> (v*factor), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Float: - return FloatChunkInputStreamGenerator.extractChunkFromInputStream( - Float.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Double: - return DoubleChunkInputStreamGenerator.extractChunkFromInputStream( - Double.BYTES, options,fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Object: - if (type.isArray()) { - if (componentType == byte.class) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - is, - fieldNodeIter, - bufferInfoIter, - (buf, off, len) -> Arrays.copyOfRange(buf, off, off + len), - outChunk, outOffset, totalRows - ); - } else { - return VarListChunkInputStreamGenerator.extractChunkFromInputStream( - options, type, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - } - if (Vector.class.isAssignableFrom(type)) { - //noinspection unchecked - return VectorChunkInputStreamGenerator.extractChunkFromInputStream( - options, (Class>)type, componentType, fieldNodeIter, bufferInfoIter, is, - outChunk, outOffset, totalRows); - } - if (type == 
BigInteger.class) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - is, - fieldNodeIter, - bufferInfoIter, - BigInteger::new, - outChunk, outOffset, totalRows - ); - } - if (type == BigDecimal.class) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - is, - fieldNodeIter, - bufferInfoIter, - (final byte[] buf, final int offset, final int length) -> { - // read the int scale value as little endian, arrow's endianness. - final byte b1 = buf[offset]; - final byte b2 = buf[offset + 1]; - final byte b3 = buf[offset + 2]; - final byte b4 = buf[offset + 3]; - final int scale = b4 << 24 | (b3 & 0xFF) << 16 | (b2 & 0xFF) << 8 | (b1 & 0xFF); - return new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale); - }, - outChunk, outOffset, totalRows - ); - } - if (type == Instant.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Long.BYTES, options, io -> DateTimeUtils.epochNanosToInstant(io.readLong()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows - ); - } - if (type == ZonedDateTime.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Long.BYTES, options, io -> DateTimeUtils.epochNanosToZonedDateTime(io.readLong(), DateTimeUtils.timeZone()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows - ); - } - if (type == Byte.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Byte.BYTES, options, io -> TypeUtils.box(io.readByte()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows - ); - } - if (type == Character.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Character.BYTES, options, io -> TypeUtils.box(io.readChar()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows - ); - } - if (type == Double.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Double.BYTES, options, io -> TypeUtils.box(io.readDouble()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows - ); - } - if (type == Float.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Float.BYTES, options, io -> TypeUtils.box(io.readFloat()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows - ); - } - if (type == Integer.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Integer.BYTES, options, io -> TypeUtils.box(io.readInt()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows - ); - } - if (type == Long.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Long.BYTES, options, io -> TypeUtils.box(io.readLong()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows - ); - } - if (type == Short.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Short.BYTES, options, io -> TypeUtils.box(io.readShort()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows - ); - } - if (type == String.class || - options.columnConversionMode().equals(ColumnConversionMode.Stringify)) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream(is, fieldNodeIter, bufferInfoIter, - (buf, off, len) -> new String(buf, off, len, Charsets.UTF_8), outChunk, outOffset, totalRows); - } - throw 
new UnsupportedOperationException("Do not yet support column conversion mode: " + options.columnConversionMode()); - default: - throw new UnsupportedOperationException(); - } - } - private static RangeSet extractIndex(final ByteBuffer bb) { if (bb == null) { return RangeSet.empty(); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java index fad1712f87c..3f752d3796a 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java @@ -50,7 +50,8 @@ public static Uint8Array wrapMessage(FlatBufferBuilder innerBuilder, byte messag FlatBufferBuilder outerBuilder = new FlatBufferBuilder(1024); int messageOffset = BarrageMessageWrapper.createMsgPayloadVector(outerBuilder, innerBuilder.dataBuffer()); int offset = - BarrageMessageWrapper.createBarrageMessageWrapper(outerBuilder, FLATBUFFER_MAGIC, messageType, messageOffset); + BarrageMessageWrapper.createBarrageMessageWrapper(outerBuilder, FLATBUFFER_MAGIC, messageType, + messageOffset); outerBuilder.finish(offset); ByteBuffer byteBuffer = outerBuilder.dataBuffer(); return bbToUint8ArrayView(byteBuffer); @@ -63,7 +64,8 @@ public static Uint8Array bbToUint8ArrayView(ByteBuffer byteBuffer) { public static Uint8Array emptyMessage() { FlatBufferBuilder builder = new FlatBufferBuilder(1024); - int offset = BarrageMessageWrapper.createBarrageMessageWrapper(builder, FLATBUFFER_MAGIC, BarrageMessageType.None, 0); + int offset = BarrageMessageWrapper.createBarrageMessageWrapper(builder, FLATBUFFER_MAGIC, + BarrageMessageType.None, 0); builder.finish(offset); return bbToUint8ArrayView(builder.dataBuffer()); } @@ -220,7 +222,7 @@ public static ByteBuffer serializeRanges(Set rangeSets) { public static ByteBuffer typedArrayToAlignedLittleEndianByteBuffer(TypedArray data) { // Slice before wrapping to align contents - //TODO potentially only do this if not aligned already + // TODO potentially only do this if not aligned already ByteBuffer bb = TypedArrayHelper.wrap(data.slice()); bb.order(ByteOrder.LITTLE_ENDIAN); return bb; diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/com/google/common/io/LittleEndianDataInputStream.java b/web/client-api/src/main/resources/io/deephaven/web/super/com/google/common/io/LittleEndianDataInputStream.java new file mode 100644 index 00000000000..4d7ce7b44df --- /dev/null +++ b/web/client-api/src/main/resources/io/deephaven/web/super/com/google/common/io/LittleEndianDataInputStream.java @@ -0,0 +1,198 @@ +/* + * Copyright (C) 2007 The Guava Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package com.google.common.io; + +import org.gwtproject.nio.Numbers; + +import java.io.DataInput; +import java.io.DataInputStream; +import java.io.EOFException; +import java.io.FilterInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.Objects; + +/** + * An implementation of {@link DataInput} that uses little-endian byte ordering for reading {@code + * short}, {@code int}, {@code float}, {@code double}, and {@code long} values. + * + *
+ * <p><b>Note:</b> This class intentionally violates the specification of its supertype {@code
+ * DataInput}, which explicitly requires big-endian byte order.
+ *
+ * @author Chris Nokleberg
+ * @author Keith Bottner
+ * @since 8.0
+ */
+public final class LittleEndianDataInputStream extends FilterInputStream implements DataInput {
+
+    public LittleEndianDataInputStream(InputStream in) {
+        super(Objects.requireNonNull(in));
+    }
+
+    /** This method will throw an {@link UnsupportedOperationException}. */
+    @Override
+    public String readLine() {
+        throw new UnsupportedOperationException("readLine is not supported");
+    }
+
+    @Override
+    public void readFully(byte[] b) throws IOException {
+        int total = 0;
+        while (total < b.length) {
+            int result = read(b, total, b.length - total);
+            if (result == -1) {
+                // the DataInput contract requires readFully to fail on a short read
+                throw new EOFException();
+            }
+            total += result;
+        }
+    }
+
+    @Override
+    public void readFully(byte[] b, int off, int len) throws IOException {
+        int total = 0;
+        while (total < len) {
+            int result = read(b, off + total, len - total);
+            if (result == -1) {
+                // the DataInput contract requires readFully to fail on a short read
+                throw new EOFException();
+            }
+            total += result;
+        }
+    }
+
+    @Override
+    public int skipBytes(int n) throws IOException {
+        return (int) in.skip(n);
+    }
+
+    @Override
+    public int readUnsignedByte() throws IOException {
+        int b1 = in.read();
+        if (0 > b1) {
+            throw new EOFException();
+        }
+
+        return b1;
+    }
+
+    @Override
+    public int readUnsignedShort() throws IOException {
+        byte b1 = readAndCheckByte();
+        byte b2 = readAndCheckByte();
+
+        // mask each byte to prevent sign extension from corrupting the assembled value
+        return ((b2 & 0xFF) << 8) | (b1 & 0xFF);
+    }
+
+    @Override
+    public int readInt() throws IOException {
+        byte b1 = readAndCheckByte();
+        byte b2 = readAndCheckByte();
+        byte b3 = readAndCheckByte();
+        byte b4 = readAndCheckByte();
+
+        // mask each byte to prevent sign extension from corrupting the assembled value
+        int result = b4 & 0xFF;
+        result = (result << 8) | (b3 & 0xFF);
+        result = (result << 8) | (b2 & 0xFF);
+        result = (result << 8) | (b1 & 0xFF);
+
+        return result;
+    }
+
+    @Override
+    public long readLong() throws IOException {
+        byte b1 = readAndCheckByte();
+        byte b2 = readAndCheckByte();
+        byte b3 = readAndCheckByte();
+        byte b4 = readAndCheckByte();
+        byte b5 = readAndCheckByte();
+        byte b6 = readAndCheckByte();
+        byte b7 = readAndCheckByte();
+        byte b8 = readAndCheckByte();
+
+        // accumulate in a long, masking each byte, so the high four bytes are preserved
+        long result = b8 & 0xFF;
+        result = (result << 8) | (b7 & 0xFF);
+        result = (result << 8) | (b6 & 0xFF);
+        result = (result << 8) | (b5 & 0xFF);
+        result = (result << 8) | (b4 & 0xFF);
+        result = (result << 8) | (b3 & 0xFF);
+        result = (result << 8) | (b2 & 0xFF);
+        result = (result << 8) | (b1 & 0xFF);
+
+        return result;
+    }
+
+    /**
+     * Reads a {@code float} as specified by {@link DataInputStream#readFloat()}, except using
+     * little-endian byte order.
+     *
+     * @return the next four bytes of the input stream, interpreted as a {@code float} in
+     *         little-endian byte order
+     * @throws IOException if an I/O error occurs
+     */
+    @Override
+    public float readFloat() throws IOException {
+        return Numbers.intBitsToFloat(readInt());
+    }
+
+    /**
+     * Reads a {@code double} as specified by {@link DataInputStream#readDouble()}, except using
+     * little-endian byte order.
+ * + * @return the next eight bytes of the input stream, interpreted as a {@code double} in + * little-endian byte order + * @throws IOException if an I/O error occurs + */ + @Override + public double readDouble() throws IOException { + return Numbers.longBitsToDouble(readLong()); + } + + @Override + public String readUTF() throws IOException { + throw new UnsupportedOperationException("readUTF"); + } + + @Override + public short readShort() throws IOException { + return (short) readUnsignedShort(); + } + + @Override + public char readChar() throws IOException { + return (char) readUnsignedShort(); + } + + @Override + public byte readByte() throws IOException { + return (byte) readUnsignedByte(); + } + + @Override + public boolean readBoolean() throws IOException { + return readUnsignedByte() != 0; + } + + private byte readAndCheckByte() throws IOException, EOFException { + int b1 = in.read(); + + if (-1 == b1) { + throw new EOFException(); + } + + return (byte) b1; + } +} diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/com/google/common/io/LittleEndianDataOutputStream.java b/web/client-api/src/main/resources/io/deephaven/web/super/com/google/common/io/LittleEndianDataOutputStream.java new file mode 100644 index 00000000000..12651d83ddf --- /dev/null +++ b/web/client-api/src/main/resources/io/deephaven/web/super/com/google/common/io/LittleEndianDataOutputStream.java @@ -0,0 +1,167 @@ +/* + * Copyright (C) 2007 The Guava Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. + */ +package com.google.common.io; + +import org.gwtproject.nio.Numbers; + +import java.io.DataOutput; +import java.io.DataOutputStream; +import java.io.FilterOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.util.Objects; + +/** + * An implementation of {@link DataOutput} that uses little-endian byte ordering for writing {@code + * char}, {@code short}, {@code int}, {@code float}, {@code double}, and {@code long} values. + * + *
Note: This class intentionally violates the specification of its supertype {@code + * DataOutput}, which explicitly requires big-endian byte order. + * + * @author Chris Nokleberg + * @author Keith Bottner + * @since 8.0 + */ +public final class LittleEndianDataOutputStream extends FilterOutputStream implements DataOutput { + + /** + * Creates a {@code LittleEndianDataOutputStream} that wraps the given stream. + * + * @param out the stream to delegate to + */ + public LittleEndianDataOutputStream(OutputStream out) { + super(new DataOutputStream(Objects.requireNonNull(out))); + } + + @Override + public void write(byte[] b, int off, int len) throws IOException { + // Override slow FilterOutputStream impl + out.write(b, off, len); + } + + @Override + public void writeBoolean(boolean v) throws IOException { + ((DataOutputStream) out).writeBoolean(v); + } + + @Override + public void writeByte(int v) throws IOException { + ((DataOutputStream) out).writeByte(v); + } + + /** + * @deprecated The semantics of {@code writeBytes(String s)} are considered dangerous. Please use + * {@link #writeUTF(String s)}, {@link #writeChars(String s)} or another write method instead. + */ + @Deprecated + @Override + public void writeBytes(String s) throws IOException { + ((DataOutputStream) out).writeBytes(s); + } + + /** + * Writes a char as specified by {@link DataOutputStream#writeChar(int)}, except using + * little-endian byte order. + * + * @throws IOException if an I/O error occurs + */ + @Override + public void writeChar(int v) throws IOException { + writeShort(v); + } + + /** + * Writes a {@code String} as specified by {@link DataOutputStream#writeChars(String)}, except + * each character is written using little-endian byte order. + * + * @throws IOException if an I/O error occurs + */ + @Override + public void writeChars(String s) throws IOException { + for (int i = 0; i < s.length(); i++) { + writeChar(s.charAt(i)); + } + } + + /** + * Writes a {@code double} as specified by {@link DataOutputStream#writeDouble(double)}, except + * using little-endian byte order. + * + * @throws IOException if an I/O error occurs + */ + @Override + public void writeDouble(double v) throws IOException { + writeLong(Double.doubleToLongBits(v)); + } + + /** + * Writes a {@code float} as specified by {@link DataOutputStream#writeFloat(float)}, except using + * little-endian byte order. + * + * @throws IOException if an I/O error occurs + */ + @Override + public void writeFloat(float v) throws IOException { + writeInt(Numbers.floatToIntBits(v)); + } + + /** + * Writes an {@code int} as specified by {@link DataOutputStream#writeInt(int)}, except using + * little-endian byte order. + * + * @throws IOException if an I/O error occurs + */ + @Override + public void writeInt(int v) throws IOException { + out.write(0xFF & v); + out.write(0xFF & (v >> 8)); + out.write(0xFF & (v >> 16)); + out.write(0xFF & (v >> 24)); + } + + /** + * Writes a {@code long} as specified by {@link DataOutputStream#writeLong(long)}, except using + * little-endian byte order. 
+ * + * @throws IOException if an I/O error occurs + */ + @Override + public void writeLong(long v) throws IOException { + out.write(0xFF & (int) v); + out.write(0xFF & (int) (v >> 8)); + out.write(0xFF & (int) (v >> 16)); + out.write(0xFF & (int) (v >> 24)); + out.write(0xFF & (int) (v >> 32)); + out.write(0xFF & (int) (v >> 40)); + out.write(0xFF & (int) (v >> 48)); + out.write(0xFF & (int) (v >> 56)); + } + + /** + * Writes a {@code short} as specified by {@link DataOutputStream#writeShort(int)}, except using + * little-endian byte order. + * + * @throws IOException if an I/O error occurs + */ + @Override + public void writeShort(int v) throws IOException { + out.write(0xFF & v); + out.write(0xFF & (v >> 8)); + } + + @Override + public void writeUTF(String s) throws IOException { + throw new UnsupportedOperationException("modified utf-8"); + } +} diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/chunk/util/pools/MultiChunkPool.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/chunk/util/pools/MultiChunkPool.java index 0469a3e8f47..f4e5e3cc2c7 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/chunk/util/pools/MultiChunkPool.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/chunk/util/pools/MultiChunkPool.java @@ -15,86 +15,86 @@ * share a common pool and others to allocate their own. */ public final class MultiChunkPool { - - private static final MultiChunkPool SHARED_POOL = new MultiChunkPool(); - private static final ThreadLocal POOL_THREAD_LOCAL = ThreadLocal.withInitial(() -> SHARED_POOL); - - public static void enableDedicatedPoolForThisThread() { - if (POOL_THREAD_LOCAL.get() == SHARED_POOL) { - POOL_THREAD_LOCAL.set(new MultiChunkPool()); - } - } - - public static MultiChunkPool forThisThread() { - return POOL_THREAD_LOCAL.get(); - } - - private final BooleanChunkPool booleanChunkPool = new BooleanChunkPool(); - private final CharChunkPool charChunkPool = new CharChunkPool(); - private final ByteChunkPool byteChunkPool = new ByteChunkPool(); - private final ShortChunkPool shortChunkPool = new ShortChunkPool(); - private final IntChunkPool intChunkPool = new IntChunkPool(); - private final LongChunkPool longChunkPool = new LongChunkPool(); - private final FloatChunkPool floatChunkPool = new FloatChunkPool(); - private final DoubleChunkPool doubleChunkPool = new DoubleChunkPool(); - private final ObjectChunkPool objectChunkPool = new ObjectChunkPool(); - - private final Map pools; - - { - final EnumMap tempPools = new EnumMap<>(ChunkType.class); - tempPools.put(ChunkType.Boolean, booleanChunkPool); - tempPools.put(ChunkType.Char, charChunkPool); - tempPools.put(ChunkType.Byte, byteChunkPool); - tempPools.put(ChunkType.Short, shortChunkPool); - tempPools.put(ChunkType.Int, intChunkPool); - tempPools.put(ChunkType.Long, longChunkPool); - tempPools.put(ChunkType.Float, floatChunkPool); - tempPools.put(ChunkType.Double, doubleChunkPool); - tempPools.put(ChunkType.Object, objectChunkPool); - pools = Collections.unmodifiableMap(tempPools); - } - - private MultiChunkPool() {} - - @SuppressWarnings("unused") - public ChunkPool getChunkPool(@NotNull final ChunkType chunkType) { - return pools.get(chunkType); - } - - public BooleanChunkPool getBooleanChunkPool() { - return booleanChunkPool; - } - - public CharChunkPool getCharChunkPool() { - return charChunkPool; - } - - public ByteChunkPool getByteChunkPool() { - return byteChunkPool; - } - - public ShortChunkPool 
getShortChunkPool() { - return shortChunkPool; - } - - public IntChunkPool getIntChunkPool() { - return intChunkPool; - } - - public LongChunkPool getLongChunkPool() { - return longChunkPool; - } - - public FloatChunkPool getFloatChunkPool() { - return floatChunkPool; - } - - public DoubleChunkPool getDoubleChunkPool() { - return doubleChunkPool; - } - - public ObjectChunkPool getObjectChunkPool() { - return objectChunkPool; - } +// +// private static final MultiChunkPool SHARED_POOL = new MultiChunkPool(); +// private static final ThreadLocal POOL_THREAD_LOCAL = ThreadLocal.withInitial(() -> SHARED_POOL); +// +// public static void enableDedicatedPoolForThisThread() { +// if (POOL_THREAD_LOCAL.get() == SHARED_POOL) { +// POOL_THREAD_LOCAL.set(new MultiChunkPool()); +// } +// } +// +// public static MultiChunkPool forThisThread() { +// return POOL_THREAD_LOCAL.get(); +// } +// +// private final BooleanChunkPool booleanChunkPool = new BooleanChunkPool(); +// private final CharChunkPool charChunkPool = new CharChunkPool(); +// private final ByteChunkPool byteChunkPool = new ByteChunkPool(); +// private final ShortChunkPool shortChunkPool = new ShortChunkPool(); +// private final IntChunkPool intChunkPool = new IntChunkPool(); +// private final LongChunkPool longChunkPool = new LongChunkPool(); +// private final FloatChunkPool floatChunkPool = new FloatChunkPool(); +// private final DoubleChunkPool doubleChunkPool = new DoubleChunkPool(); +// private final ObjectChunkPool objectChunkPool = new ObjectChunkPool(); +// +// private final Map pools; +// +// { +// final EnumMap tempPools = new EnumMap<>(ChunkType.class); +// tempPools.put(ChunkType.Boolean, booleanChunkPool); +// tempPools.put(ChunkType.Char, charChunkPool); +// tempPools.put(ChunkType.Byte, byteChunkPool); +// tempPools.put(ChunkType.Short, shortChunkPool); +// tempPools.put(ChunkType.Int, intChunkPool); +// tempPools.put(ChunkType.Long, longChunkPool); +// tempPools.put(ChunkType.Float, floatChunkPool); +// tempPools.put(ChunkType.Double, doubleChunkPool); +// tempPools.put(ChunkType.Object, objectChunkPool); +// pools = Collections.unmodifiableMap(tempPools); +// } +// +// private MultiChunkPool() {} +// +// @SuppressWarnings("unused") +// public ChunkPool getChunkPool(@NotNull final ChunkType chunkType) { +// return pools.get(chunkType); +// } +// +// public BooleanChunkPool getBooleanChunkPool() { +// return booleanChunkPool; +// } +// +// public CharChunkPool getCharChunkPool() { +// return charChunkPool; +// } +// +// public ByteChunkPool getByteChunkPool() { +// return byteChunkPool; +// } +// +// public ShortChunkPool getShortChunkPool() { +// return shortChunkPool; +// } +// +// public IntChunkPool getIntChunkPool() { +// return intChunkPool; +// } +// +// public LongChunkPool getLongChunkPool() { +// return longChunkPool; +// } +// +// public FloatChunkPool getFloatChunkPool() { +// return floatChunkPool; +// } +// +// public DoubleChunkPool getDoubleChunkPool() { +// return doubleChunkPool; +// } +// +// public ObjectChunkPool getObjectChunkPool() { +// return objectChunkPool; +// } } diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataInput.java b/web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataInput.java new file mode 100644 index 00000000000..5e00eeb89ef --- /dev/null +++ b/web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataInput.java @@ -0,0 +1,19 @@ +package java.io; + +public interface DataInput { + boolean readBoolean() throws IOException; + 
byte readByte() throws IOException; + char readChar() throws IOException; + double readDouble() throws IOException; + float readFloat() throws IOException; + void readFully(byte[] b) throws IOException; + void readFully(byte[] b, int off, int len) throws IOException; + int readInt() throws IOException; + String readLine() throws IOException; + long readLong() throws IOException; + short readShort() throws IOException; + int readUnsignedByte() throws IOException; + int readUnsignedShort() throws IOException; + String readUTF() throws IOException; + int skipBytes(int n) throws IOException; +} \ No newline at end of file diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataOutput.java b/web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataOutput.java new file mode 100644 index 00000000000..1cbdbc50265 --- /dev/null +++ b/web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataOutput.java @@ -0,0 +1,18 @@ +package java.io; + +public interface DataOutput { + void write(byte[] b) throws IOException; + void write(byte[] b, int off, int len) throws IOException; + void write(int b) throws IOException; + void writeBoolean(boolean v) throws IOException; + void writeByte(int v) throws IOException; + void writeBytes(String s) throws IOException; + void writeChar(int v) throws IOException; + void writeChars(String s) throws IOException; + void writeDouble(double v) throws IOException; + void writeFloat(float v) throws IOException; + void writeInt(int v) throws IOException; + void writeLong(long v) throws IOException; + void writeShort(int v) throws IOException; + void writeUTF(String s) throws IOException; +} \ No newline at end of file diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataOutputStream.java b/web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataOutputStream.java new file mode 100644 index 00000000000..1e8accf6be3 --- /dev/null +++ b/web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataOutputStream.java @@ -0,0 +1,104 @@ +package java.io; + +import org.gwtproject.nio.Numbers; + +import java.nio.charset.StandardCharsets; + +public class DataOutputStream extends FilterOutputStream implements DataOutput { + protected int written; + + public DataOutputStream(OutputStream out) { + super(out); + } + + @Override + public void write(int b) throws IOException { + super.write(b); + written++; + } + + @Override + public void write(byte[] b, int off, int len) throws IOException { + super.write(b, off, len); + written += len; + } + + @Override + public void writeBoolean(boolean b) throws IOException { + write(b ? 
1 : 0); + } + + @Override + public void writeByte(int i) throws IOException { + write(i); + } + + @Override + public void writeShort(int i) throws IOException { + out.write((i >> 8) & 0xFF); + out.write((i >> 0) & 0xFF); + written += 2; + } + + @Override + public void writeChar(int i) throws IOException { + out.write((i >> 8) & 0xFF); + out.write((i >> 0) & 0xFF); + written += 2; + } + + @Override + public void writeInt(int i) throws IOException { + out.write((i >> 24) & 0xFF); + out.write((i >> 16) & 0xFF); + out.write((i >> 8) & 0xFF); + out.write((i >> 0) & 0xFF); + written += 4; + } + + @Override + public void writeLong(long l) throws IOException { + out.write((int) (l >> 56) & 0xFF); + out.write((int) (l >> 48) & 0xFF); + out.write((int) (l >> 40) & 0xFF); + out.write((int) (l >> 32) & 0xFF); + out.write((int) (l >> 24) & 0xFF); + out.write((int) (l >> 16) & 0xFF); + out.write((int) (l >> 8) & 0xFF); + out.write((int) (l >> 0) & 0xFF); + written += 8; + } + + @Override + public void writeFloat(float v) throws IOException { + writeInt(Numbers.floatToIntBits(v)); + } + + @Override + public void writeDouble(double v) throws IOException { + writeLong(Numbers.doubleToRawLongBits(v)); + } + + @Override + public void writeBytes(String s) throws IOException { + for (int i = 0; i < s.length(); i++) { + out.write(s.charAt(i) & 0xFF); + } + written += s.length(); + } + + @Override + public void writeChars(String s) throws IOException { + for (int i = 0; i < s.length(); i++) { + char c = s.charAt(i); + out.write((c >> 8) & 0xFF); + out.write(c & 0xFF); + } + written += s.length() * 2; + } + + @Override + public void writeUTF(String s) throws IOException { + throw new UnsupportedOperationException("modified utf-8"); + } +} \ No newline at end of file diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/java/io/EOFException.java b/web/client-api/src/main/resources/io/deephaven/web/super/java/io/EOFException.java new file mode 100644 index 00000000000..cb5a6fa2207 --- /dev/null +++ b/web/client-api/src/main/resources/io/deephaven/web/super/java/io/EOFException.java @@ -0,0 +1,10 @@ +package java.io; + +public class EOFException extends IOException { + public EOFException() { + } + + public EOFException(String s) { + super(s); + } +} \ No newline at end of file diff --git a/web/client-api/src/main/resources/io/grpc/Grpc.gwt.xml b/web/client-api/src/main/resources/io/grpc/Grpc.gwt.xml new file mode 100644 index 00000000000..b10ac639bc3 --- /dev/null +++ b/web/client-api/src/main/resources/io/grpc/Grpc.gwt.xml @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/web/client-api/src/main/resources/org/immutables/value/Immutables.gwt.xml b/web/client-api/src/main/resources/org/immutables/value/Immutables.gwt.xml new file mode 100644 index 00000000000..ec945398599 --- /dev/null +++ b/web/client-api/src/main/resources/org/immutables/value/Immutables.gwt.xml @@ -0,0 +1,3 @@ + + + \ No newline at end of file From a157c15c75b2f4e72ebb2e7c9d9e6f1be7403c9a Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Thu, 18 Jan 2024 15:33:57 -0600 Subject: [PATCH 008/219] Extract typed chunk pool interfaces --- .../io/deephaven/chunk/util/pools/BooleanChunkSoftPool.java | 2 +- .../java/io/deephaven/chunk/util/pools/ByteChunkSoftPool.java | 2 +- .../java/io/deephaven/chunk/util/pools/CharChunkSoftPool.java | 2 +- .../java/io/deephaven/chunk/util/pools/DoubleChunkSoftPool.java | 2 +- .../java/io/deephaven/chunk/util/pools/FloatChunkSoftPool.java | 2 +- 
.../java/io/deephaven/chunk/util/pools/IntChunkSoftPool.java | 2 +- .../java/io/deephaven/chunk/util/pools/LongChunkSoftPool.java | 2 +- .../java/io/deephaven/chunk/util/pools/ShortChunkSoftPool.java | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/BooleanChunkSoftPool.java b/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/BooleanChunkSoftPool.java index 5abc6b31ed1..f5cb9d6de13 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/BooleanChunkSoftPool.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/BooleanChunkSoftPool.java @@ -111,7 +111,7 @@ public WritableBooleanChunk takeWritableBooleanChunk(fi // noinspection unchecked return ChunkPoolReleaseTracking.onTake(result); } - // noinspection unchecked + // noinspection return ChunkPoolReleaseTracking.onTake(WritableBooleanChunk.makeWritableChunkForPool(capacity)); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/ByteChunkSoftPool.java b/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/ByteChunkSoftPool.java index 727758718c7..4dba20d0ceb 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/ByteChunkSoftPool.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/ByteChunkSoftPool.java @@ -111,7 +111,7 @@ public WritableByteChunk takeWritableByteChunk(final in // noinspection unchecked return ChunkPoolReleaseTracking.onTake(result); } - // noinspection unchecked + // noinspection return ChunkPoolReleaseTracking.onTake(WritableByteChunk.makeWritableChunkForPool(capacity)); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/CharChunkSoftPool.java b/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/CharChunkSoftPool.java index 1648d0b38e0..3625bd188f7 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/CharChunkSoftPool.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/CharChunkSoftPool.java @@ -107,7 +107,7 @@ public WritableCharChunk takeWritableCharChunk(final in // noinspection unchecked return ChunkPoolReleaseTracking.onTake(result); } - // noinspection unchecked + // noinspection return ChunkPoolReleaseTracking.onTake(WritableCharChunk.makeWritableChunkForPool(capacity)); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/DoubleChunkSoftPool.java b/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/DoubleChunkSoftPool.java index 0215f7eda6d..47186c1697f 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/DoubleChunkSoftPool.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/DoubleChunkSoftPool.java @@ -111,7 +111,7 @@ public WritableDoubleChunk takeWritableDoubleChunk(fina // noinspection unchecked return ChunkPoolReleaseTracking.onTake(result); } - // noinspection unchecked + // noinspection return ChunkPoolReleaseTracking.onTake(WritableDoubleChunk.makeWritableChunkForPool(capacity)); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/FloatChunkSoftPool.java b/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/FloatChunkSoftPool.java index 34e0cd3f749..42fc1571827 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/FloatChunkSoftPool.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/FloatChunkSoftPool.java @@ -111,7 +111,7 @@ public WritableFloatChunk takeWritableFloatChunk(final // noinspection unchecked return ChunkPoolReleaseTracking.onTake(result); } - // noinspection 
unchecked + // noinspection return ChunkPoolReleaseTracking.onTake(WritableFloatChunk.makeWritableChunkForPool(capacity)); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/IntChunkSoftPool.java b/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/IntChunkSoftPool.java index 8c0d150a4c7..b7a32ae97a1 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/IntChunkSoftPool.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/IntChunkSoftPool.java @@ -111,7 +111,7 @@ public WritableIntChunk takeWritableIntChunk(final int // noinspection unchecked return ChunkPoolReleaseTracking.onTake(result); } - // noinspection unchecked + // noinspection return ChunkPoolReleaseTracking.onTake(WritableIntChunk.makeWritableChunkForPool(capacity)); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/LongChunkSoftPool.java b/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/LongChunkSoftPool.java index 1cdb5df7606..83b6bd432b7 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/LongChunkSoftPool.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/LongChunkSoftPool.java @@ -111,7 +111,7 @@ public WritableLongChunk takeWritableLongChunk(final in // noinspection unchecked return ChunkPoolReleaseTracking.onTake(result); } - // noinspection unchecked + // noinspection return ChunkPoolReleaseTracking.onTake(WritableLongChunk.makeWritableChunkForPool(capacity)); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/ShortChunkSoftPool.java b/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/ShortChunkSoftPool.java index 7ac69c8c36c..6b7f59df665 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/ShortChunkSoftPool.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/util/pools/ShortChunkSoftPool.java @@ -111,7 +111,7 @@ public WritableShortChunk takeWritableShortChunk(final // noinspection unchecked return ChunkPoolReleaseTracking.onTake(result); } - // noinspection unchecked + // noinspection return ChunkPoolReleaseTracking.onTake(WritableShortChunk.makeWritableChunkForPool(capacity)); } From 113e21bfdee58a2038b3340d3656d0730d4ec840 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Thu, 18 Jan 2024 16:27:38 -0600 Subject: [PATCH 009/219] Remove unnecessary method call --- .../java/io/deephaven/chunk/ResettableBooleanChunk.java | 6 +++--- .../main/java/io/deephaven/chunk/ResettableByteChunk.java | 6 +++--- .../main/java/io/deephaven/chunk/ResettableCharChunk.java | 6 +++--- .../main/java/io/deephaven/chunk/ResettableDoubleChunk.java | 6 +++--- .../main/java/io/deephaven/chunk/ResettableFloatChunk.java | 6 +++--- .../main/java/io/deephaven/chunk/ResettableIntChunk.java | 6 +++--- .../main/java/io/deephaven/chunk/ResettableLongChunk.java | 6 +++--- .../main/java/io/deephaven/chunk/ResettableObjectChunk.java | 6 +++--- .../main/java/io/deephaven/chunk/ResettableShortChunk.java | 6 +++--- .../io/deephaven/chunk/ResettableWritableBooleanChunk.java | 6 +++--- .../io/deephaven/chunk/ResettableWritableByteChunk.java | 6 +++--- .../io/deephaven/chunk/ResettableWritableCharChunk.java | 6 +++--- .../io/deephaven/chunk/ResettableWritableDoubleChunk.java | 6 +++--- .../io/deephaven/chunk/ResettableWritableFloatChunk.java | 6 +++--- .../java/io/deephaven/chunk/ResettableWritableIntChunk.java | 6 +++--- .../io/deephaven/chunk/ResettableWritableLongChunk.java | 6 +++--- .../io/deephaven/chunk/ResettableWritableObjectChunk.java | 6 +++--- 
.../io/deephaven/chunk/ResettableWritableShortChunk.java | 6 +++--- 18 files changed, 54 insertions(+), 54 deletions(-) diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableBooleanChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableBooleanChunk.java index a0ea3d0cef8..c61f51cb6e5 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableBooleanChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableBooleanChunk.java @@ -21,9 +21,9 @@ public class ResettableBooleanChunk implements ResettableReadOnlyChunk { public static ResettableBooleanChunk makeResettableChunk() { - // if (POOL_RESETTABLE_CHUNKS) { - // return MultiChunkPool.forThisThread().takeResettableBooleanChunk(); - // } + if (POOL_RESETTABLE_CHUNKS) { + return MultiChunkPool.forThisThread().takeResettableBooleanChunk(); + } return new ResettableBooleanChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableByteChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableByteChunk.java index 047ab43e77e..ee6cac130fb 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableByteChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableByteChunk.java @@ -21,9 +21,9 @@ public class ResettableByteChunk implements ResettableReadOnlyChunk { public static ResettableByteChunk makeResettableChunk() { - // if (POOL_RESETTABLE_CHUNKS) { - // return MultiChunkPool.forThisThread().takeResettableByteChunk(); - // } + if (POOL_RESETTABLE_CHUNKS) { + return MultiChunkPool.forThisThread().takeResettableByteChunk(); + } return new ResettableByteChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableCharChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableCharChunk.java index 6a6106e498c..c18d43cd8b0 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableCharChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableCharChunk.java @@ -17,9 +17,9 @@ public class ResettableCharChunk implements ResettableReadOnlyChunk { public static ResettableCharChunk makeResettableChunk() { - // if (POOL_RESETTABLE_CHUNKS) { - // return MultiChunkPool.forThisThread().takeResettableCharChunk(); - // } + if (POOL_RESETTABLE_CHUNKS) { + return MultiChunkPool.forThisThread().takeResettableCharChunk(); + } return new ResettableCharChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableDoubleChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableDoubleChunk.java index 5c3a5c556ff..db9118a2e97 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableDoubleChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableDoubleChunk.java @@ -21,9 +21,9 @@ public class ResettableDoubleChunk implements ResettableReadOnlyChunk { public static ResettableDoubleChunk makeResettableChunk() { - // if (POOL_RESETTABLE_CHUNKS) { - // return MultiChunkPool.forThisThread().takeResettableDoubleChunk(); - // } + if (POOL_RESETTABLE_CHUNKS) { + return MultiChunkPool.forThisThread().takeResettableDoubleChunk(); + } return new ResettableDoubleChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableFloatChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableFloatChunk.java index eeefb014eb5..e739e35364f 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableFloatChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableFloatChunk.java @@ -21,9 +21,9 @@ public class 
ResettableFloatChunk implements ResettableReadOnlyChunk { public static ResettableFloatChunk makeResettableChunk() { - // if (POOL_RESETTABLE_CHUNKS) { - // return MultiChunkPool.forThisThread().takeResettableFloatChunk(); - // } + if (POOL_RESETTABLE_CHUNKS) { + return MultiChunkPool.forThisThread().takeResettableFloatChunk(); + } return new ResettableFloatChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableIntChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableIntChunk.java index d1f71e47918..06af943913b 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableIntChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableIntChunk.java @@ -21,9 +21,9 @@ public class ResettableIntChunk implements ResettableReadOnlyChunk { public static ResettableIntChunk makeResettableChunk() { - // if (POOL_RESETTABLE_CHUNKS) { - // return MultiChunkPool.forThisThread().takeResettableIntChunk(); - // } + if (POOL_RESETTABLE_CHUNKS) { + return MultiChunkPool.forThisThread().takeResettableIntChunk(); + } return new ResettableIntChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableLongChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableLongChunk.java index 91b70991084..20967a341d7 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableLongChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableLongChunk.java @@ -21,9 +21,9 @@ public class ResettableLongChunk implements ResettableReadOnlyChunk { public static ResettableLongChunk makeResettableChunk() { - // if (POOL_RESETTABLE_CHUNKS) { - // return MultiChunkPool.forThisThread().takeResettableLongChunk(); - // } + if (POOL_RESETTABLE_CHUNKS) { + return MultiChunkPool.forThisThread().takeResettableLongChunk(); + } return new ResettableLongChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableObjectChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableObjectChunk.java index 181ce7a240a..94988281898 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableObjectChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableObjectChunk.java @@ -21,9 +21,9 @@ public class ResettableObjectChunk implements ResettableReadOnlyChunk { public static ResettableObjectChunk makeResettableChunk() { - // if (POOL_RESETTABLE_CHUNKS) { - // return MultiChunkPool.forThisThread().takeResettableObjectChunk(); - // } + if (POOL_RESETTABLE_CHUNKS) { + return MultiChunkPool.forThisThread().takeResettableObjectChunk(); + } return new ResettableObjectChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableShortChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableShortChunk.java index 773e37c3c2e..3d15e10884a 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableShortChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableShortChunk.java @@ -21,9 +21,9 @@ public class ResettableShortChunk implements ResettableReadOnlyChunk { public static ResettableShortChunk makeResettableChunk() { - // if (POOL_RESETTABLE_CHUNKS) { - // return MultiChunkPool.forThisThread().takeResettableShortChunk(); - // } + if (POOL_RESETTABLE_CHUNKS) { + return MultiChunkPool.forThisThread().takeResettableShortChunk(); + } return new ResettableShortChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableBooleanChunk.java 
b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableBooleanChunk.java index d622ee69cc1..f895aed46fa 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableBooleanChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableBooleanChunk.java @@ -21,9 +21,9 @@ public class ResettableWritableBooleanChunk implements ResettableWritableChunk { public static ResettableWritableBooleanChunk makeResettableChunk() { - // if (POOL_RESETTABLE_CHUNKS) { - // return MultiChunkPool.forThisThread().takeResettableWritableBooleanChunk(); - // } + if (POOL_RESETTABLE_CHUNKS) { + return MultiChunkPool.forThisThread().takeResettableWritableBooleanChunk(); + } return new ResettableWritableBooleanChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableByteChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableByteChunk.java index bd0102d8fa5..3636270fb79 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableByteChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableByteChunk.java @@ -21,9 +21,9 @@ public class ResettableWritableByteChunk implements ResettableWritableChunk { public static ResettableWritableByteChunk makeResettableChunk() { - // if (POOL_RESETTABLE_CHUNKS) { - // return MultiChunkPool.forThisThread().takeResettableWritableByteChunk(); - // } + if (POOL_RESETTABLE_CHUNKS) { + return MultiChunkPool.forThisThread().takeResettableWritableByteChunk(); + } return new ResettableWritableByteChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableCharChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableCharChunk.java index 36132fe46be..dd47b435998 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableCharChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableCharChunk.java @@ -17,9 +17,9 @@ public class ResettableWritableCharChunk implements ResettableWritableChunk { public static ResettableWritableCharChunk makeResettableChunk() { - // if (POOL_RESETTABLE_CHUNKS) { - // return MultiChunkPool.forThisThread().takeResettableWritableCharChunk(); - // } + if (POOL_RESETTABLE_CHUNKS) { + return MultiChunkPool.forThisThread().takeResettableWritableCharChunk(); + } return new ResettableWritableCharChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableDoubleChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableDoubleChunk.java index f6a17b77916..00f7a879c75 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableDoubleChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableDoubleChunk.java @@ -21,9 +21,9 @@ public class ResettableWritableDoubleChunk implements ResettableWritableChunk { public static ResettableWritableDoubleChunk makeResettableChunk() { - // if (POOL_RESETTABLE_CHUNKS) { - // return MultiChunkPool.forThisThread().takeResettableWritableDoubleChunk(); - // } + if (POOL_RESETTABLE_CHUNKS) { + return MultiChunkPool.forThisThread().takeResettableWritableDoubleChunk(); + } return new ResettableWritableDoubleChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableFloatChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableFloatChunk.java index cfb93fbfe85..003165f905c 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableFloatChunk.java +++ 
b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableFloatChunk.java @@ -21,9 +21,9 @@ public class ResettableWritableFloatChunk implements ResettableWritableChunk { public static ResettableWritableFloatChunk makeResettableChunk() { - // if (POOL_RESETTABLE_CHUNKS) { - // return MultiChunkPool.forThisThread().takeResettableWritableFloatChunk(); - // } + if (POOL_RESETTABLE_CHUNKS) { + return MultiChunkPool.forThisThread().takeResettableWritableFloatChunk(); + } return new ResettableWritableFloatChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableIntChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableIntChunk.java index a456bbf49e6..170cf47e4df 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableIntChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableIntChunk.java @@ -21,9 +21,9 @@ public class ResettableWritableIntChunk implements ResettableWritableChunk { public static ResettableWritableIntChunk makeResettableChunk() { - // if (POOL_RESETTABLE_CHUNKS) { - // return MultiChunkPool.forThisThread().takeResettableWritableIntChunk(); - // } + if (POOL_RESETTABLE_CHUNKS) { + return MultiChunkPool.forThisThread().takeResettableWritableIntChunk(); + } return new ResettableWritableIntChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableLongChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableLongChunk.java index e160012ccc2..7635704ce27 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableLongChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableLongChunk.java @@ -21,9 +21,9 @@ public class ResettableWritableLongChunk implements ResettableWritableChunk { public static ResettableWritableLongChunk makeResettableChunk() { - // if (POOL_RESETTABLE_CHUNKS) { - // return MultiChunkPool.forThisThread().takeResettableWritableLongChunk(); - // } + if (POOL_RESETTABLE_CHUNKS) { + return MultiChunkPool.forThisThread().takeResettableWritableLongChunk(); + } return new ResettableWritableLongChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableObjectChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableObjectChunk.java index 9e1f64be430..9062ac29e15 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableObjectChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableObjectChunk.java @@ -21,9 +21,9 @@ public class ResettableWritableObjectChunk implements ResettableWritableChunk { public static ResettableWritableObjectChunk makeResettableChunk() { - // if (POOL_RESETTABLE_CHUNKS) { - // return MultiChunkPool.forThisThread().takeResettableWritableObjectChunk(); - // } + if (POOL_RESETTABLE_CHUNKS) { + return MultiChunkPool.forThisThread().takeResettableWritableObjectChunk(); + } return new ResettableWritableObjectChunk<>(); } diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableShortChunk.java b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableShortChunk.java index 29e64adb82d..af93738f02e 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableShortChunk.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ResettableWritableShortChunk.java @@ -21,9 +21,9 @@ public class ResettableWritableShortChunk implements ResettableWritableChunk { public static ResettableWritableShortChunk makeResettableChunk() { - // if 
(POOL_RESETTABLE_CHUNKS) { - // return MultiChunkPool.forThisThread().takeResettableWritableShortChunk(); - // } + if (POOL_RESETTABLE_CHUNKS) { + return MultiChunkPool.forThisThread().takeResettableWritableShortChunk(); + } return new ResettableWritableShortChunk<>(); } From f09057fe9c55d5c865a7204f41707527b68bf8cf Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Thu, 18 Jan 2024 16:47:02 -0600 Subject: [PATCH 010/219] Update chunk pooling to use the proposed impl --- .../io/deephaven/chunk/Chunk.gwt.xml | 5 +- .../chunk/util/pools/MultiChunkPool.java | 371 ++++++++++++++---- 2 files changed, 290 insertions(+), 86 deletions(-) diff --git a/engine/chunk/src/main/resources/io/deephaven/chunk/Chunk.gwt.xml b/engine/chunk/src/main/resources/io/deephaven/chunk/Chunk.gwt.xml index 1b8c6331dff..e90134fedc6 100644 --- a/engine/chunk/src/main/resources/io/deephaven/chunk/Chunk.gwt.xml +++ b/engine/chunk/src/main/resources/io/deephaven/chunk/Chunk.gwt.xml @@ -1,7 +1,6 @@ - - - + + \ No newline at end of file diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/chunk/util/pools/MultiChunkPool.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/chunk/util/pools/MultiChunkPool.java index f4e5e3cc2c7..5a857e855d4 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/chunk/util/pools/MultiChunkPool.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/chunk/util/pools/MultiChunkPool.java @@ -4,6 +4,34 @@ package io.deephaven.chunk.util.pools; import io.deephaven.chunk.ChunkType; +import io.deephaven.chunk.ResettableBooleanChunk; +import io.deephaven.chunk.ResettableByteChunk; +import io.deephaven.chunk.ResettableCharChunk; +import io.deephaven.chunk.ResettableDoubleChunk; +import io.deephaven.chunk.ResettableFloatChunk; +import io.deephaven.chunk.ResettableIntChunk; +import io.deephaven.chunk.ResettableLongChunk; +import io.deephaven.chunk.ResettableObjectChunk; +import io.deephaven.chunk.ResettableShortChunk; +import io.deephaven.chunk.ResettableWritableBooleanChunk; +import io.deephaven.chunk.ResettableWritableByteChunk; +import io.deephaven.chunk.ResettableWritableCharChunk; +import io.deephaven.chunk.ResettableWritableDoubleChunk; +import io.deephaven.chunk.ResettableWritableFloatChunk; +import io.deephaven.chunk.ResettableWritableIntChunk; +import io.deephaven.chunk.ResettableWritableLongChunk; +import io.deephaven.chunk.ResettableWritableObjectChunk; +import io.deephaven.chunk.ResettableWritableShortChunk; +import io.deephaven.chunk.WritableBooleanChunk; +import io.deephaven.chunk.WritableByteChunk; +import io.deephaven.chunk.WritableCharChunk; +import io.deephaven.chunk.WritableDoubleChunk; +import io.deephaven.chunk.WritableFloatChunk; +import io.deephaven.chunk.WritableIntChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.WritableShortChunk; +import io.deephaven.chunk.attributes.Any; import org.jetbrains.annotations.NotNull; import java.util.Collections; @@ -14,87 +42,264 @@ * Provides a set of per-type {@link ChunkPool}s. Normally accessed via a {@link ThreadLocal}, to allow some threads to * share a common pool and others to allocate their own. 
*/ -public final class MultiChunkPool { -// -// private static final MultiChunkPool SHARED_POOL = new MultiChunkPool(); -// private static final ThreadLocal POOL_THREAD_LOCAL = ThreadLocal.withInitial(() -> SHARED_POOL); -// -// public static void enableDedicatedPoolForThisThread() { -// if (POOL_THREAD_LOCAL.get() == SHARED_POOL) { -// POOL_THREAD_LOCAL.set(new MultiChunkPool()); -// } -// } -// -// public static MultiChunkPool forThisThread() { -// return POOL_THREAD_LOCAL.get(); -// } -// -// private final BooleanChunkPool booleanChunkPool = new BooleanChunkPool(); -// private final CharChunkPool charChunkPool = new CharChunkPool(); -// private final ByteChunkPool byteChunkPool = new ByteChunkPool(); -// private final ShortChunkPool shortChunkPool = new ShortChunkPool(); -// private final IntChunkPool intChunkPool = new IntChunkPool(); -// private final LongChunkPool longChunkPool = new LongChunkPool(); -// private final FloatChunkPool floatChunkPool = new FloatChunkPool(); -// private final DoubleChunkPool doubleChunkPool = new DoubleChunkPool(); -// private final ObjectChunkPool objectChunkPool = new ObjectChunkPool(); -// -// private final Map pools; -// -// { -// final EnumMap tempPools = new EnumMap<>(ChunkType.class); -// tempPools.put(ChunkType.Boolean, booleanChunkPool); -// tempPools.put(ChunkType.Char, charChunkPool); -// tempPools.put(ChunkType.Byte, byteChunkPool); -// tempPools.put(ChunkType.Short, shortChunkPool); -// tempPools.put(ChunkType.Int, intChunkPool); -// tempPools.put(ChunkType.Long, longChunkPool); -// tempPools.put(ChunkType.Float, floatChunkPool); -// tempPools.put(ChunkType.Double, doubleChunkPool); -// tempPools.put(ChunkType.Object, objectChunkPool); -// pools = Collections.unmodifiableMap(tempPools); -// } -// -// private MultiChunkPool() {} -// -// @SuppressWarnings("unused") -// public ChunkPool getChunkPool(@NotNull final ChunkType chunkType) { -// return pools.get(chunkType); -// } -// -// public BooleanChunkPool getBooleanChunkPool() { -// return booleanChunkPool; -// } -// -// public CharChunkPool getCharChunkPool() { -// return charChunkPool; -// } -// -// public ByteChunkPool getByteChunkPool() { -// return byteChunkPool; -// } -// -// public ShortChunkPool getShortChunkPool() { -// return shortChunkPool; -// } -// -// public IntChunkPool getIntChunkPool() { -// return intChunkPool; -// } -// -// public LongChunkPool getLongChunkPool() { -// return longChunkPool; -// } -// -// public FloatChunkPool getFloatChunkPool() { -// return floatChunkPool; -// } -// -// public DoubleChunkPool getDoubleChunkPool() { -// return doubleChunkPool; -// } -// -// public ObjectChunkPool getObjectChunkPool() { -// return objectChunkPool; -// } +public final class MultiChunkPool implements BooleanChunkPool, ByteChunkPool, CharChunkPool, ShortChunkPool, + IntChunkPool, LongChunkPool, FloatChunkPool, DoubleChunkPool, ObjectChunkPool { + private static final MultiChunkPool SHARED_POOL = new MultiChunkPool(); + public static MultiChunkPool forThisThread() { + return SHARED_POOL; + } + + @Override + public ChunkPool asChunkPool() { + throw new UnsupportedOperationException( + "MultiChunkPool can't create a ChunkPool, call this on the specific type required"); + } + + @Override + public WritableBooleanChunk takeWritableBooleanChunk(int capacity) { + return WritableBooleanChunk.makeWritableChunkForPool(capacity); + } + + @Override + public void giveWritableBooleanChunk(@NotNull WritableBooleanChunk writableBooleanChunk) { + } + + @Override + public ResettableBooleanChunk 
takeResettableBooleanChunk() { + return ResettableBooleanChunk.makeResettableChunkForPool(); + } + + @Override + public void giveResettableBooleanChunk(@NotNull ResettableBooleanChunk resettableBooleanChunk) { + } + + @Override + public ResettableWritableBooleanChunk takeResettableWritableBooleanChunk() { + return ResettableWritableBooleanChunk.makeResettableChunkForPool(); + } + + @Override + public void giveResettableWritableBooleanChunk( + @NotNull ResettableWritableBooleanChunk resettableWritableBooleanChunk) { + } + + @Override + public WritableCharChunk takeWritableCharChunk(int capacity) { + return WritableCharChunk.makeWritableChunkForPool(capacity); + } + + @Override + public void giveWritableCharChunk(@NotNull WritableCharChunk writableCharChunk) { + } + + @Override + public ResettableCharChunk takeResettableCharChunk() { + return ResettableCharChunk.makeResettableChunkForPool(); + } + + @Override + public void giveResettableCharChunk(@NotNull ResettableCharChunk resettableCharChunk) { + } + + @Override + public ResettableWritableCharChunk takeResettableWritableCharChunk() { + return ResettableWritableCharChunk.makeResettableChunkForPool(); + } + + @Override + public void giveResettableWritableCharChunk(@NotNull ResettableWritableCharChunk resettableWritableCharChunk) { + } + + @Override + public WritableByteChunk takeWritableByteChunk(int capacity) { + return WritableByteChunk.makeWritableChunkForPool(capacity); + } + + @Override + public void giveWritableByteChunk(@NotNull WritableByteChunk writableByteChunk) { + } + + @Override + public ResettableByteChunk takeResettableByteChunk() { + return ResettableByteChunk.makeResettableChunkForPool(); + } + + @Override + public void giveResettableByteChunk(@NotNull ResettableByteChunk resettableByteChunk) { + } + + @Override + public ResettableWritableByteChunk takeResettableWritableByteChunk() { + return ResettableWritableByteChunk.makeResettableChunkForPool(); + } + + @Override + public void giveResettableWritableByteChunk(@NotNull ResettableWritableByteChunk resettableWritableByteChunk) { + } + + @Override + public WritableShortChunk takeWritableShortChunk(int capacity) { + return WritableShortChunk.makeWritableChunkForPool(capacity); + } + + @Override + public void giveWritableShortChunk(@NotNull WritableShortChunk writableShortChunk) { + } + + @Override + public ResettableShortChunk takeResettableShortChunk() { + return ResettableShortChunk.makeResettableChunkForPool(); + } + + @Override + public void giveResettableShortChunk(@NotNull ResettableShortChunk resettableShortChunk) { + } + + @Override + public ResettableWritableShortChunk takeResettableWritableShortChunk() { + return ResettableWritableShortChunk.makeResettableChunkForPool(); + } + + @Override + public void giveResettableWritableShortChunk( + @NotNull ResettableWritableShortChunk resettableWritableShortChunk) { + } + + @Override + public WritableIntChunk takeWritableIntChunk(int capacity) { + return WritableIntChunk.makeWritableChunkForPool(capacity); + } + + @Override + public void giveWritableIntChunk(@NotNull WritableIntChunk writableIntChunk) { + } + + @Override + public ResettableIntChunk takeResettableIntChunk() { + return ResettableIntChunk.makeResettableChunkForPool(); + } + + @Override + public void giveResettableIntChunk(@NotNull ResettableIntChunk resettableIntChunk) { + } + + @Override + public ResettableWritableIntChunk takeResettableWritableIntChunk() { + return ResettableWritableIntChunk.makeResettableChunkForPool(); + } + + @Override + public void 
giveResettableWritableIntChunk(@NotNull ResettableWritableIntChunk resettableWritableIntChunk) { + } + + @Override + public WritableLongChunk takeWritableLongChunk(int capacity) { + return WritableLongChunk.makeWritableChunkForPool(capacity); + } + + @Override + public void giveWritableLongChunk(@NotNull WritableLongChunk writableLongChunk) { + } + + @Override + public ResettableLongChunk takeResettableLongChunk() { + return ResettableLongChunk.makeResettableChunkForPool(); + } + + @Override + public void giveResettableLongChunk(@NotNull ResettableLongChunk resettableLongChunk) { + } + + @Override + public ResettableWritableLongChunk takeResettableWritableLongChunk() { + return ResettableWritableLongChunk.makeResettableChunkForPool(); + } + + @Override + public void giveResettableWritableLongChunk(@NotNull ResettableWritableLongChunk resettableWritableLongChunk) { + } + + @Override + public WritableFloatChunk takeWritableFloatChunk(int capacity) { + return WritableFloatChunk.makeWritableChunkForPool(capacity); + } + + @Override + public void giveWritableFloatChunk(@NotNull WritableFloatChunk writableFloatChunk) { + } + + @Override + public ResettableFloatChunk takeResettableFloatChunk() { + return ResettableFloatChunk.makeResettableChunkForPool(); + } + + @Override + public void giveResettableFloatChunk(@NotNull ResettableFloatChunk resettableFloatChunk) { + } + + @Override + public ResettableWritableFloatChunk takeResettableWritableFloatChunk() { + return ResettableWritableFloatChunk.makeResettableChunkForPool(); + } + + @Override + public void giveResettableWritableFloatChunk( + @NotNull ResettableWritableFloatChunk resettableWritableFloatChunk) { + } + + @Override + public WritableDoubleChunk takeWritableDoubleChunk(int capacity) { + return WritableDoubleChunk.makeWritableChunkForPool(capacity); + } + + @Override + public void giveWritableDoubleChunk(@NotNull WritableDoubleChunk writableDoubleChunk) { + } + + @Override + public ResettableDoubleChunk takeResettableDoubleChunk() { + return ResettableDoubleChunk.makeResettableChunkForPool(); + } + + @Override + public void giveResettableDoubleChunk(@NotNull ResettableDoubleChunk resettableDoubleChunk) { + } + + @Override + public ResettableWritableDoubleChunk takeResettableWritableDoubleChunk() { + return ResettableWritableDoubleChunk.makeResettableChunkForPool(); + } + + @Override + public void giveResettableWritableDoubleChunk( + @NotNull ResettableWritableDoubleChunk resettableWritableDoubleChunk) { + } + + @Override + public WritableObjectChunk takeWritableObjectChunk(int capacity) { + return WritableObjectChunk.makeWritableChunkForPool(capacity); + } + + @Override + public void giveWritableObjectChunk(@NotNull WritableObjectChunk writableObjectChunk) { + } + + @Override + public ResettableObjectChunk takeResettableObjectChunk() { + return ResettableObjectChunk.makeResettableChunkForPool(); + } + + @Override + public void giveResettableObjectChunk(@NotNull ResettableObjectChunk resettableObjectChunk) { + } + + @Override + public ResettableWritableObjectChunk takeResettableWritableObjectChunk() { + return ResettableWritableObjectChunk.makeResettableChunkForPool(); + } + + @Override + public void giveResettableWritableObjectChunk( + @NotNull ResettableWritableObjectChunk resettableWritableObjectChunk) { + } } From d72321ec362813d3106246f037d1d5e00a80c4a5 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 19 Jan 2024 09:44:05 -0600 Subject: [PATCH 011/219] Remove trove usage from RowSet itself --- 
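Note on the shape of this change: ShiftData previously bundled both shift flavors as nested interfaces; this patch promotes them to the standalone ShiftCallback (positional ranges, implemented by dense array-backed sources) and RowSetShiftCallback (row-key sets, implemented by sparse sources) added below. A minimal sketch of how a caller drives each flavor; the two interfaces and the implementing sources are from this patch, while the harness class and the example arguments are illustrative only:

    import io.deephaven.engine.rowset.RowSet;
    import io.deephaven.engine.table.impl.util.RowSetShiftCallback;
    import io.deephaven.engine.table.impl.util.ShiftCallback;

    class ShiftCallbackSketch {
        // Dense sources (ArrayBackedColumnSource and friends) shift by position range:
        static void shiftDense(ShiftCallback source) {
            source.shift(0, 9, 5); // move positions [0, 9] forward by 5 slots
        }

        // Sparse sources (SparseArrayColumnSource and friends) shift by row-key set:
        static void shiftSparse(RowSetShiftCallback source, RowSet keys) {
            source.shift(keys, 5); // re-key every row key in `keys` by +5
        }
    }
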
.../io/deephaven/engine/rowset/RowSet.java | 10 - .../rowset/impl/WritableRowSetImpl.java | 6 - .../table/impl/TableUpdateValidator.java | 8 +- .../impl/sources/ArrayBackedColumnSource.java | 4 +- .../sources/NanosBasedTimeArraySource.java | 4 +- .../NanosBasedTimeSparseArraySource.java | 4 +- .../impl/sources/NullValueColumnSource.java | 4 +- .../impl/sources/SingleValueColumnSource.java | 6 +- .../impl/sources/SparseArrayColumnSource.java | 4 +- .../ImmutableConstantByteSource.java | 4 +- .../ImmutableConstantCharSource.java | 4 +- .../ImmutableConstantDoubleSource.java | 4 +- .../ImmutableConstantFloatSource.java | 4 +- .../immutable/ImmutableConstantIntSource.java | 4 +- .../ImmutableConstantLongSource.java | 4 +- ...ImmutableConstantNanosBasedTimeSource.java | 4 +- .../ImmutableConstantObjectSource.java | 4 +- .../ImmutableConstantShortSource.java | 4 +- .../table/impl/util/RowSetShiftCallback.java | 7 + .../engine/table/impl/util/ShiftCallback.java | 5 + .../engine/table/impl/util/ShiftData.java | 142 --------- .../engine/table/impl/util/ShiftDataTest.java | 271 ------------------ 22 files changed, 45 insertions(+), 466 deletions(-) create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/util/RowSetShiftCallback.java create mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/util/ShiftCallback.java delete mode 100644 engine/table/src/main/java/io/deephaven/engine/table/impl/util/ShiftData.java delete mode 100644 engine/table/src/test/java/io/deephaven/engine/table/impl/util/ShiftDataTest.java diff --git a/engine/rowset/src/main/java/io/deephaven/engine/rowset/RowSet.java b/engine/rowset/src/main/java/io/deephaven/engine/rowset/RowSet.java index 7236f613329..f4c6885d133 100644 --- a/engine/rowset/src/main/java/io/deephaven/engine/rowset/RowSet.java +++ b/engine/rowset/src/main/java/io/deephaven/engine/rowset/RowSet.java @@ -3,7 +3,6 @@ // package io.deephaven.engine.rowset; -import gnu.trove.list.array.TLongArrayList; import io.deephaven.base.log.LogOutputAppendable; import io.deephaven.util.SafeCloseable; import io.deephaven.util.datastructures.LongAbortableConsumer; @@ -108,15 +107,6 @@ default WritableRowSet invert(RowSet keys) { */ WritableRowSet invert(RowSet keys, long maximumPosition); - /** - * For the given keys RowSet, under the assertion that none of them are present in the current RowSet, return the - * tentative insertion points in the current RowSet with the count for each of them - * - * @param keys the keys to identify insertion locations - * @return two TLongArrayLists; [0] contains the positions, [1] contains the counts. 
- */ - TLongArrayList[] findMissing(RowSet keys); - /** * Returns a new RowSet representing the intersection of the current RowSet with the input RowSet */ diff --git a/engine/rowset/src/main/java/io/deephaven/engine/rowset/impl/WritableRowSetImpl.java b/engine/rowset/src/main/java/io/deephaven/engine/rowset/impl/WritableRowSetImpl.java index e97b9112450..c6d3c60e64b 100644 --- a/engine/rowset/src/main/java/io/deephaven/engine/rowset/impl/WritableRowSetImpl.java +++ b/engine/rowset/src/main/java/io/deephaven/engine/rowset/impl/WritableRowSetImpl.java @@ -3,7 +3,6 @@ // package io.deephaven.engine.rowset.impl; -import gnu.trove.list.array.TLongArrayList; import io.deephaven.base.log.LogOutput; import io.deephaven.base.verify.Assert; import io.deephaven.engine.rowset.*; @@ -260,11 +259,6 @@ public final WritableRowSet invert(final RowSet keys, final long maximumPosition return new WritableRowSetImpl(innerSet.ixInvertOnNew(getInnerSet(keys), maximumPosition)); } - @Override - public final TLongArrayList[] findMissing(final RowSet keys) { - return RowSetUtils.findMissing(this, keys); - } - @NotNull @Override public final WritableRowSet intersect(@NotNull final RowSet range) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/TableUpdateValidator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/TableUpdateValidator.java index 1d4dd3ae586..eb825039e81 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/TableUpdateValidator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/TableUpdateValidator.java @@ -10,7 +10,6 @@ import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.hashing.ChunkEquals; import io.deephaven.configuration.Configuration; -import io.deephaven.datastructures.util.CollectionUtil; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetShiftData; @@ -18,7 +17,7 @@ import io.deephaven.engine.table.*; import io.deephaven.engine.table.impl.sources.SparseArrayColumnSource; import io.deephaven.engine.table.impl.util.ChunkUtils; -import io.deephaven.engine.table.impl.util.ShiftData; +import io.deephaven.engine.table.impl.util.RowSetShiftCallback; import io.deephaven.util.SafeCloseable; import io.deephaven.util.SafeCloseableList; import io.deephaven.vector.*; @@ -353,7 +352,7 @@ private ColumnInfo(QueryTable tableToValidate, String columnName) { this.isPrimitive = source.getType().isPrimitive(); this.expectedSource = SparseArrayColumnSource.getSparseMemoryColumnSource(source.getType(), source.getComponentType()); - Assert.eqTrue(this.expectedSource instanceof ShiftData.RowSetShiftCallback, + Assert.eqTrue(this.expectedSource instanceof RowSetShiftCallback, "expectedSource instanceof ShiftData.RowSetShiftCallback"); this.chunkEquals = ChunkEquals.makeEqual(source.getChunkType()); @@ -403,8 +402,7 @@ private WritableBooleanChunk equalValuesDest() { @Override public void shift(final long beginRange, final long endRange, final long shiftDelta) { - ((ShiftData.RowSetShiftCallback) expectedSource).shift( - rowSet.subSetByKeyRange(beginRange, endRange), shiftDelta); + ((RowSetShiftCallback) expectedSource).shift(rowSet.subSetByKeyRange(beginRange, endRange), shiftDelta); } public void remove(final RowSet toRemove) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ArrayBackedColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ArrayBackedColumnSource.java index 
afccc52cb1f..3d95a82b133 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ArrayBackedColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ArrayBackedColumnSource.java @@ -6,6 +6,7 @@ import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.engine.table.impl.util.ShiftCallback; import io.deephaven.qst.type.BoxedType; import io.deephaven.qst.type.GenericType; import io.deephaven.util.type.ArrayTypeUtils; @@ -13,7 +14,6 @@ import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.RowSequenceFactory; -import io.deephaven.engine.table.impl.util.ShiftData; import io.deephaven.qst.array.Array; import io.deephaven.qst.array.BooleanArray; import io.deephaven.qst.array.ByteArray; @@ -46,7 +46,7 @@ */ public abstract class ArrayBackedColumnSource extends AbstractDeferredGroupingColumnSource - implements FillUnordered, ShiftData.ShiftCallback, WritableColumnSource, InMemoryColumnSource, + implements FillUnordered, ShiftCallback, WritableColumnSource, InMemoryColumnSource, ChunkedBackingStoreExposedWritableSource { static final int DEFAULT_RECYCLER_CAPACITY = 1024; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeArraySource.java index 94ca377919d..7e8fbfa92fd 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeArraySource.java @@ -15,13 +15,13 @@ import io.deephaven.engine.table.SharedContext; import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.WritableSourceWithPrepareForParallelPopulation; -import io.deephaven.engine.table.impl.util.ShiftData; +import io.deephaven.engine.table.impl.util.ShiftCallback; import org.jetbrains.annotations.NotNull; import java.time.*; public abstract class NanosBasedTimeArraySource extends AbstractDeferredGroupingColumnSource - implements FillUnordered, ShiftData.ShiftCallback, WritableColumnSource, + implements FillUnordered, ShiftCallback, WritableColumnSource, InMemoryColumnSource, WritableSourceWithPrepareForParallelPopulation, ConvertibleTimeSource { protected final LongArraySource nanoSource; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeSparseArraySource.java index 38927d583d2..fb329955deb 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NanosBasedTimeSparseArraySource.java @@ -15,7 +15,7 @@ import io.deephaven.engine.table.SharedContext; import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.WritableSourceWithPrepareForParallelPopulation; -import io.deephaven.engine.table.impl.util.ShiftData; +import io.deephaven.engine.table.impl.util.RowSetShiftCallback; import org.jetbrains.annotations.NotNull; import java.time.Instant; @@ -29,7 +29,7 @@ */ public abstract class NanosBasedTimeSparseArraySource extends AbstractDeferredGroupingColumnSource implements 
FillUnordered, WritableColumnSource, InMemoryColumnSource, - PossiblyImmutableColumnSource, WritableSourceWithPrepareForParallelPopulation, ShiftData.RowSetShiftCallback, + PossiblyImmutableColumnSource, WritableSourceWithPrepareForParallelPopulation, RowSetShiftCallback, ConvertibleTimeSource { protected final LongSparseArraySource nanoSource; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NullValueColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NullValueColumnSource.java index aec4b545db7..4182b8b8087 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NullValueColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/NullValueColumnSource.java @@ -15,10 +15,10 @@ import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.AbstractColumnSource; +import io.deephaven.engine.table.impl.util.ShiftCallback; import io.deephaven.hash.KeyedObjectHashMap; import io.deephaven.hash.KeyedObjectKey; import io.deephaven.chunk.WritableChunk; -import io.deephaven.engine.table.impl.util.ShiftData; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; @@ -37,7 +37,7 @@ * A column source that returns null for all keys. Trivially "writable" since it can only contain null values. */ public final class NullValueColumnSource extends AbstractColumnSource - implements ShiftData.ShiftCallback, InMemoryColumnSource, RowKeyAgnosticChunkSource, + implements ShiftCallback, InMemoryColumnSource, RowKeyAgnosticChunkSource, WritableColumnSource { private static final KeyedObjectKey.Basic, Class>, NullValueColumnSource> KEY_TYPE = diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/SingleValueColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/SingleValueColumnSource.java index dadb09f9be4..3da4b4952bc 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/SingleValueColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/SingleValueColumnSource.java @@ -8,12 +8,10 @@ import io.deephaven.engine.table.ChunkSink; import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.AbstractColumnSource; -import io.deephaven.engine.table.impl.util.ShiftData; - -import static io.deephaven.util.QueryConstants.NULL_BYTE; +import io.deephaven.engine.table.impl.util.ShiftCallback; public abstract class SingleValueColumnSource extends AbstractColumnSource - implements WritableColumnSource, ChunkSink, ShiftData.ShiftCallback, InMemoryColumnSource, + implements WritableColumnSource, ChunkSink, ShiftCallback, InMemoryColumnSource, RowKeyAgnosticChunkSource { protected transient long changeTime; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/SparseArrayColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/SparseArrayColumnSource.java index 05d895b3eb4..6f4e05e1f3f 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/SparseArrayColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/SparseArrayColumnSource.java @@ -7,7 +7,7 @@ import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.WritableSourceWithPrepareForParallelPopulation; -import 
io.deephaven.engine.table.impl.util.ShiftData; +import io.deephaven.engine.table.impl.util.RowSetShiftCallback; import io.deephaven.util.type.ArrayTypeUtils; import io.deephaven.engine.rowset.chunkattributes.RowKeys; import io.deephaven.chunk.attributes.Values; @@ -73,7 +73,7 @@ public abstract class SparseArrayColumnSource extends AbstractDeferredGroupingColumnSource implements FillUnordered, WritableColumnSource, InMemoryColumnSource, PossiblyImmutableColumnSource, - WritableSourceWithPrepareForParallelPopulation, ShiftData.RowSetShiftCallback { + WritableSourceWithPrepareForParallelPopulation, RowSetShiftCallback { static final int DEFAULT_RECYCLER_CAPACITY = 1024; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantByteSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantByteSource.java index 6e8b5e8a813..7fbdabe198f 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantByteSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantByteSource.java @@ -18,7 +18,7 @@ import io.deephaven.engine.table.impl.AbstractColumnSource; import io.deephaven.engine.table.impl.ImmutableColumnSourceGetDefaults; import io.deephaven.engine.table.impl.sources.*; -import io.deephaven.engine.table.impl.util.ShiftData; +import io.deephaven.engine.table.impl.util.ShiftCallback; import org.jetbrains.annotations.NotNull; import static io.deephaven.engine.rowset.RowSequence.NULL_ROW_KEY; @@ -32,7 +32,7 @@ */ public class ImmutableConstantByteSource extends AbstractColumnSource - implements ImmutableColumnSourceGetDefaults.ForByte, ShiftData.ShiftCallback, InMemoryColumnSource, + implements ImmutableColumnSourceGetDefaults.ForByte, ShiftCallback, InMemoryColumnSource, RowKeyAgnosticChunkSource /* MIXIN_IMPLS */ { private final byte value; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantCharSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantCharSource.java index 0788b2cb9c3..8731a353cdc 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantCharSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantCharSource.java @@ -12,7 +12,7 @@ import io.deephaven.engine.table.impl.AbstractColumnSource; import io.deephaven.engine.table.impl.ImmutableColumnSourceGetDefaults; import io.deephaven.engine.table.impl.sources.*; -import io.deephaven.engine.table.impl.util.ShiftData; +import io.deephaven.engine.table.impl.util.ShiftCallback; import org.jetbrains.annotations.NotNull; import static io.deephaven.engine.rowset.RowSequence.NULL_ROW_KEY; @@ -26,7 +26,7 @@ */ public class ImmutableConstantCharSource extends AbstractColumnSource - implements ImmutableColumnSourceGetDefaults.ForChar, ShiftData.ShiftCallback, InMemoryColumnSource, + implements ImmutableColumnSourceGetDefaults.ForChar, ShiftCallback, InMemoryColumnSource, RowKeyAgnosticChunkSource /* MIXIN_IMPLS */ { private final char value; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantDoubleSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantDoubleSource.java index c75d5f2faba..a9c07d7a8d4 100644 --- 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantDoubleSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantDoubleSource.java @@ -16,7 +16,7 @@ import io.deephaven.engine.table.impl.AbstractColumnSource; import io.deephaven.engine.table.impl.ImmutableColumnSourceGetDefaults; import io.deephaven.engine.table.impl.sources.*; -import io.deephaven.engine.table.impl.util.ShiftData; +import io.deephaven.engine.table.impl.util.ShiftCallback; import org.jetbrains.annotations.NotNull; import static io.deephaven.engine.rowset.RowSequence.NULL_ROW_KEY; @@ -30,7 +30,7 @@ */ public class ImmutableConstantDoubleSource extends AbstractColumnSource - implements ImmutableColumnSourceGetDefaults.ForDouble, ShiftData.ShiftCallback, InMemoryColumnSource, + implements ImmutableColumnSourceGetDefaults.ForDouble, ShiftCallback, InMemoryColumnSource, RowKeyAgnosticChunkSource /* MIXIN_IMPLS */ { private final double value; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantFloatSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantFloatSource.java index 81e961dbfec..6a1761e8c75 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantFloatSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantFloatSource.java @@ -16,7 +16,7 @@ import io.deephaven.engine.table.impl.AbstractColumnSource; import io.deephaven.engine.table.impl.ImmutableColumnSourceGetDefaults; import io.deephaven.engine.table.impl.sources.*; -import io.deephaven.engine.table.impl.util.ShiftData; +import io.deephaven.engine.table.impl.util.ShiftCallback; import org.jetbrains.annotations.NotNull; import static io.deephaven.engine.rowset.RowSequence.NULL_ROW_KEY; @@ -30,7 +30,7 @@ */ public class ImmutableConstantFloatSource extends AbstractColumnSource - implements ImmutableColumnSourceGetDefaults.ForFloat, ShiftData.ShiftCallback, InMemoryColumnSource, + implements ImmutableColumnSourceGetDefaults.ForFloat, ShiftCallback, InMemoryColumnSource, RowKeyAgnosticChunkSource /* MIXIN_IMPLS */ { private final float value; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantIntSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantIntSource.java index ea4a6a8a82c..a1af8ec8b80 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantIntSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantIntSource.java @@ -16,7 +16,7 @@ import io.deephaven.engine.table.impl.AbstractColumnSource; import io.deephaven.engine.table.impl.ImmutableColumnSourceGetDefaults; import io.deephaven.engine.table.impl.sources.*; -import io.deephaven.engine.table.impl.util.ShiftData; +import io.deephaven.engine.table.impl.util.ShiftCallback; import org.jetbrains.annotations.NotNull; import static io.deephaven.engine.rowset.RowSequence.NULL_ROW_KEY; @@ -30,7 +30,7 @@ */ public class ImmutableConstantIntSource extends AbstractColumnSource - implements ImmutableColumnSourceGetDefaults.ForInt, ShiftData.ShiftCallback, InMemoryColumnSource, + implements ImmutableColumnSourceGetDefaults.ForInt, ShiftCallback, InMemoryColumnSource, RowKeyAgnosticChunkSource /* MIXIN_IMPLS */ { 
private final int value; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantLongSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantLongSource.java index 7d9e322f6d3..702a2c33f14 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantLongSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantLongSource.java @@ -25,7 +25,7 @@ import io.deephaven.engine.table.impl.AbstractColumnSource; import io.deephaven.engine.table.impl.ImmutableColumnSourceGetDefaults; import io.deephaven.engine.table.impl.sources.*; -import io.deephaven.engine.table.impl.util.ShiftData; +import io.deephaven.engine.table.impl.util.ShiftCallback; import org.jetbrains.annotations.NotNull; import static io.deephaven.engine.rowset.RowSequence.NULL_ROW_KEY; @@ -39,7 +39,7 @@ */ public class ImmutableConstantLongSource extends AbstractColumnSource - implements ImmutableColumnSourceGetDefaults.ForLong, ShiftData.ShiftCallback, InMemoryColumnSource, + implements ImmutableColumnSourceGetDefaults.ForLong, ShiftCallback, InMemoryColumnSource, RowKeyAgnosticChunkSource , ConvertibleTimeSource { private final long value; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantNanosBasedTimeSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantNanosBasedTimeSource.java index 075b786b412..e91ceb5645c 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantNanosBasedTimeSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantNanosBasedTimeSource.java @@ -13,7 +13,7 @@ import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.AbstractColumnSource; import io.deephaven.engine.table.impl.sources.*; -import io.deephaven.engine.table.impl.util.ShiftData; +import io.deephaven.engine.table.impl.util.ShiftCallback; import org.jetbrains.annotations.NotNull; import java.time.Instant; @@ -23,7 +23,7 @@ import java.time.ZonedDateTime; public abstract class ImmutableConstantNanosBasedTimeSource extends AbstractColumnSource - implements ShiftData.ShiftCallback, InMemoryColumnSource, RowKeyAgnosticChunkSource, + implements ShiftCallback, InMemoryColumnSource, RowKeyAgnosticChunkSource, ConvertibleTimeSource { protected final ImmutableConstantLongSource nanoSource; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantObjectSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantObjectSource.java index 5971e14e61f..0f776fc6a1e 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantObjectSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantObjectSource.java @@ -16,7 +16,7 @@ import io.deephaven.engine.table.impl.AbstractColumnSource; import io.deephaven.engine.table.impl.ImmutableColumnSourceGetDefaults; import io.deephaven.engine.table.impl.sources.*; -import io.deephaven.engine.table.impl.util.ShiftData; +import io.deephaven.engine.table.impl.util.ShiftCallback; import org.jetbrains.annotations.NotNull; import static io.deephaven.engine.rowset.RowSequence.NULL_ROW_KEY; @@ -29,7 +29,7 @@ */ public 
class ImmutableConstantObjectSource extends AbstractColumnSource - implements ImmutableColumnSourceGetDefaults.ForObject, ShiftData.ShiftCallback, InMemoryColumnSource, + implements ImmutableColumnSourceGetDefaults.ForObject, ShiftCallback, InMemoryColumnSource, RowKeyAgnosticChunkSource /* MIXIN_IMPLS */ { private final T value; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantShortSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantShortSource.java index cdf53c08c05..6a7e57d8e91 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantShortSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/immutable/ImmutableConstantShortSource.java @@ -16,7 +16,7 @@ import io.deephaven.engine.table.impl.AbstractColumnSource; import io.deephaven.engine.table.impl.ImmutableColumnSourceGetDefaults; import io.deephaven.engine.table.impl.sources.*; -import io.deephaven.engine.table.impl.util.ShiftData; +import io.deephaven.engine.table.impl.util.ShiftCallback; import org.jetbrains.annotations.NotNull; import static io.deephaven.engine.rowset.RowSequence.NULL_ROW_KEY; @@ -30,7 +30,7 @@ */ public class ImmutableConstantShortSource extends AbstractColumnSource - implements ImmutableColumnSourceGetDefaults.ForShort, ShiftData.ShiftCallback, InMemoryColumnSource, + implements ImmutableColumnSourceGetDefaults.ForShort, ShiftCallback, InMemoryColumnSource, RowKeyAgnosticChunkSource /* MIXIN_IMPLS */ { private final short value; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/RowSetShiftCallback.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/RowSetShiftCallback.java new file mode 100644 index 00000000000..01a952e9b54 --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/RowSetShiftCallback.java @@ -0,0 +1,7 @@ +package io.deephaven.engine.table.impl.util; + +import io.deephaven.engine.rowset.RowSet; + +public interface RowSetShiftCallback { + void shift(RowSet rowSet, long offset); +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ShiftCallback.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ShiftCallback.java new file mode 100644 index 00000000000..a45f707c675 --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ShiftCallback.java @@ -0,0 +1,5 @@ +package io.deephaven.engine.table.impl.util; + +public interface ShiftCallback { + void shift(long start, long end, long offset); +} diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ShiftData.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ShiftData.java deleted file mode 100644 index 1e3f9859561..00000000000 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ShiftData.java +++ /dev/null @@ -1,142 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.engine.table.impl.util; - -import gnu.trove.list.TLongList; -import gnu.trove.list.array.TLongArrayList; -import io.deephaven.engine.rowset.RowSet; - -public class ShiftData { - - private final TLongArrayList startIndex; - private final TLongArrayList endIndex; - private final TLongArrayList offsets; - private final int size; - private int runningSize = 0; - private long runningOffset = 0; - - public RowSet getAddedPos() { - return addedPos; - } - - private 
RowSet addedPos; - - public ShiftData(RowSet rowSet, RowSet removed, RowSet added) { - TLongList[] removedKeys = rowSet.findMissing(removed); - addedPos = rowSet.invert(added); - endIndex = new TLongArrayList(); - startIndex = new TLongArrayList(); - offsets = new TLongArrayList(); - int removedIndex = 0; - TLongList removedPositions = removedKeys[0]; - TLongList removedCount = removedKeys[1]; - for (RowSet.RangeIterator addedIt = addedPos.rangeIterator(); addedIt.hasNext();) { - addedIt.next(); - int startOffset = (int) addedIt.currentRangeStart(); - int endOffset = (int) addedIt.currentRangeEnd(); - while (removedIndex < removedPositions.size() && removedPositions.get(removedIndex) < startOffset) { - removeRange(removedPositions.get(removedIndex), removedCount.get(removedIndex)); - removedIndex++; - } - int deleteCount = 0; - while (removedIndex < removedPositions.size() && removedPositions.get(removedIndex) <= endOffset) { - deleteCount += removedCount.get(removedIndex); - removedIndex++; - } - addRange(startOffset, endOffset, deleteCount); - } - while (removedIndex < removedPositions.size()) { - removeRange(removedPositions.get(removedIndex), removedCount.get(removedIndex)); - removedIndex++; - } - if (runningSize > 0) { - if (startIndex.get(runningSize - 1) <= (rowSet.size() - added.size() + removed.size() - 1)) { - endIndex.set(runningSize - 1, (int) (rowSet.size() - added.size() + removed.size() - 1)); - } else { - runningSize--; - } - } - size = runningSize; - } - - void addRange(long firstIndex, long lastIndex, long deletionCount) { - if (lastIndex - firstIndex + 1 == deletionCount) { - return; - } - if (runningSize > 0) { - endIndex.set(runningSize - 1, firstIndex - runningOffset - 1); - } - - - long newStartIndex = firstIndex + deletionCount - runningOffset; - runningOffset = lastIndex + runningOffset + 1 - (deletionCount + firstIndex); - - if (runningSize > 0 && ((newStartIndex + runningOffset) == (startIndex.get(runningSize - 1) - + offsets.get(runningSize - 1)))) { - startIndex.set(runningSize - 1, newStartIndex); - offsets.set(runningSize - 1, runningOffset); - } else { - startIndex.add(newStartIndex); - offsets.add(runningOffset); - endIndex.add(0); - runningSize++; - } - } - - void removeRange(long firstIndex, long count) { - if (runningSize > 0) { - endIndex.set(runningSize - 1, firstIndex - runningOffset - 1); - } - - long newStartIndex = firstIndex - runningOffset + count; - runningOffset = runningOffset - count; - - if (runningSize > 0 - && (newStartIndex + runningOffset == startIndex.get(runningSize - 1) + offsets.get(runningSize - 1))) { - startIndex.set(runningSize - 1, newStartIndex); - offsets.set(runningSize - 1, runningOffset); - } else { - startIndex.add(newStartIndex); - offsets.add(runningOffset); - endIndex.add(0); - runningSize++; - } - } - - public interface ShiftCallback { - void shift(long start, long end, long offset); - } - - public interface RowSetShiftCallback { - void shift(RowSet rowSet, long offset); - } - - public void applyDataShift(ShiftCallback shiftCallback) { - int startPos = 0; - int currentPos = 0; - while (currentPos < size) { - if (offsets.get(startPos) > 0) { - while (currentPos < size && offsets.get(currentPos) > 0) { - currentPos++; - } - for (int ii = currentPos - 1; ii >= startPos; ii--) { - shiftCallback.shift(startIndex.get(ii), endIndex.get(ii), offsets.get(ii)); - } - } else if (offsets.get(startPos) < 0) { - while (currentPos < size && offsets.get(currentPos) < 0) { - currentPos++; - } - for (int ii = startPos; ii < currentPos; 
ii++) { - shiftCallback.shift(startIndex.get(ii), endIndex.get(ii), offsets.get(ii)); - } - } else { - while (currentPos < size && offsets.get(currentPos) == 0) { - currentPos++; - } - } - startPos = currentPos; - } - } - -} diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/ShiftDataTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/ShiftDataTest.java deleted file mode 100644 index 1e3573b9567..00000000000 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/ShiftDataTest.java +++ /dev/null @@ -1,271 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.engine.table.impl.util; - -import io.deephaven.engine.rowset.RowSet; -import io.deephaven.engine.rowset.RowSetBuilderRandom; -import io.deephaven.engine.rowset.RowSetBuilderSequential; -import io.deephaven.engine.rowset.RowSetFactory; -import junit.framework.TestCase; - -import java.util.*; - -public class ShiftDataTest extends TestCase { - - public void testSystematic() { - RowSet rowSet = getSortedIndex(); - RowSet removed = getSortedIndex(); - RowSet added = getSortedIndex(); - ShiftData shiftData = new ShiftData(rowSet, removed, added); - shiftData.applyDataShift(new ShiftData.ShiftCallback() { - @Override - public void shift(long start, long end, long offset) { - assertTrue("Should not call it", false); - } - }); - rowSet = getSortedIndex(1L); - testNoNotification(rowSet, removed, added); - added = getSortedIndex(1L); - testNoNotification(rowSet, removed, added); - rowSet = getSortedIndex(1L, 2); - added = getSortedIndex(1L, 2); - testNoNotification(rowSet, removed, added); - - rowSet = getSortedIndex(1L, 2, 3); - added = getSortedIndex(2, 3); - testNoNotification(rowSet, removed, added); - - removed = getSortedIndex(4, 5); - testNoNotification(rowSet, removed, added); - - rowSet = getSortedIndex(); - added = getSortedIndex(); - removed = getSortedIndex(4, 5); - testNoNotification(rowSet, removed, added); - - rowSet = getSortedIndex(1L, 2, 4); - added = getSortedIndex(2); - removed = getSortedIndex(3); - testNoNotification(rowSet, removed, added); - - rowSet = getSortedIndex(1L, 3, 4); - added = getSortedIndex(3); - removed = getSortedIndex(2); - testNoNotification(rowSet, removed, added); - - rowSet = getSortedIndex(); - added = getSortedIndex(); - removed = getSortedIndex(1, 2, 4); - testNoNotification(rowSet, removed, added); - - - rowSet = getSortedIndex(4); - added = getSortedIndex(); - removed = getSortedIndex(1); - checkExpectations(rowSet, removed, added, new long[][] {{1, 1, -1}}); - - rowSet = getSortedIndex(4, 5); - added = getSortedIndex(4); - removed = getSortedIndex(); - checkExpectations(rowSet, removed, added, new long[][] {{0, 0, 1}}); - - rowSet = getSortedIndex(4, 5, 6, 7); - added = getSortedIndex(4); - removed = getSortedIndex(); - checkExpectations(rowSet, removed, added, new long[][] {{0, 2, 1}}); - - rowSet = getSortedIndex(4, 5, 6, 7); - added = getSortedIndex(4, 5); - removed = getSortedIndex(); - checkExpectations(rowSet, removed, added, new long[][] {{0, 1, 2}}); - - rowSet = getSortedIndex(4, 5, 6, 7); - added = getSortedIndex(5, 6); - removed = getSortedIndex(); - checkExpectations(rowSet, removed, added, new long[][] {{1, 1, 2}}); - // was 1,4,7 - rowSet = getSortedIndex(4, 5, 6, 7); - added = getSortedIndex(5, 6); - removed = getSortedIndex(1); - checkExpectations(rowSet, removed, added, new long[][] {{1, 1, -1}, {2, 2, 1}}); - - // was 1,2,4,6,7 - rowSet = getSortedIndex(4, 5, 6, 7); - 
added = getSortedIndex(5); - removed = getSortedIndex(1, 2); - checkExpectations(rowSet, removed, added, new long[][] {{2, 2, -2}, {3, 4, -1}}); - - // was 6,7,9,11 - rowSet = getSortedIndex(4, 5, 9, 10, 11); - added = getSortedIndex(4, 5, 10); - removed = getSortedIndex(6, 7); - checkExpectations(rowSet, removed, added, new long[][] {{3, 3, 1}}); - - // was 6,7,9,11 - rowSet = getSortedIndex(4, 9, 10, 11); - added = getSortedIndex(4, 10); - removed = getSortedIndex(6, 7); - checkExpectations(rowSet, removed, added, new long[][] {{2, 2, -1}}); - - // was 2,4,6,8 - rowSet = getSortedIndex(1, 2, 3, 4, 5, 6, 7, 8); - added = getSortedIndex(1, 3, 5, 7); - removed = getSortedIndex(); - checkExpectations(rowSet, removed, added, new long[][] {{3, 3, 4}, {2, 2, 3}, {1, 1, 2}, {0, 0, 1}}); - - // was 2,4,6,8,10,12,16 - rowSet = getSortedIndex(1, 2, 3, 4, 8, 16); - added = getSortedIndex(1, 3); - removed = getSortedIndex(6, 10, 12); - checkExpectations(rowSet, removed, added, new long[][] {{3, 3, 1}, {1, 1, 2}, {0, 0, 1}, {6, 6, -1}}); - - // was 100,200,300,400,500,600,700 - rowSet = getSortedIndex(100, 200, 230, 240, 250, 260, 270, 500, 550, 700); - added = getSortedIndex(230, 240, 250, 260, 270, 550); - removed = getSortedIndex(300, 400, 600); - checkExpectations(rowSet, removed, added, new long[][] {{6, 6, 3}, {4, 4, 3}}); - } - - private void checkExpectations(RowSet rowSet, RowSet removed, RowSet added, long[][] expected) { - ShiftData shiftData; - class Expectations implements ShiftData.ShiftCallback { - - private final long[][] expected; - private int i = 0; - - Expectations(long[][] expected) { - this.expected = expected; - } - - @Override - public void shift(long start, long end, long offset) { - long[] current = expected[i++]; - assertEquals(current[0], start); - assertEquals(current[1], end); - assertEquals(current[2], offset); - } - - public void allMet() { - assertEquals(i, expected.length); - } - } - shiftData = new ShiftData(rowSet, removed, added); - final Expectations expectations = new Expectations(expected); - shiftData.applyDataShift(expectations); - expectations.allMet(); - } - - private void testNoNotification(RowSet rowSet, RowSet removed, RowSet added) { - ShiftData shiftData; - shiftData = new ShiftData(rowSet, removed, added); - shiftData.applyDataShift(new ShiftData.ShiftCallback() { - @Override - public void shift(long start, long end, long offset) { - assertTrue("Should not call it", false); - } - }); - } - - Random random = new Random(123); - - public void testRandom() { - for (int k = 0; k < 100; k++) { - RowSet initialRowSet = getBaseIndex(100, 10); - RowSet added = getRandomIndex(20, 1, 10); - RowSet removed = getRandomRemoves(initialRowSet, 2); - RowSet finalRowSet = getFinalIndex(initialRowSet, added, removed); - final long resultKeys[] = new long[(int) Math.max(initialRowSet.size(), finalRowSet.size())]; - int pos = 0; - for (RowSet.Iterator it = initialRowSet.iterator(); it.hasNext();) { - resultKeys[pos++] = it.nextLong(); - } - ShiftData shiftData = new ShiftData(finalRowSet, removed, added); - shiftData.applyDataShift(new ShiftData.ShiftCallback() { - @Override - public void shift(long start, long end, long offset) { - if (offset > 0) { - for (int i = (int) end; i >= start; i--) { - resultKeys[((int) (i + offset))] = resultKeys[i]; - } - } else { - for (int i = (int) start; i <= end; i++) { - resultKeys[((int) (i + offset))] = resultKeys[i]; - } - } - } - }); - RowSet addedPos = shiftData.getAddedPos(); - - for (RowSet.Iterator iterator = addedPos.iterator(), 
valueIt = added.iterator(); iterator.hasNext();) { - resultKeys[((int) iterator.nextLong())] = valueIt.nextLong(); - } - - pos = 0; - for (RowSet.Iterator iterator = finalRowSet.iterator(); iterator.hasNext();) { - assertEquals(iterator.nextLong(), resultKeys[pos++]); - } - } - } - - private RowSet getFinalIndex(RowSet initialRowSet, RowSet added, RowSet removed) { - TreeSet finalKeys = new TreeSet(); - for (RowSet.Iterator iterator = initialRowSet.iterator(); iterator.hasNext();) { - Long next = iterator.nextLong(); - finalKeys.add(next); - } - for (RowSet.Iterator iterator = removed.iterator(); iterator.hasNext();) { - Long next = iterator.nextLong(); - finalKeys.remove(next); - } - for (RowSet.Iterator iterator = added.iterator(); iterator.hasNext();) { - Long next = iterator.nextLong(); - finalKeys.add(next); - } - RowSetBuilderRandom builder = RowSetFactory.builderRandom(); - for (Long finalKey : finalKeys) { - builder.addKey(finalKey); - } - return builder.build(); - } - - private RowSet getRandomRemoves(RowSet rowSet, int prob) { - RowSetBuilderRandom builder = RowSetFactory.builderRandom(); - for (RowSet.Iterator iterator = rowSet.iterator(); iterator.hasNext();) { - long next = iterator.nextLong(); - if (random.nextInt(prob) == 0) { - builder.addKey(next); - } - } - return builder.build(); - } - - - private RowSet getBaseIndex(int base, int size) { - RowSetBuilderSequential builder = RowSetFactory.builderSequential(); - for (int i = 0; i < size; i++) { - builder.appendKey(i * size); - } - return builder.build(); - } - - private RowSet getRandomIndex(int base, int offset, int size) { - RowSetBuilderRandom builder = RowSetFactory.builderRandom(); - for (int i = 0; i < size; i++) { - if (random.nextInt(2) == 0) { - builder.addKey(i * base + offset); - } - } - return builder.build(); - } - - - protected RowSet getSortedIndex(long... keys) { - RowSetBuilderRandom builder = RowSetFactory.builderRandom(); - for (long key : keys) { - builder.addKey(key); - } - return builder.build(); - } - -} From ab1a1e616c12839a47fc00bf4e254e00a6d8fbfe Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 19 Jan 2024 09:45:36 -0600 Subject: [PATCH 012/219] Delete additional unused trove --- .../engine/rowset/RowSetBuilderSequential.java | 9 +-------- .../deephaven/engine/rowset/RowSetFactory.java | 17 ----------------- 2 files changed, 1 insertion(+), 25 deletions(-) diff --git a/engine/rowset/src/main/java/io/deephaven/engine/rowset/RowSetBuilderSequential.java b/engine/rowset/src/main/java/io/deephaven/engine/rowset/RowSetBuilderSequential.java index e678a514e51..aad8cad51ba 100644 --- a/engine/rowset/src/main/java/io/deephaven/engine/rowset/RowSetBuilderSequential.java +++ b/engine/rowset/src/main/java/io/deephaven/engine/rowset/RowSetBuilderSequential.java @@ -3,7 +3,6 @@ // package io.deephaven.engine.rowset; -import gnu.trove.procedure.TLongProcedure; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; import io.deephaven.util.datastructures.LongRangeConsumer; @@ -17,7 +16,7 @@ /** * Builder interface for {@link RowSet} construction in strict sequential order. 
*/ -public interface RowSetBuilderSequential extends TLongProcedure, LongRangeConsumer { +public interface RowSetBuilderSequential extends LongRangeConsumer { /** * Hint to call, but if called, (a) should be called before providing any values, and (b) no value should be @@ -61,12 +60,6 @@ default void appendOrderedRowKeyRangesChunk(final LongChunk appendRanges(new LongChunkRangeIterator(chunk)); } - @Override - default boolean execute(final long value) { - appendKey(value); - return true; - } - /** * Appends a {@link RowSequence} to this builder. * diff --git a/engine/rowset/src/main/java/io/deephaven/engine/rowset/RowSetFactory.java b/engine/rowset/src/main/java/io/deephaven/engine/rowset/RowSetFactory.java index 78c620c661b..a6b7d6dd91e 100644 --- a/engine/rowset/src/main/java/io/deephaven/engine/rowset/RowSetFactory.java +++ b/engine/rowset/src/main/java/io/deephaven/engine/rowset/RowSetFactory.java @@ -3,12 +3,10 @@ // package io.deephaven.engine.rowset; -import gnu.trove.list.TLongList; import io.deephaven.engine.rowset.impl.AdaptiveRowSetBuilderRandom; import io.deephaven.engine.rowset.impl.BasicRowSetBuilderSequential; import io.deephaven.engine.rowset.impl.WritableRowSetImpl; import io.deephaven.engine.rowset.impl.singlerange.SingleRange; -import org.jetbrains.annotations.NotNull; /** * Repository of factory methods for constructing {@link WritableRowSet row sets}. @@ -56,21 +54,6 @@ public static WritableRowSet fromKeys(final long rowKey) { return fromRange(rowKey, rowKey); } - /** - * Get a {@link WritableRowSet} containing the specified row keys. - *

-     * The provided {@link TLongList} is sorted and then passed to a {@link RowSetBuilderSequential}.
-     *
-     * @param rowKeys A {@link TLongList}. Note that this list is mutated within the method!
-     * @return A new {@link WritableRowSet} containing the values from {@code rowKeys}
-     */
-    public static RowSet fromKeys(@NotNull final TLongList rowKeys) {
-        rowKeys.sort();
-        final RowSetBuilderSequential builder = builderSequential();
-        rowKeys.forEach(builder);
-        return builder.build();
-    }
-
     /**
      * Create a {@link WritableRowSet} containing the continuous range [firstRowKey, lastRowKey].
      *

From ac6cc129221cbe66f97b23498636eaae5f9b9e14 Mon Sep 17 00:00:00 2001
From: Colin Alworth
Date: Tue, 6 Feb 2024 16:26:26 -0600
Subject: [PATCH 013/219] Quick and dirty stab at reference emul

---
 .../io/deephaven/web/client/fu/JsWeakRef.java | 13 +++++++
 .../web/super/java/lang/ref/Reference.java    | 37 +++++++++++++++++++
 .../super/java/lang/ref/ReferenceQueue.java   |  5 +++
 .../super/java/lang/ref/SoftReference.java    | 10 +++++
 4 files changed, 65 insertions(+)
 create mode 100644 web/client-api/src/main/java/io/deephaven/web/client/fu/JsWeakRef.java
 create mode 100644 web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/Reference.java
 create mode 100644 web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/ReferenceQueue.java
 create mode 100644 web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/SoftReference.java

diff --git a/web/client-api/src/main/java/io/deephaven/web/client/fu/JsWeakRef.java b/web/client-api/src/main/java/io/deephaven/web/client/fu/JsWeakRef.java
new file mode 100644
index 00000000000..0bfb8f1fd30
--- /dev/null
+++ b/web/client-api/src/main/java/io/deephaven/web/client/fu/JsWeakRef.java
@@ -0,0 +1,13 @@
+package io.deephaven.web.client.fu;
+
+import jsinterop.annotations.JsPackage;
+import jsinterop.annotations.JsType;
+
+@JsType(namespace = JsPackage.GLOBAL, name = "WeakRef", isNative = true)
+public class JsWeakRef<T> {
+    public JsWeakRef(T target) {
+
+    }
+
+    public native T deref();
+}
diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/Reference.java b/web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/Reference.java
new file mode 100644
index 00000000000..4570799b802
--- /dev/null
+++ b/web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/Reference.java
@@ -0,0 +1,37 @@
+package java.lang.ref;
+
+import io.deephaven.web.client.fu.JsWeakRef;
+
+public abstract class Reference<T> {
+
+    private JsWeakRef<T> jsWeakRef;
+
+    Reference(T referent) {
+        this(referent, (ReferenceQueue<? super T>) null);
+    }
+
+    Reference(T referent, ReferenceQueue<? super T> queue) {
+        jsWeakRef = new JsWeakRef<>(referent);
+    }
+
+    public T get() {
+        return this.referent;
+    }
+
+    public void clear() {
+        this.jsWeakRef = null;
+    }
+
+    public boolean isEnqueued() {
+        return false;
+    }
+
+    public boolean enqueue() {
+        throw new IllegalStateException("never called when emulated");
+    }
+
+    protected Object clone() throws CloneNotSupportedException {
+        throw new CloneNotSupportedException();
+    }
+
+}
\ No newline at end of file
diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/ReferenceQueue.java b/web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/ReferenceQueue.java
new file mode 100644
index 00000000000..b340fd316b9
--- /dev/null
+++ b/web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/ReferenceQueue.java
@@ -0,0 +1,5 @@
+package java.lang.ref;
+
+public class ReferenceQueue<T> {
+
+}
\ No newline at end of file
diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/SoftReference.java b/web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/SoftReference.java
new file mode 100644
index 00000000000..f18650366ea
--- /dev/null
+++ b/web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/SoftReference.java
@@ -0,0 +1,10 @@
+package java.lang.ref;
+
+public class SoftReference<T> extends Reference<T> {
+    public SoftReference(T referent) {
+        super(referent);
+    }
+    public SoftReference(T referent, ReferenceQueue<? super T> q) {
+        super(referent, q);
+    }
+}

From a8dbe51593f9035489be8a3abdfd39026b793b2d Mon Sep 17 00:00:00 2001
From: Colin Alworth
Date: Fri, 9 Feb 2024 11:24:34 -0600
Subject: [PATCH 014/219] Use pooling instead of supersourcing it

---
 .../main/resources/io/deephaven/Util.gwt.xml  |   2 +-
 .../io/deephaven/chunk/Chunk.gwt.xml          |   2 +-
 .../util/pools/ChunkPoolReleaseTracking.java  |  41 +++
 .../chunk/util/pools/MultiChunkPool.java      | 305 ------------------
 .../web/super/java/lang/ref/Reference.java    |   2 +-
 5 files changed, 44 insertions(+), 308 deletions(-)
 create mode 100644 web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/chunk/util/pools/ChunkPoolReleaseTracking.java
 delete mode 100644 web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/chunk/util/pools/MultiChunkPool.java

diff --git a/Util/src/main/resources/io/deephaven/Util.gwt.xml b/Util/src/main/resources/io/deephaven/Util.gwt.xml
index c8b4989cfa6..d8e3aa692ad 100644
--- a/Util/src/main/resources/io/deephaven/Util.gwt.xml
+++ b/Util/src/main/resources/io/deephaven/Util.gwt.xml
@@ -4,7 +4,7 @@
-
+
\ No newline at end of file
diff --git a/engine/chunk/src/main/resources/io/deephaven/chunk/Chunk.gwt.xml b/engine/chunk/src/main/resources/io/deephaven/chunk/Chunk.gwt.xml
index e90134fedc6..4747791ddba 100644
--- a/engine/chunk/src/main/resources/io/deephaven/chunk/Chunk.gwt.xml
+++ b/engine/chunk/src/main/resources/io/deephaven/chunk/Chunk.gwt.xml
@@ -2,5 +2,5 @@
-
+
\ No newline at end of file
diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/chunk/util/pools/ChunkPoolReleaseTracking.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/chunk/util/pools/ChunkPoolReleaseTracking.java
new file mode 100644
index 00000000000..9acff6511d7
--- /dev/null
+++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/chunk/util/pools/ChunkPoolReleaseTracking.java
@@ -0,0 +1,41 @@
+/**
+ * Copyright (c) 2016-2022 Deephaven Data Labs and Patent Pending
+ */
+package io.deephaven.chunk.util.pools;
+
+/**
+ * Support for release tracking, in order to detect chunk release errors.
+ */
+public final class ChunkPoolReleaseTracking {
+
+    public static void enableStrict() {
+    }
+
+    public static void enable() {
+    }
+
+    private static void enable(final Object factory, boolean preCheck) {
+
+    }
+
+    public static void disable() {
+    }
+
+    static <CHUNK_TYPE extends PoolableChunk> CHUNK_TYPE onTake(CHUNK_TYPE chunk) {
+        return chunk;
+    }
+
+    static <CHUNK_TYPE extends PoolableChunk> CHUNK_TYPE onGive(CHUNK_TYPE chunk) {
+        return chunk;
+    }
+
+    public static void check() {
+
+    }
+
+    public static void checkAndDisable() {
+
+    }
+
+    private ChunkPoolReleaseTracking() {}
+}
diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/chunk/util/pools/MultiChunkPool.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/chunk/util/pools/MultiChunkPool.java
deleted file mode 100644
index 5a857e855d4..00000000000
--- a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/chunk/util/pools/MultiChunkPool.java
+++ /dev/null
@@ -1,305 +0,0 @@
-/**
- * Copyright (c) 2016-2022 Deephaven Data Labs and Patent Pending
- */
-package io.deephaven.chunk.util.pools;
-
-import io.deephaven.chunk.ChunkType;
-import io.deephaven.chunk.ResettableBooleanChunk;
-import io.deephaven.chunk.ResettableByteChunk;
-import io.deephaven.chunk.ResettableCharChunk;
-import io.deephaven.chunk.ResettableDoubleChunk;
-import io.deephaven.chunk.ResettableFloatChunk;
-import io.deephaven.chunk.ResettableIntChunk;
-import io.deephaven.chunk.ResettableLongChunk;
-import io.deephaven.chunk.ResettableObjectChunk;
-import io.deephaven.chunk.ResettableShortChunk;
-import io.deephaven.chunk.ResettableWritableBooleanChunk;
-import io.deephaven.chunk.ResettableWritableByteChunk;
-import io.deephaven.chunk.ResettableWritableCharChunk;
-import io.deephaven.chunk.ResettableWritableDoubleChunk;
-import io.deephaven.chunk.ResettableWritableFloatChunk;
-import io.deephaven.chunk.ResettableWritableIntChunk;
-import io.deephaven.chunk.ResettableWritableLongChunk;
-import io.deephaven.chunk.ResettableWritableObjectChunk;
-import io.deephaven.chunk.ResettableWritableShortChunk;
-import io.deephaven.chunk.WritableBooleanChunk;
-import io.deephaven.chunk.WritableByteChunk;
-import io.deephaven.chunk.WritableCharChunk;
-import io.deephaven.chunk.WritableDoubleChunk;
-import io.deephaven.chunk.WritableFloatChunk;
-import io.deephaven.chunk.WritableIntChunk;
-import io.deephaven.chunk.WritableLongChunk;
-import io.deephaven.chunk.WritableObjectChunk;
-import io.deephaven.chunk.WritableShortChunk;
-import io.deephaven.chunk.attributes.Any;
-import org.jetbrains.annotations.NotNull;
-
-import java.util.Collections;
-import java.util.EnumMap;
-import java.util.Map;
-
-/**
- * Provides a set of per-type {@link ChunkPool}s. Normally accessed via a {@link ThreadLocal}, to allow some threads to
- * share a common pool and others to allocate their own.
- */ -public final class MultiChunkPool implements BooleanChunkPool, ByteChunkPool, CharChunkPool, ShortChunkPool, - IntChunkPool, LongChunkPool, FloatChunkPool, DoubleChunkPool, ObjectChunkPool { - private static final MultiChunkPool SHARED_POOL = new MultiChunkPool(); - public static MultiChunkPool forThisThread() { - return SHARED_POOL; - } - - @Override - public ChunkPool asChunkPool() { - throw new UnsupportedOperationException( - "MultiChunkPool can't create a ChunkPool, call this on the specific type required"); - } - - @Override - public WritableBooleanChunk takeWritableBooleanChunk(int capacity) { - return WritableBooleanChunk.makeWritableChunkForPool(capacity); - } - - @Override - public void giveWritableBooleanChunk(@NotNull WritableBooleanChunk writableBooleanChunk) { - } - - @Override - public ResettableBooleanChunk takeResettableBooleanChunk() { - return ResettableBooleanChunk.makeResettableChunkForPool(); - } - - @Override - public void giveResettableBooleanChunk(@NotNull ResettableBooleanChunk resettableBooleanChunk) { - } - - @Override - public ResettableWritableBooleanChunk takeResettableWritableBooleanChunk() { - return ResettableWritableBooleanChunk.makeResettableChunkForPool(); - } - - @Override - public void giveResettableWritableBooleanChunk( - @NotNull ResettableWritableBooleanChunk resettableWritableBooleanChunk) { - } - - @Override - public WritableCharChunk takeWritableCharChunk(int capacity) { - return WritableCharChunk.makeWritableChunkForPool(capacity); - } - - @Override - public void giveWritableCharChunk(@NotNull WritableCharChunk writableCharChunk) { - } - - @Override - public ResettableCharChunk takeResettableCharChunk() { - return ResettableCharChunk.makeResettableChunkForPool(); - } - - @Override - public void giveResettableCharChunk(@NotNull ResettableCharChunk resettableCharChunk) { - } - - @Override - public ResettableWritableCharChunk takeResettableWritableCharChunk() { - return ResettableWritableCharChunk.makeResettableChunkForPool(); - } - - @Override - public void giveResettableWritableCharChunk(@NotNull ResettableWritableCharChunk resettableWritableCharChunk) { - } - - @Override - public WritableByteChunk takeWritableByteChunk(int capacity) { - return WritableByteChunk.makeWritableChunkForPool(capacity); - } - - @Override - public void giveWritableByteChunk(@NotNull WritableByteChunk writableByteChunk) { - } - - @Override - public ResettableByteChunk takeResettableByteChunk() { - return ResettableByteChunk.makeResettableChunkForPool(); - } - - @Override - public void giveResettableByteChunk(@NotNull ResettableByteChunk resettableByteChunk) { - } - - @Override - public ResettableWritableByteChunk takeResettableWritableByteChunk() { - return ResettableWritableByteChunk.makeResettableChunkForPool(); - } - - @Override - public void giveResettableWritableByteChunk(@NotNull ResettableWritableByteChunk resettableWritableByteChunk) { - } - - @Override - public WritableShortChunk takeWritableShortChunk(int capacity) { - return WritableShortChunk.makeWritableChunkForPool(capacity); - } - - @Override - public void giveWritableShortChunk(@NotNull WritableShortChunk writableShortChunk) { - } - - @Override - public ResettableShortChunk takeResettableShortChunk() { - return ResettableShortChunk.makeResettableChunkForPool(); - } - - @Override - public void giveResettableShortChunk(@NotNull ResettableShortChunk resettableShortChunk) { - } - - @Override - public ResettableWritableShortChunk takeResettableWritableShortChunk() { - return 
ResettableWritableShortChunk.makeResettableChunkForPool(); - } - - @Override - public void giveResettableWritableShortChunk( - @NotNull ResettableWritableShortChunk resettableWritableShortChunk) { - } - - @Override - public WritableIntChunk takeWritableIntChunk(int capacity) { - return WritableIntChunk.makeWritableChunkForPool(capacity); - } - - @Override - public void giveWritableIntChunk(@NotNull WritableIntChunk writableIntChunk) { - } - - @Override - public ResettableIntChunk takeResettableIntChunk() { - return ResettableIntChunk.makeResettableChunkForPool(); - } - - @Override - public void giveResettableIntChunk(@NotNull ResettableIntChunk resettableIntChunk) { - } - - @Override - public ResettableWritableIntChunk takeResettableWritableIntChunk() { - return ResettableWritableIntChunk.makeResettableChunkForPool(); - } - - @Override - public void giveResettableWritableIntChunk(@NotNull ResettableWritableIntChunk resettableWritableIntChunk) { - } - - @Override - public WritableLongChunk takeWritableLongChunk(int capacity) { - return WritableLongChunk.makeWritableChunkForPool(capacity); - } - - @Override - public void giveWritableLongChunk(@NotNull WritableLongChunk writableLongChunk) { - } - - @Override - public ResettableLongChunk takeResettableLongChunk() { - return ResettableLongChunk.makeResettableChunkForPool(); - } - - @Override - public void giveResettableLongChunk(@NotNull ResettableLongChunk resettableLongChunk) { - } - - @Override - public ResettableWritableLongChunk takeResettableWritableLongChunk() { - return ResettableWritableLongChunk.makeResettableChunkForPool(); - } - - @Override - public void giveResettableWritableLongChunk(@NotNull ResettableWritableLongChunk resettableWritableLongChunk) { - } - - @Override - public WritableFloatChunk takeWritableFloatChunk(int capacity) { - return WritableFloatChunk.makeWritableChunkForPool(capacity); - } - - @Override - public void giveWritableFloatChunk(@NotNull WritableFloatChunk writableFloatChunk) { - } - - @Override - public ResettableFloatChunk takeResettableFloatChunk() { - return ResettableFloatChunk.makeResettableChunkForPool(); - } - - @Override - public void giveResettableFloatChunk(@NotNull ResettableFloatChunk resettableFloatChunk) { - } - - @Override - public ResettableWritableFloatChunk takeResettableWritableFloatChunk() { - return ResettableWritableFloatChunk.makeResettableChunkForPool(); - } - - @Override - public void giveResettableWritableFloatChunk( - @NotNull ResettableWritableFloatChunk resettableWritableFloatChunk) { - } - - @Override - public WritableDoubleChunk takeWritableDoubleChunk(int capacity) { - return WritableDoubleChunk.makeWritableChunkForPool(capacity); - } - - @Override - public void giveWritableDoubleChunk(@NotNull WritableDoubleChunk writableDoubleChunk) { - } - - @Override - public ResettableDoubleChunk takeResettableDoubleChunk() { - return ResettableDoubleChunk.makeResettableChunkForPool(); - } - - @Override - public void giveResettableDoubleChunk(@NotNull ResettableDoubleChunk resettableDoubleChunk) { - } - - @Override - public ResettableWritableDoubleChunk takeResettableWritableDoubleChunk() { - return ResettableWritableDoubleChunk.makeResettableChunkForPool(); - } - - @Override - public void giveResettableWritableDoubleChunk( - @NotNull ResettableWritableDoubleChunk resettableWritableDoubleChunk) { - } - - @Override - public WritableObjectChunk takeWritableObjectChunk(int capacity) { - return WritableObjectChunk.makeWritableChunkForPool(capacity); - } - - @Override - public void 
giveWritableObjectChunk(@NotNull WritableObjectChunk writableObjectChunk) { - } - - @Override - public ResettableObjectChunk takeResettableObjectChunk() { - return ResettableObjectChunk.makeResettableChunkForPool(); - } - - @Override - public void giveResettableObjectChunk(@NotNull ResettableObjectChunk resettableObjectChunk) { - } - - @Override - public ResettableWritableObjectChunk takeResettableWritableObjectChunk() { - return ResettableWritableObjectChunk.makeResettableChunkForPool(); - } - - @Override - public void giveResettableWritableObjectChunk( - @NotNull ResettableWritableObjectChunk resettableWritableObjectChunk) { - } -} diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/Reference.java b/web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/Reference.java index 4570799b802..46448b46c8f 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/Reference.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/Reference.java @@ -15,7 +15,7 @@ public abstract class Reference { } public T get() { - return this.referent; + return this.jsWeakRef.deref(); } public void clear() { From bcd4ccb3ac6463e21664ffd15802790c53efe30a Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 1 Mar 2024 12:49:27 -0600 Subject: [PATCH 015/219] Remove useless line of code --- .../deephaven/engine/rowset/impl/ExternalizableRowSetUtils.java | 1 - .../web/client/api/barrage/CompressedRangeSetReader.java | 1 - 2 files changed, 2 deletions(-) diff --git a/engine/rowset/src/main/java/io/deephaven/engine/rowset/impl/ExternalizableRowSetUtils.java b/engine/rowset/src/main/java/io/deephaven/engine/rowset/impl/ExternalizableRowSetUtils.java index 242e94ed30e..5e992dfd442 100644 --- a/engine/rowset/src/main/java/io/deephaven/engine/rowset/impl/ExternalizableRowSetUtils.java +++ b/engine/rowset/src/main/java/io/deephaven/engine/rowset/impl/ExternalizableRowSetUtils.java @@ -107,7 +107,6 @@ private static void flushShorts(@NotNull final DataOutput out, @NotNull final TS consecutiveBytes++; } else { consecutiveBytes = 0; - shortCount += consecutiveBytes; shortCount++; } if (consecutiveBytes > 3) { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/CompressedRangeSetReader.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/CompressedRangeSetReader.java index 6a069c279bc..683b38b0e37 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/CompressedRangeSetReader.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/CompressedRangeSetReader.java @@ -101,7 +101,6 @@ private static void flushShorts(ByteBuffer payload, ShortBuffer shorts) { consecutiveBytes++; } else { consecutiveBytes = 0; - shortCount += consecutiveBytes; shortCount++; } if (consecutiveBytes > 3) { From 72c3e1d49481775645fd6b7e087136312cb0cdfe Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 1 Mar 2024 12:49:41 -0600 Subject: [PATCH 016/219] Promote assert to exception --- .../src/main/java/io/deephaven/web/client/api/JsTable.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java index 69643c13e84..9775e446ffe 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java @@ -312,7 +312,9 @@ public 
boolean isAlive() { @Override public ClientTableState state() { - assert currentState != null : "Table already closed, cannot be used again"; + if (currentState == null) { + throw new IllegalStateException("Table already closed, cannot be used again"); + } return currentState; } From 37cb1428aabab2a904c65b01ba3fb49c77644f8c Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 5 Mar 2024 14:23:23 -0600 Subject: [PATCH 017/219] Roughed in barrage table impl --- .../barrage/data/WebBarrageSubscription.java | 333 ++++++++++++++++++ .../io/deephaven/web/shared/data/Range.java | 14 +- .../deephaven/web/shared/data/RangeSet.java | 78 +++- .../web/shared/data/RangeSetTest.java | 15 + 4 files changed, 427 insertions(+), 13 deletions(-) create mode 100644 web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java new file mode 100644 index 00000000000..3f9290cba21 --- /dev/null +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java @@ -0,0 +1,333 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.web.client.api.barrage.data; + +import io.deephaven.chunk.Chunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.web.client.api.barrage.WebBarrageMessage; +import io.deephaven.web.client.api.barrage.def.InitialTableDefinition; +import io.deephaven.web.client.state.ClientTableState; +import io.deephaven.web.shared.data.Range; +import io.deephaven.web.shared.data.RangeSet; +import io.deephaven.web.shared.data.ShiftedRange; + +import java.util.Arrays; +import java.util.BitSet; +import java.util.Iterator; +import java.util.NavigableSet; +import java.util.PrimitiveIterator; +import java.util.TreeMap; + +public abstract class WebBarrageSubscription { + public interface WebDataSink { + void fillChunk(Chunk data, PrimitiveIterator.OfLong destIterator); + + default void ensureCapacity(long size) {} + } + + protected final ClientTableState state; + protected final RangeSet currentRowSet = RangeSet.empty(); + + protected long capacity = 0; + protected final WebDataSink[] destSources; + + protected RangeSet serverViewport; + protected BitSet serverColumns; + protected boolean serverReverseViewport; + + public WebBarrageSubscription(ClientTableState state) { + this.state = state; + destSources = new WebDataSink[state.getTableDef().getColumns().length]; + } + + protected abstract void applyUpdates(WebBarrageMessage message); + + protected void updateServerViewport(RangeSet viewport, BitSet columns, boolean reverseViewport) { + serverViewport = viewport; + serverColumns = columns == null || columns.cardinality() == numColumns() ? null : columns; + serverReverseViewport = reverseViewport; + + // TODO viewport change event? 
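+        // serverColumns is normalized to null whenever every column is subscribed, so checks like
+        // isSubscribedColumn() can treat null as "all columns are present"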
+    }
+
+    private int numColumns() {
+        return getDefinition().getColumns().length;
+    }
+
+    private InitialTableDefinition getDefinition() {
+        return state.getTableDef();
+    }
+
+    public RangeSet getCurrentRowSet() {
+        return currentRowSet;
+    }
+
+    protected boolean isSubscribedColumn(int ii) {
+        return serverColumns == null || serverColumns.get(ii);
+    }
+
+    // public static class BlinkImpl extends WebBarrageSubscription {
+    //
+    // }
+
+    public static class RedirectedImpl extends WebBarrageSubscription {
+        private RangeSet freeset = new RangeSet();
+        private final TreeMap<Long, Long> redirectedIndexes = new TreeMap<>();
+
+        public RedirectedImpl(ClientTableState state) {
+            super(state);
+        }
+
+        @Override
+        protected void applyUpdates(WebBarrageMessage message) {
+            if (message.isSnapshot) {
+                updateServerViewport(message.snapshotRowSet, message.snapshotColumns, message.snapshotRowSetIsReversed);
+            }
+
+            final boolean mightBeInitialSnapshot = getCurrentRowSet().isEmpty() && message.isSnapshot;
+
+            RangeSet populatedRows =
+                    serverViewport != null ? currentRowSet.subsetForPositions(serverViewport, serverReverseViewport)
+                            : null;
+
+            // Apply removes to our local rowset
+            message.rowsRemoved.rangeIterator().forEachRemaining(currentRowSet::removeRange);
+            if (serverViewport != null) {
+                // limit the removed rows to what intersect the viewport
+                serverViewport.rangeIterator().forEachRemaining(r -> message.rowsRemoved.removeRange(r));
+            }
+            // free rows that are no longer needed
+            freeRows(message.rowsRemoved);
+
+            // Apply shifts
+
+            // Shift moved rows in the redir index
+            boolean hasReverseShift = false;
+            final ShiftedRange[] shiftedRanges = message.shifted;
+            RangeSetBulkHelper currentRowsetShifter =
+                    new RangeSetBulkHelper(currentRowSet, RangeSetBulkHelper.Operation.APPEND);
+            RangeSetBulkHelper populatedRowsetShifter = populatedRows == null ? null
+                    : new RangeSetBulkHelper(populatedRows, RangeSetBulkHelper.Operation.APPEND);
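+            // Forward (positive-delta) shifts are applied from the highest range down, and reverse
+            // (negative-delta) shifts from the lowest range up, so that a moved entry can never land
+            // on a key that hasn't itself been shifted out of the way yet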
+            for (int i = shiftedRanges.length - 1; i >= 0; --i) {
+                final ShiftedRange shiftedRange = shiftedRanges[i];
+                final long offset = shiftedRange.getDelta();
+                if (offset < 0) {
+                    hasReverseShift = true;
+                    continue;
+                }
+                currentRowSet.removeRange(shiftedRange.getRange());
+                if (populatedRows != null) {
+                    populatedRows.removeRange(shiftedRange.getRange());
+                }
+                final NavigableSet<Long> toMove = redirectedIndexes.navigableKeySet()
+                        .subSet(shiftedRange.getRange().getFirst(), true, shiftedRange.getRange().getLast(), true);
+                // iterate backward and move them forward
+                for (Long key : toMove.descendingSet()) {
+                    long shiftedKey = key + offset;
+                    Long oldValue = redirectedIndexes.put(shiftedKey, redirectedIndexes.remove(key));
+                    assert oldValue == null : shiftedKey + " already has a value, " + oldValue;
+                    currentRowsetShifter.append(shiftedKey);
+                }
+            }
+            if (hasReverseShift) {
+                for (int i = 0; i < shiftedRanges.length; ++i) {
+                    final ShiftedRange shiftedRange = shiftedRanges[i];
+                    final long offset = shiftedRange.getDelta();
+                    if (offset > 0) {
+                        continue;
+                    }
+                    currentRowSet.removeRange(shiftedRange.getRange());
+                    if (populatedRows != null) {
+                        populatedRows.removeRange(shiftedRange.getRange());
+                    }
+                    final NavigableSet<Long> toMove = redirectedIndexes.navigableKeySet()
+                            .subSet(shiftedRange.getRange().getFirst(), true, shiftedRange.getRange().getLast(), true);
+                    // iterate forward and move them backward
+                    for (Long key : toMove) {
+                        long shiftedKey = key + offset;
+                        Long oldValue = redirectedIndexes.put(shiftedKey, redirectedIndexes.remove(key));
+                        assert oldValue == null : shiftedKey + " already has a value, " + oldValue;
+                        currentRowsetShifter.append(shiftedKey);
+                    }
+                }
+            }
+            currentRowsetShifter.flush();
+            populatedRowsetShifter.flush();
+
+
+            message.rowsAdded.rangeIterator().forEachRemaining(currentRowSet::addRange);
+
+            RangeSet totalMods = new RangeSet();
+            for (int i = 0; i < message.modColumnData.length; i++) {
+                WebBarrageMessage.ModColumnData column = message.modColumnData[i];
+                column.rowsModified.rangeIterator().forEachRemaining(totalMods::addRange);
+            }
+
+            if (!message.rowsIncluded.isEmpty()) {
+                // int addBatchSize = (int) Math.min(message.rowsIncluded.size(), 1 << 16);//reexamine this constant in
+                // light of browsers being browsers
+
+                if (mightBeInitialSnapshot) {
+                    capacity = message.rowsIncluded.size();
+                    Arrays.stream(destSources).forEach(s -> s.ensureCapacity(capacity));
+                    freeset.addRange(new Range(0, capacity - 1));
+                }
+
+                RangeSet destinationRowSet = getFreeRows(message.rowsIncluded.size());
+                // RangeSet destinationRowSet = new RangeSet();
+                // message.rowsIncluded.indexIterator().forEachRemaining((long row) -> {
+                //     destinationRowSet.addRange(new Range(row, row));
+                // });
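+                // The slots in destinationRowSet are offsets into the backing destSources, not table
+                // row keys; redirectedIndexes tracks which slot holds each row's data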
+                // remember which slot holds each newly added row, so shifts, modifies, and removes
+                // can later find the row's data
+                PrimitiveIterator.OfLong includedKeys = message.rowsIncluded.indexIterator();
+                PrimitiveIterator.OfLong destSlots = destinationRowSet.indexIterator();
+                while (includedKeys.hasNext()) {
+                    redirectedIndexes.put(includedKeys.nextLong(), destSlots.nextLong());
+                }
+
+                for (int ii = 0; ii < message.addColumnData.length; ii++) {
+                    if (isSubscribedColumn(ii)) {
+                        WebBarrageMessage.AddColumnData column = message.addColumnData[ii];
+                        PrimitiveIterator.OfLong destIterator = destinationRowSet.indexIterator();
+                        for (int j = 0; j < column.data.size(); j++) {
+                            Chunk<Values> chunk = column.data.get(j);
+                            destSources[ii].fillChunk(chunk, destIterator);
+                        }
+                        assert !destIterator.hasNext();
+                    }
+                }
+            }
+
+            for (int ii = 0; ii < message.modColumnData.length; ii++) {
+                WebBarrageMessage.ModColumnData column = message.modColumnData[ii];
+                if (column.rowsModified.isEmpty()) {
+                    continue;
+                }
+
+                PrimitiveIterator.OfLong destIterator = column.rowsModified.indexIterator();
+                for (int j = 0; j < column.data.size(); j++) {
+                    Chunk<Values> chunk = column.data.get(j);
+                    destSources[ii].fillChunk(chunk, destIterator);
+                }
+                assert !destIterator.hasNext();
+            }
+            if (serverViewport != null) {
+                assert populatedRows != null;
+                RangeSet newPopulated = currentRowSet.subsetForPositions(serverViewport, serverReverseViewport);
+                // free anything that was populated before this update but is no longer in the viewport
+                newPopulated.rangeIterator().forEachRemaining(populatedRows::removeRange);
+                freeRows(populatedRows);
+            }
+        }
+
+        private RangeSet getFreeRows(long size) {
+            if (size <= 0) {
+                return RangeSet.empty();
+            }
+            boolean needsResizing = false;
+            final RangeSet result;
+            if (capacity == 0) {
+                capacity = Long.highestOneBit(Math.max(size * 2, 8));
+                freeset.addRange(new Range(size, capacity - 1));
+                result = new RangeSet();
+                result.addRange(new Range(0, size - 1));
+                needsResizing = true;
+            } else {
+                result = new RangeSet();
+                Iterator<Range> iterator = freeset.rangeIterator();
+                long required = Math.min(size, freeset.size());
+                while (required > 0) {
+                    assert iterator.hasNext();
+                    Range next = iterator.next();
+                    result.addRange(
+                            next.size() < required ? next : new Range(next.getFirst(), next.getFirst() + required - 1));
+                    required -= next.size();
+                }
+
+                if (freeset.size() < size) {
+                    // we need more, allocate extra, return some, grow the freeset for next time
+                    long usedSlots = capacity - freeset.size();
+                    long newSlots = size - freeset.size();
+                    long prevCapacity = capacity;
+
+                    do {
+                        capacity *= 2;
+                    } while ((capacity - usedSlots) < size);
+
+                    result.addRange(new Range(prevCapacity, prevCapacity + newSlots - 1));
+
+                    freeset = new RangeSet();
+                    if (prevCapacity + newSlots < capacity) {
+                        freeset.addRange(new Range(prevCapacity + newSlots, capacity - 1));
+                    }
+                    needsResizing = true;
+                } else {
+                    // the slots handed out above are no longer free
+                    result.rangeIterator().forEachRemaining(freeset::removeRange);
+                }
+            }
+
+            if (needsResizing) {
+                Arrays.stream(destSources).forEach(s -> s.ensureCapacity(capacity));
+            }
+
+            return result;
+        }
+
+        private void freeRows(RangeSet removed) {
+            RangeSetBulkHelper reusableHelper = new RangeSetBulkHelper(freeset, RangeSetBulkHelper.Operation.APPEND);
+            removed.indexIterator().forEachRemaining((long index) -> {
+                // rows outside the subscribed viewport were never registered, so skip them
+                Long dest = redirectedIndexes.remove(index);
+                if (dest != null) {
+                    reusableHelper.append(dest);
+                }
+            });
+            reusableHelper.flush();
+        }
+    }
+
+    /**
+     * Helper to avoid appending many times when modifying indexes. The append() method should be called for each key
+     * _in order_ to ensure that addRange/removeRange isn't called excessively. When no more items will be added,
+     * flush() must be called.
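+     * <p>
+     * For example, appending the keys 1, 2, 3, then 10 to an APPEND helper coalesces the contiguous run into a single
+     * range, so the underlying RangeSet is only touched twice:
+     *
+     * <pre>
+     * RangeSetBulkHelper helper = new RangeSetBulkHelper(rangeSet, Operation.APPEND);
+     * helper.append(1);
+     * helper.append(2);
+     * helper.append(3); // still buffering the run [1, 3]
+     * helper.append(10); // not adjacent, so rangeSet.addRange([1, 3]) fires and a new run starts
+     * helper.flush(); // rangeSet.addRange([10, 10])
+     * </pre>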
+ */ + private static class RangeSetBulkHelper { + enum Operation { + APPEND, REMOVE + } + + private final RangeSet rangeSet; + private final Operation operation; + + private long currentFirst = -1; + private long currentLast; + + public RangeSetBulkHelper(final RangeSet rangeSet, Operation operation) { + this.rangeSet = rangeSet; + this.operation = operation; + } + + public void append(long key) { + assert key >= 0; + + if (currentFirst == -1) { + // first key to be added, move both first and last + currentFirst = key; + currentLast = key; + } else if (key == currentLast + 1) { + // key appends to our current range + currentLast = key; + } else if (key == currentFirst - 1) { + // key appends to our current range + currentFirst = key; + } else { + // existing range doesn't match the new item, finish the old range and start a new one + if (operation == Operation.APPEND) { + rangeSet.addRange(new Range(currentFirst, currentLast)); + } else { + rangeSet.removeRange(new Range(currentFirst, currentLast)); + } + currentFirst = key; + currentLast = key; + } + } + + public void flush() { + if (currentFirst != -1) { + if (operation == Operation.APPEND) { + rangeSet.addRange(new Range(currentFirst, currentLast)); + } else { + rangeSet.removeRange(new Range(currentFirst, currentLast)); + } + currentFirst = -1; + } + } + } + +} diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Range.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Range.java index 16ef88a4e60..bf74f290d09 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Range.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Range.java @@ -11,9 +11,9 @@ * instances will compare only by start - the overlap(Range) method should be used to see if two ranges share at least * one item. */ -public class Range implements Serializable, Comparable { - private long first; - private long last; +public class Range implements Comparable { + private final long first; + private final long last; // serialization Range() { @@ -36,14 +36,6 @@ public long getLast() { return last; } - void setFirst(long first) { - this.first = first; - } - - void setLast(long last) { - this.last = last; - } - @Override public int compareTo(@Nonnull Range o) { return Long.compare(first, o.first); diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java index c782582a425..7a20483716f 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java @@ -3,7 +3,6 @@ // package io.deephaven.web.shared.data; -import java.io.Serializable; import java.util.Arrays; import java.util.Iterator; import java.util.PrimitiveIterator; @@ -14,7 +13,7 @@ * Iteration protocol, but for now has one method which returns an iterator, and also supports querying the size. * Additionally, we may add support for creating RangeSet objects to better serve some use cases. 
 */
-public class RangeSet implements Serializable {
+public class RangeSet {
 
     public static RangeSet empty() {
         return new RangeSet();
@@ -358,4 +357,79 @@ Range[] getSortedRanges() {
     void setSortedRanges(Range[] sortedRanges) {
         this.sortedRanges = sortedRanges;
     }
+
+    public RangeSet subsetForPositions(RangeSet positions, boolean reversed) {
+        if (reversed) {
+            throw new UnsupportedOperationException("reversed=true");
+        }
+        if (positions.isEmpty() || isEmpty()) {
+            return empty();
+        }
+        // if (positions.sortedRanges.length == 1) {
+        // // Desired range is contiguous
+        // List<Range> ranges = new ArrayList<>();
+        // final long offset = positions.getFirstRow();
+        // final long limit = positions.getLastRow();
+        // int i = 0;
+        // long position = 0;
+        // for (; i < sortedRanges.length; i++) {
+        // Range r = sortedRanges[i];
+        // if (offset < position + r.size()) {
+        // // Haven't hit the first range yet
+        // position += r.size();
+        // continue;
+        // }
+        // // This range is part of the desired range, take some/all of it
+        // // TODO wrong, we want the min to measure the index of the range to take
+        // ranges.add(new Range(position, Math.min(r.getLast(), limit)));
+        // position += r.size();
+        // i++;
+        // break;
+        // }
+        // for (; i < sortedRanges.length; i++) {
+        // Range r = sortedRanges[i];
+        // if (limit > position + r.size()) {
+        // // Past the end of the desired positions
+        // break;
+        // }
+        // // ranges.add(new Range(r.getFirst(), Math.))
+        //
+        // }
+        //
+        //
+        // return RangeSet.fromSortedRanges(ranges.toArray(Range[]::new));
+        // }
+
+        PrimitiveIterator.OfLong positionIter = positions.indexIterator();
+        PrimitiveIterator.OfLong valueIter = indexIterator();
+        long i = 0;
+        RangeSet result = new RangeSet();
+
+        // There must be at least one of each
+        long position = positionIter.nextLong();
+        long val = valueIter.nextLong();
+
+        done: do {
+            while (i != position) {
+                if (!valueIter.hasNext()) {
+                    break done;
+                }
+                i++;
+                val = valueIter.nextLong();
+            }
+
+            result.addRange(new Range(val, val));
+
+            if (!positionIter.hasNext() || !valueIter.hasNext()) {
+                break;
+            }
+            position = positionIter.nextLong();
+            i++;
+            val = valueIter.nextLong();
+        } while (true);
+
+        return result;
+    }
 }
diff --git a/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java b/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java
index d28a702bbbb..717aa3284a7 100644
--- a/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java
+++ b/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java
@@ -475,4 +475,19 @@ public void testLarge() {
 
         assertEquals(RangeSet.ofItems(largeA, largeB), rangeSet);
     }
+
+    @Test
+    public void testSubsetForPositions() {
+        RangeSet initialRange = RangeSet.ofItems(2, 4, 6, 8);
+        assertEquals(RangeSet.ofItems(4, 8), initialRange.subsetForPositions(RangeSet.ofItems(1, 3), false));
+        assertEquals(RangeSet.ofItems(4, 8), initialRange.subsetForPositions(RangeSet.ofItems(1, 3, 4), false));
+        assertEquals(RangeSet.ofItems(4, 8), initialRange.subsetForPositions(RangeSet.ofItems(1, 3, 5), false));
+        assertEquals(initialRange, initialRange.subsetForPositions(RangeSet.ofItems(0, 1, 2, 3, 4, 5, 100), false));
+        assertEquals(initialRange, initialRange.subsetForPositions(RangeSet.ofItems(0, 1, 2, 3, 100), false));
+
+        assertEquals(RangeSet.ofItems(4, 6, 8), initialRange.subsetForPositions(RangeSet.ofRange(1, 3), false));
+        assertEquals(RangeSet.ofItems(2, 4, 6), initialRange.subsetForPositions(RangeSet.ofRange(0, 2), false));
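+        // The positions argument ranks into this RangeSet's sorted keys rather than naming keys
+        // directly: position 1 of {2, 4, 6, 8} is 4. Positions past the end of the set are ignored.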
+ assertEquals(initialRange, initialRange.subsetForPositions(RangeSet.ofRange(0, 3), false)); + assertEquals(initialRange, initialRange.subsetForPositions(RangeSet.ofRange(0, 9), false)); + } + } From ffa1c446398489c485c89346f600b3d52d61d2f5 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 5 Mar 2024 15:53:08 -0600 Subject: [PATCH 018/219] Rough in blinking tables, fix a NPE --- .../barrage/data/WebBarrageSubscription.java | 58 +++++++++++++++++-- 1 file changed, 53 insertions(+), 5 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java index 3f9290cba21..c24001c2c2a 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java @@ -67,9 +67,56 @@ protected boolean isSubscribedColumn(int ii) { return serverColumns == null || serverColumns.get(ii); } - // public static class BlinkImpl extends WebBarrageSubscription { - // - // } + public static class BlinkImpl extends WebBarrageSubscription { + enum Mode { + BLINK, APPEND + } + + private final Mode mode; + + public BlinkImpl(ClientTableState state) { + super(state); + mode = Mode.APPEND; + } + + @Override + protected void applyUpdates(WebBarrageMessage message) { + if (message.isSnapshot) { + updateServerViewport(message.snapshotRowSet, message.snapshotColumns, message.snapshotRowSetIsReversed); + } + + assert message.shifted.length == 0; + for (int i = 0; i < message.modColumnData.length; i++) { + assert message.modColumnData[i].rowsModified.isEmpty(); + } + + if (message.rowsIncluded.isEmpty()) { + return; + } + + long addedRows = message.rowsAdded.size(); + RangeSet destinationRowSet; + if (mode == Mode.APPEND) { + destinationRowSet = RangeSet.ofRange(capacity, capacity + addedRows - 1); + capacity += addedRows; + } else { + destinationRowSet = RangeSet.ofRange(0, addedRows - 1); + capacity = addedRows; + } + Arrays.stream(destSources).forEach(s -> s.ensureCapacity(capacity)); + for (int ii = 0; ii < message.addColumnData.length; ii++) { + if (isSubscribedColumn(ii)) { + WebBarrageMessage.AddColumnData column = message.addColumnData[ii]; + PrimitiveIterator.OfLong destIterator = destinationRowSet.indexIterator(); + for (int j = 0; j < column.data.size(); j++) { + Chunk chunk = column.data.get(j); + destSources[ii].fillChunk(chunk, destIterator); + } + assert !destIterator.hasNext(); + } + } + } + } public static class RedirectedImpl extends WebBarrageSubscription { private RangeSet freeset = new RangeSet(); @@ -153,8 +200,9 @@ protected void applyUpdates(WebBarrageMessage message) { } } currentRowsetShifter.flush(); - populatedRowsetShifter.flush(); - + if (populatedRowsetShifter != null) { + populatedRowsetShifter.flush(); + } message.rowsAdded.rangeIterator().forEachRemaining(currentRowSet::addRange); From 2149c365de3b5826949f96c6510c0cd359ce7f41 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Thu, 7 Mar 2024 11:29:22 -0600 Subject: [PATCH 019/219] Fix branch formatting after rebase --- .../deephaven/engine/table/impl/util/RowSetShiftCallback.java | 3 +++ .../io/deephaven/engine/table/impl/util/ShiftCallback.java | 3 +++ .../src/main/java/io/deephaven/web/client/fu/JsWeakRef.java | 3 +++ 3 files changed, 9 insertions(+) diff --git 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/RowSetShiftCallback.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/RowSetShiftCallback.java index 01a952e9b54..72f3ffa9581 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/RowSetShiftCallback.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/RowSetShiftCallback.java @@ -1,3 +1,6 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// package io.deephaven.engine.table.impl.util; import io.deephaven.engine.rowset.RowSet; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ShiftCallback.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ShiftCallback.java index a45f707c675..82096ef1e44 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ShiftCallback.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ShiftCallback.java @@ -1,3 +1,6 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// package io.deephaven.engine.table.impl.util; public interface ShiftCallback { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/fu/JsWeakRef.java b/web/client-api/src/main/java/io/deephaven/web/client/fu/JsWeakRef.java index 0bfb8f1fd30..35de0ed3e11 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/fu/JsWeakRef.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/fu/JsWeakRef.java @@ -1,3 +1,6 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// package io.deephaven.web.client.fu; import jsinterop.annotations.JsPackage; From 3a0f9424ff92d96ff11b3780b39e082ad97347cd Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Thu, 7 Mar 2024 11:37:57 -0600 Subject: [PATCH 020/219] Minor rebase damage --- .../client/api/subscription/TableViewportSubscription.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java index e5de2078083..294f2826963 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java @@ -16,11 +16,9 @@ import io.deephaven.barrage.flatbuf.BarrageMessageWrapper; import io.deephaven.barrage.flatbuf.BarrageSnapshotOptions; import io.deephaven.barrage.flatbuf.BarrageSnapshotRequest; -import io.deephaven.barrage.flatbuf.BarrageSubscriptionRequest; import io.deephaven.barrage.flatbuf.BarrageUpdateMetadata; import io.deephaven.barrage.flatbuf.ColumnConversionMode; import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb.FlightData; -import io.deephaven.javascript.proto.dhinternal.flatbuffers.Builder; import io.deephaven.web.client.api.Callbacks; import io.deephaven.web.client.api.Column; import io.deephaven.web.client.api.HasEventHandling; @@ -33,7 +31,6 @@ import io.deephaven.web.client.api.barrage.stream.BiDiStream; import io.deephaven.web.client.fu.JsLog; import io.deephaven.web.client.state.ClientTableState; -import io.deephaven.web.shared.data.Range; import io.deephaven.web.shared.data.TableSnapshot; import jsinterop.annotations.JsMethod; import jsinterop.annotations.JsNullable; @@ -47,7 +44,6 @@ import java.util.Arrays; import java.util.BitSet; import java.util.Collections; -import 
java.util.Iterator; import static io.deephaven.web.client.api.barrage.WebBarrageUtils.serializeRanges; import static io.deephaven.web.client.api.subscription.ViewportData.NO_ROW_FORMAT_COLUMN; From 84962c7efc89466288f2461c1b6628777ceef89b Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 26 Mar 2024 14:28:12 -0500 Subject: [PATCH 021/219] Giant hack-and-slash commit that nearly compiles --- .../io/deephaven/web/client/api/Column.java | 21 +- .../io/deephaven/web/client/api/JsTable.java | 575 +++++++++--------- .../web/client/api/WorkerConnection.java | 287 +-------- .../client/api/barrage/WebBarrageMessage.java | 12 +- .../api/barrage/WebBarrageStreamReader.java | 8 +- .../client/api/barrage/WebBarrageUtils.java | 43 +- .../barrage/data/WebBarrageSubscription.java | 102 +++- .../api/barrage/def/ColumnDefinition.java | 184 ++---- .../subscription/SubscriptionTableData.java | 12 +- .../api/subscription/TableSubscription.java | 343 +++++++++-- .../TableViewportSubscription.java | 9 +- .../client/api/subscription/ViewportData.java | 402 ++++++------ .../client/api/subscription/ViewportRow.java | 4 +- .../web/client/state/ActiveTableBinding.java | 54 -- .../web/client/state/ClientTableState.java | 260 ++++---- 15 files changed, 1142 insertions(+), 1174 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/Column.java b/web/client-api/src/main/java/io/deephaven/web/client/api/Column.java index 4e4840754dd..a4c5cbedb24 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/Column.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/Column.java @@ -10,6 +10,7 @@ import jsinterop.annotations.JsProperty; import jsinterop.base.Any; +import java.util.Objects; import java.util.stream.IntStream; import java.util.stream.IntStream.Builder; @@ -21,7 +22,6 @@ public class Column { private final int index; - private final Integer formatColumnIndex; private final Integer styleColumnIndex; private final Integer formatStringColumnIndex; @@ -75,7 +75,7 @@ public Column(int jsIndex, int index, Integer formatColumnIndex, Integer styleCo boolean inputTableKeyColumn, boolean isSortable) { this.jsIndex = jsIndex; this.index = index; - this.formatColumnIndex = formatColumnIndex; + assert Objects.equals(formatColumnIndex, styleColumnIndex); this.styleColumnIndex = styleColumnIndex; this.type = type; this.name = name; @@ -170,14 +170,6 @@ public void setConstituentType(final String constituentType) { this.constituentType = constituentType; } - /** - * @deprecated Prefer {@link #getFormatStringColumnIndex()}. - */ - @Deprecated - public Integer getFormatColumnIndex() { - return formatColumnIndex; - } - public Integer getFormatStringColumnIndex() { return formatStringColumnIndex; } @@ -266,7 +258,6 @@ public CustomColumn formatDate(String expression) { public String toString() { return "Column{" + "index=" + index + - ", formatColumnIndex=" + formatColumnIndex + ", styleColumnIndex=" + styleColumnIndex + ", formatStringColumnIndex=" + formatStringColumnIndex + ", type='" + type + '\'' + @@ -285,9 +276,6 @@ public boolean equals(Object o) { if (index != column.index) return false; - if (formatColumnIndex != null ? !formatColumnIndex.equals(column.formatColumnIndex) - : column.formatColumnIndex != null) - return false; if (styleColumnIndex != null ? 
!styleColumnIndex.equals(column.styleColumnIndex) : column.styleColumnIndex != null) return false; @@ -302,7 +290,6 @@ public boolean equals(Object o) { @Override public int hashCode() { int result = index; - result = 31 * result + (formatColumnIndex != null ? formatColumnIndex.hashCode() : 0); result = 31 * result + (styleColumnIndex != null ? styleColumnIndex.hashCode() : 0); result = 31 * result + (formatStringColumnIndex != null ? formatStringColumnIndex.hashCode() : 0); result = 31 * result + type.hashCode(); @@ -311,12 +298,12 @@ public int hashCode() { } public Column withFormatStringColumnIndex(int formatStringColumnIndex) { - return new Column(jsIndex, index, formatColumnIndex, styleColumnIndex, type, name, isPartitionColumn, + return new Column(jsIndex, index, formatStringColumnIndex, styleColumnIndex, type, name, isPartitionColumn, formatStringColumnIndex, description, isInputTableKeyColumn, isSortable); } public Column withStyleColumnIndex(int styleColumnIndex) { - return new Column(jsIndex, index, formatColumnIndex, styleColumnIndex, type, name, isPartitionColumn, + return new Column(jsIndex, index, formatStringColumnIndex, styleColumnIndex, type, name, isPartitionColumn, formatStringColumnIndex, description, isInputTableKeyColumn, isSortable); } } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java index 9775e446ffe..be9e4cce22f 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java @@ -9,7 +9,6 @@ import com.vertispan.tsdefs.annotations.TsUnionMember; import elemental2.core.JsArray; import elemental2.dom.CustomEventInit; -import elemental2.dom.DomGlobal; import elemental2.promise.IThenable.ThenOnFulfilledCallbackFn; import elemental2.promise.Promise; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.hierarchicaltable_pb.RollupRequest; @@ -38,21 +37,17 @@ import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.table_pb.runchartdownsamplerequest.ZoomRange; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.ticket_pb.Ticket; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.ticket_pb.TypedTicket; -import io.deephaven.web.client.api.barrage.def.ColumnDefinition; import io.deephaven.web.client.api.barrage.def.TableAttributesDefinition; import io.deephaven.web.client.api.barrage.stream.ResponseStreamWrapper; import io.deephaven.web.client.api.batch.RequestBatcher; -import io.deephaven.web.client.api.batch.TableConfig; import io.deephaven.web.client.api.console.JsVariableType; import io.deephaven.web.client.api.filter.FilterCondition; -import io.deephaven.web.client.api.filter.FilterValue; import io.deephaven.web.client.api.input.JsInputTable; import io.deephaven.web.client.api.lifecycle.HasLifecycle; import io.deephaven.web.client.api.state.StateCache; import io.deephaven.web.client.api.subscription.TableSubscription; import io.deephaven.web.client.api.subscription.TableViewportSubscription; import io.deephaven.web.client.api.subscription.ViewportData; -import io.deephaven.web.client.api.subscription.ViewportData.MergeResults; import io.deephaven.web.client.api.subscription.ViewportRow; import io.deephaven.web.client.api.tree.JsRollupConfig; import io.deephaven.web.client.api.tree.JsTreeTable; @@ -66,14 +61,11 @@ import io.deephaven.web.client.state.ClientTableState; import 
io.deephaven.web.client.state.HasTableBinding; import io.deephaven.web.shared.data.*; -import io.deephaven.web.shared.data.TableSnapshot.SnapshotType; -import io.deephaven.web.shared.data.columns.ColumnData; import io.deephaven.web.shared.fu.JsConsumer; import io.deephaven.web.shared.fu.JsProvider; import io.deephaven.web.shared.fu.JsRunnable; import io.deephaven.web.shared.fu.RemoverFn; import javaemul.internal.annotations.DoNotAutobox; -import jsinterop.annotations.JsIgnore; import jsinterop.annotations.JsMethod; import jsinterop.annotations.JsNullable; import jsinterop.annotations.JsOptional; @@ -181,7 +173,6 @@ public class JsTable extends HasLifecycle implements HasTableBinding, JoinableTa private final int subscriptionId; private static int nextSubscriptionId; - private TableSubscription nonViewportSub; /** * Creates a new Table directly from an existing ClientTableState. The CTS manages all fetch operations, so this is @@ -736,17 +727,17 @@ public TableViewportSubscription setViewport(double firstRow, double lastRow, } } - public void setInternalViewport(double firstRow, double lastRow, Column[] columns) { - if (firstRow > lastRow) { - throw new IllegalArgumentException(firstRow + " > " + lastRow); - } - if (firstRow < 0) { - throw new IllegalArgumentException(firstRow + " < " + 0); - } - currentViewportData = null; - // we must wait for the latest stack entry that can add columns (so we get an appropriate BitSet) - state().setDesiredViewport(this, (long) firstRow, (long) lastRow, columns); - } + // private void setInternalViewport(double firstRow, double lastRow, Column[] columns) { + // if (firstRow > lastRow) { + // throw new IllegalArgumentException(firstRow + " > " + lastRow); + // } + // if (firstRow < 0) { + // throw new IllegalArgumentException(firstRow + " < " + 0); + // } + // currentViewportData = null; + // // we must wait for the latest stack entry that can add columns (so we get an appropriate BitSet) + // state().setDesiredViewport(this, (long) firstRow, (long) lastRow, columns); + // } /** * Gets the currently visible viewport. 
If the current set of operations has not yet resulted in data, it will not @@ -765,19 +756,19 @@ public Promise getViewportData() { return subscription.getInternalViewportData(); } - public Promise getInternalViewportData() { - final LazyPromise promise = new LazyPromise<>(); - final ClientTableState active = state(); - active.onRunning(state -> { - if (currentViewportData == null) { - // no viewport data received yet; let's set up a one-shot UPDATED event listener - addEventListenerOneShot(EVENT_UPDATED, ignored -> promise.succeed(currentViewportData)); - } else { - promise.succeed(currentViewportData); - } - }, promise::fail, () -> promise.fail("Table closed before viewport data was read")); - return promise.asPromise(MAX_BATCH_TIME); - } + // public Promise getInternalViewportData() { + // final LazyPromise promise = new LazyPromise<>(); + // final ClientTableState active = state(); + // active.onRunning(state -> { + // if (currentViewportData == null) { + // // no viewport data received yet; let's set up a one-shot UPDATED event listener + // addEventListenerOneShot(EVENT_UPDATED, ignored -> promise.succeed(currentViewportData)); + // } else { + // promise.succeed(currentViewportData); + // } + // }, promise::fail, () -> promise.fail("Table closed before viewport data was read")); + // return promise.asPromise(MAX_BATCH_TIME); + // } /** * Overload for java (since js just omits the optional var) @@ -800,19 +791,17 @@ public TableSubscription subscribe(JsArray columns) { */ @JsMethod public TableSubscription subscribe(JsArray columns, @JsOptional Double updateIntervalMs) { - assert nonViewportSub == null : "Can't directly subscribe to the 'private' table instance"; - // make a new table with a pUT call, listen to the subscription there return new TableSubscription(columns, this, updateIntervalMs); } - public void internalSubscribe(JsArray columns, TableSubscription sub) { - if (columns == null) { - columns = getColumns(); - } - this.nonViewportSub = sub; - - state().subscribe(this, Js.uncheckedCast(columns)); - } + // private void internalSubscribe(JsArray columns, TableSubscription sub) { + // if (columns == null) { + // columns = getColumns(); + // } + // this.nonViewportSub = sub; + // + // state().subscribe(this, Js.uncheckedCast(columns)); + // } /** * a new table containing the distinct tuples of values from the given columns that are present in the original @@ -1582,7 +1571,8 @@ public void revive(ClientTableState state) { unsuppressEvents(); LazyPromise.runLater(() -> { fireEvent(EVENT_RECONNECT); - getBinding().maybeReviveSubscription(); + // TODO + // getBinding().maybeReviveSubscription(); }); } } @@ -1610,204 +1600,204 @@ public Promise downsample(LongWrapper[] zoomRange, int pixelCount, Stri .then(state -> Promise.resolve(new JsTable(workerConnection, state))); } - private final class Debounce { - private final ClientTableState state; - private final TableTicket handle; - private final SnapshotType type; - private final RangeSet includedRows; - private final BitSet columns; - private final Object[] dataColumns; - private final double timestamp; - private final long maxRows; - - public Debounce( - TableTicket table, - SnapshotType snapshotType, - RangeSet includedRows, - BitSet columns, - Object[] dataColumns, - long maxRows) { - this.handle = table; - this.type = snapshotType; - this.includedRows = includedRows; - this.columns = columns; - this.dataColumns = dataColumns; - this.state = currentState; - this.maxRows = maxRows; - timestamp = System.currentTimeMillis(); - } - - 
public boolean isEqual(Debounce o) { - if (type == o.type) { - // this is intentionally weird. We only want to debounce when one instance is column snapshot and the - // other is row snapshot, - // so we consider two events of the same type to be incompatible with debouncing. - return false; - } - if (handle != o.handle) { - assert !handle.equals(o.handle); - return false; - } - if (state != o.state) { - assert state.getHandle() != o.state.getHandle(); - return false; - } - if (!includedRows.equals(o.includedRows)) { - return false; - } - if (!columns.equals(o.columns)) { - return false; - } - if (maxRows != o.maxRows) { - return false; - } - assert Arrays.deepEquals(dataColumns, o.dataColumns) : "Debounce is broken, remove it."; - return true; - } - } - - private Debounce debounce; - - private void handleSnapshot(TableTicket table, SnapshotType snapshotType, RangeSet includedRows, - Object[] dataColumns, BitSet columns, long maxRows) { - assert table.equals(state().getHandle()) : "Table received incorrect snapshot"; - // if the type is initial_snapshot, we've already recorded the size, so only watch for the other two updates. - // note that this will sometimes result in multiple updates on startup, so we do this ugly debounce-dance. - // When IDS-2113 is fixed, we can likely remove this code. - JsLog.debug("Received snapshot for ", table, snapshotType, includedRows, dataColumns, columns); - Debounce operation = new Debounce(table, snapshotType, includedRows, columns, dataColumns, maxRows); - if (debounce == null) { - debounce = operation; - DomGlobal.setTimeout(ignored -> processSnapshot(), DEBOUNCE_TIME); - } else if (debounce.isEqual(operation)) { - // If we think the problem is fixed, we can put `assert false` here for a while before deleting Debounce - // class - JsLog.debug("Eating duplicated operation", debounce, operation); - } else { - processSnapshot(); - debounce = operation; - DomGlobal.setTimeout(ignored -> processSnapshot(), DEBOUNCE_TIME); - } - } - - public void handleSnapshot(TableTicket handle, TableSnapshot snapshot) { - if (!handle.equals(state().getHandle())) { - return; - } - Viewport viewport = getBinding().getSubscription(); - if (viewport == null || viewport.getRows() == null || viewport.getRows().size() == 0) { - // check out if we have a non-viewport sub attached - if (nonViewportSub != null) { - nonViewportSub.handleSnapshot(snapshot); - } - return; - } - - RangeSet viewportRows = viewport.getRows(); - JsLog.debug("handleSnapshot on " + viewportRows, handle, snapshot, viewport); - - RangeSet includedRows = snapshot.getIncludedRows(); - ColumnData[] dataColumns = snapshot.getDataColumns(); - JsArray[] remappedData = new JsArray[dataColumns.length]; - // remap dataColumns to the expected range for that table's viewport - long lastRow = -1; - for (int col = viewport.getColumns().nextSetBit(0); col >= 0; col = viewport.getColumns().nextSetBit(col + 1)) { - ColumnData dataColumn = dataColumns[col]; - if (dataColumn == null) { - // skip this, at least one column requested by that table isn't present, waiting on a later update - // TODO when IDS-2138 is fixed stop throwing this data away - return; - } - Object columnData = dataColumn.getData(); - - final ColumnDefinition def = state().getTableDef().getColumns()[col]; - remappedData[col] = JsData.newArray(def.getType()); - - PrimitiveIterator.OfLong viewportIterator = viewportRows.indexIterator(); - PrimitiveIterator.OfLong includedRowsIterator = includedRows.indexIterator(); - int dataIndex = 0; - while 
(viewportIterator.hasNext()) { - long viewportIndex = viewportIterator.nextLong(); - if (viewportIndex >= snapshot.getTableSize()) { - // reached or passed the end of the table, we'll still make a snapshot - break; - } - if (!includedRowsIterator.hasNext()) { - // we've reached the end, the viewport apparently goes past the end of what the server sent, - // so there is another snapshot on its way - // TODO when IDS-2138 is fixed stop throwing this data away - return; - } - - long possibleMatch = includedRowsIterator.nextLong(); - while (includedRowsIterator.hasNext() && possibleMatch < viewportIndex) { - dataIndex++;// skip, still seeking to the next item - - possibleMatch = includedRowsIterator.nextLong(); - } - if (!includedRowsIterator.hasNext() && possibleMatch < viewportIndex) { - // we didn't find any items which match, just give up - return; - } - - if (possibleMatch > viewportIndex) { - // if we hit a gap (more data coming, doesn't match viewport), skip the - // rest of this table entirely, a later update will get us caught up - return; - } - Object data = Js.>uncheckedCast(columnData).getAt(dataIndex); - remappedData[col].push(data); - dataIndex++;// increment for the next row - - // Track how many rows were actually present, allowing the snapshot to stop before the viewport's end - lastRow = Math.max(lastRow, possibleMatch); - } - } - - // TODO correct this - assumes max one range per table viewport, and nothing skipped - RangeSet actualViewport = - lastRow == -1 ? RangeSet.empty() : RangeSet.ofRange(viewportRows.indexIterator().nextLong(), lastRow); - - handleSnapshot(handle, snapshot.getSnapshotType(), actualViewport, remappedData, viewport.getColumns(), - viewportRows.size()); - } - - @JsIgnore - public void processSnapshot() { - try { - if (debounce == null) { - JsLog.debug("Skipping snapshot b/c debounce is null"); - return; - } - if (debounce.state != currentState) { - JsLog.debug("Skipping snapshot because state has changed ", debounce.state, " != ", currentState); - return; - } - if (isClosed()) { - JsLog.debug("Skipping snapshot because table is closed", this); - return; - } - JsArray viewportColumns = - getColumns().filter((item, index, all) -> debounce.columns.get(item.getIndex())); - ViewportData data = new ViewportData(debounce.includedRows, debounce.dataColumns, viewportColumns, - currentState.getRowFormatColumn() == null ? NO_ROW_FORMAT_COLUMN - : currentState.getRowFormatColumn().getIndex(), - debounce.maxRows); - this.currentViewportData = data; - CustomEventInit updatedEvent = CustomEventInit.create(); - updatedEvent.setDetail(data); - fireEvent(EVENT_UPDATED, updatedEvent); - - // also fire rowadded events - TODO also fire some kind of remove event for now-missing rows? 
- for (int i = 0; i < data.getRows().length; i++) { - CustomEventInit addedEvent = CustomEventInit.create(); - addedEvent.setDetail(wrap(data.getRows().getAt(i), i)); - fireEvent(EVENT_ROWADDED, addedEvent); - } - } finally { - debounce = null; - } - } + // private final class Debounce { + // private final ClientTableState state; + // private final TableTicket handle; + // private final SnapshotType type; + // private final RangeSet includedRows; + // private final BitSet columns; + // private final Object[] dataColumns; + // private final double timestamp; + // private final long maxRows; + // + // public Debounce( + // TableTicket table, + // SnapshotType snapshotType, + // RangeSet includedRows, + // BitSet columns, + // Object[] dataColumns, + // long maxRows) { + // this.handle = table; + // this.type = snapshotType; + // this.includedRows = includedRows; + // this.columns = columns; + // this.dataColumns = dataColumns; + // this.state = currentState; + // this.maxRows = maxRows; + // timestamp = System.currentTimeMillis(); + // } + // + // public boolean isEqual(Debounce o) { + // if (type == o.type) { + // // this is intentionally weird. We only want to debounce when one instance is column snapshot and the + // // other is row snapshot, + // // so we consider two events of the same type to be incompatible with debouncing. + // return false; + // } + // if (handle != o.handle) { + // assert !handle.equals(o.handle); + // return false; + // } + // if (state != o.state) { + // assert state.getHandle() != o.state.getHandle(); + // return false; + // } + // if (!includedRows.equals(o.includedRows)) { + // return false; + // } + // if (!columns.equals(o.columns)) { + // return false; + // } + // if (maxRows != o.maxRows) { + // return false; + // } + // assert Arrays.deepEquals(dataColumns, o.dataColumns) : "Debounce is broken, remove it."; + // return true; + // } + // } + + // private Debounce debounce; + + // private void handleSnapshot(TableTicket table, SnapshotType snapshotType, RangeSet includedRows, + // Object[] dataColumns, BitSet columns, long maxRows) { + // assert table.equals(state().getHandle()) : "Table received incorrect snapshot"; + // // if the type is initial_snapshot, we've already recorded the size, so only watch for the other two updates. + // // note that this will sometimes result in multiple updates on startup, so we do this ugly debounce-dance. + // // When IDS-2113 is fixed, we can likely remove this code. 
+ // JsLog.debug("Received snapshot for ", table, snapshotType, includedRows, dataColumns, columns); + // Debounce operation = new Debounce(table, snapshotType, includedRows, columns, dataColumns, maxRows); + // if (debounce == null) { + // debounce = operation; + // DomGlobal.setTimeout(ignored -> processSnapshot(), DEBOUNCE_TIME); + // } else if (debounce.isEqual(operation)) { + // // If we think the problem is fixed, we can put `assert false` here for a while before deleting Debounce + // // class + // JsLog.debug("Eating duplicated operation", debounce, operation); + // } else { + // processSnapshot(); + // debounce = operation; + // DomGlobal.setTimeout(ignored -> processSnapshot(), DEBOUNCE_TIME); + // } + // } + + // private void handleSnapshot(TableTicket handle, TableSnapshot snapshot) { + // if (!handle.equals(state().getHandle())) { + // return; + // } + // Viewport viewport = getBinding().getSubscription(); + // if (viewport == null || viewport.getRows() == null || viewport.getRows().size() == 0) { + // // check out if we have a non-viewport sub attached + // if (nonViewportSub != null) { + // nonViewportSub.handleSnapshot(snapshot); + // } + // return; + // } + // + // RangeSet viewportRows = viewport.getRows(); + // JsLog.debug("handleSnapshot on " + viewportRows, handle, snapshot, viewport); + // + // RangeSet includedRows = snapshot.getIncludedRows(); + // ColumnData[] dataColumns = snapshot.getDataColumns(); + // JsArray[] remappedData = new JsArray[dataColumns.length]; + // // remap dataColumns to the expected range for that table's viewport + // long lastRow = -1; + // for (int col = viewport.getColumns().nextSetBit(0); col >= 0; col = viewport.getColumns().nextSetBit(col + 1)) { + // ColumnData dataColumn = dataColumns[col]; + // if (dataColumn == null) { + // // skip this, at least one column requested by that table isn't present, waiting on a later update + // // TODO when IDS-2138 is fixed stop throwing this data away + // return; + // } + // Object columnData = dataColumn.getData(); + // + // final ColumnDefinition def = state().getTableDef().getColumns()[col]; + // remappedData[col] = JsData.newArray(def.getType()); + // + // PrimitiveIterator.OfLong viewportIterator = viewportRows.indexIterator(); + // PrimitiveIterator.OfLong includedRowsIterator = includedRows.indexIterator(); + // int dataIndex = 0; + // while (viewportIterator.hasNext()) { + // long viewportIndex = viewportIterator.nextLong(); + // if (viewportIndex >= snapshot.getTableSize()) { + // // reached or passed the end of the table, we'll still make a snapshot + // break; + // } + // if (!includedRowsIterator.hasNext()) { + // // we've reached the end, the viewport apparently goes past the end of what the server sent, + // // so there is another snapshot on its way + // // TODO when IDS-2138 is fixed stop throwing this data away + // return; + // } + // + // long possibleMatch = includedRowsIterator.nextLong(); + // while (includedRowsIterator.hasNext() && possibleMatch < viewportIndex) { + // dataIndex++;// skip, still seeking to the next item + // + // possibleMatch = includedRowsIterator.nextLong(); + // } + // if (!includedRowsIterator.hasNext() && possibleMatch < viewportIndex) { + // // we didn't find any items which match, just give up + // return; + // } + // + // if (possibleMatch > viewportIndex) { + // // if we hit a gap (more data coming, doesn't match viewport), skip the + // // rest of this table entirely, a later update will get us caught up + // return; + // } + // Object data = 
Js.>uncheckedCast(columnData).getAt(dataIndex); + // remappedData[col].push(data); + // dataIndex++;// increment for the next row + // + // // Track how many rows were actually present, allowing the snapshot to stop before the viewport's end + // lastRow = Math.max(lastRow, possibleMatch); + // } + // } + // + // // TODO correct this - assumes max one range per table viewport, and nothing skipped + // RangeSet actualViewport = + // lastRow == -1 ? RangeSet.empty() : RangeSet.ofRange(viewportRows.indexIterator().nextLong(), lastRow); + // + // handleSnapshot(handle, snapshot.getSnapshotType(), actualViewport, remappedData, viewport.getColumns(), + // viewportRows.size()); + // } + + // @JsIgnore + // public void processSnapshot() { + // try { + // if (debounce == null) { + // JsLog.debug("Skipping snapshot b/c debounce is null"); + // return; + // } + // if (debounce.state != currentState) { + // JsLog.debug("Skipping snapshot because state has changed ", debounce.state, " != ", currentState); + // return; + // } + // if (isClosed()) { + // JsLog.debug("Skipping snapshot because table is closed", this); + // return; + // } + // JsArray viewportColumns = + // getColumns().filter((item, index, all) -> debounce.columns.get(item.getIndex())); + // ViewportData data = new ViewportData(debounce.includedRows, debounce.dataColumns, viewportColumns, + // currentState.getRowFormatColumn() == null ? NO_ROW_FORMAT_COLUMN + // : currentState.getRowFormatColumn().getIndex(), + // debounce.maxRows); + // this.currentViewportData = data; + // CustomEventInit updatedEvent = CustomEventInit.create(); + // updatedEvent.setDetail(data); + // fireEvent(EVENT_UPDATED, updatedEvent); + // + // // also fire rowadded events - TODO also fire some kind of remove event for now-missing rows? + // for (int i = 0; i < data.getRows().length; i++) { + // CustomEventInit> addedEvent = CustomEventInit.create(); + // addedEvent.setDetail(wrap(data.getRows().getAt(i), i)); + // fireEvent(EVENT_ROWADDED, addedEvent); + // } + // } finally { + // debounce = null; + // } + // } /** * True if this table has been closed. 
@@ -1849,57 +1839,58 @@ public String getPluginName() { } // Factored out so that we always apply the same format - private Object wrap(ViewportRow at, int index) { + private JsPropertyMap wrap(ViewportRow at, int index) { return JsPropertyMap.of("row", at, "index", (double) index); } - public void handleDelta(ClientTableState current, DeltaUpdates updates) { - current.onRunning(s -> { - if (current != state()) { - return; - } - if (nonViewportSub != null) { - nonViewportSub.handleDelta(updates); - return; - } - final ViewportData vpd = currentViewportData; - if (vpd == null) { - // if the current viewport data is null, we're waiting on an initial snapshot to arrive for a different - // part of the viewport - JsLog.debug("Received delta while waiting for reinitialization"); - return; - } - MergeResults mergeResults = vpd.merge(updates); - if (mergeResults.added.size() == 0 && mergeResults.modified.size() == 0 - && mergeResults.removed.size() == 0) { - return; - } - CustomEventInit event = CustomEventInit.create(); - event.setDetail(vpd); - // user might call setViewport, and wind up nulling our currentViewportData - fireEvent(EVENT_UPDATED, event); - - // fire rowadded/rowupdated/rowremoved - // TODO when we keep more rows loaded than the user is aware of, check if a given row is actually in the - // viewport - // here - for (Integer index : mergeResults.added) { - CustomEventInit addedEvent = CustomEventInit.create(); - addedEvent.setDetail(wrap(vpd.getRows().getAt(index), index)); - fireEvent(EVENT_ROWADDED, addedEvent); - } - for (Integer index : mergeResults.modified) { - CustomEventInit addedEvent = CustomEventInit.create(); - addedEvent.setDetail(wrap(vpd.getRows().getAt(index), index)); - fireEvent(EVENT_ROWUPDATED, addedEvent); - } - for (Integer index : mergeResults.removed) { - CustomEventInit addedEvent = CustomEventInit.create(); - addedEvent.setDetail(wrap(vpd.getRows().getAt(index), index)); - fireEvent(EVENT_ROWREMOVED, addedEvent); - } - }, JsRunnable.doNothing()); - } + // private void handleDelta(ClientTableState current, DeltaUpdates updates) { + // // TODO remove this, should route this through the subscription and fire here + // current.onRunning(s -> { + // if (current != state()) { + // return; + // } + // if (nonViewportSub != null) { + // nonViewportSub.handleDelta(updates); + // return; + // } + // final ViewportData vpd = currentViewportData; + // if (vpd == null) { + // // if the current viewport data is null, we're waiting on an initial snapshot to arrive for a different + // // part of the viewport + // JsLog.debug("Received delta while waiting for reinitialization"); + // return; + // } + // MergeResults mergeResults = vpd.merge(updates); + // if (mergeResults.added.size() == 0 && mergeResults.modified.size() == 0 + // && mergeResults.removed.size() == 0) { + // return; + // } + // CustomEventInit event = CustomEventInit.create(); + // event.setDetail(vpd); + // // user might call setViewport, and wind up nulling our currentViewportData + // fireEvent(EVENT_UPDATED, event); + // + // // fire rowadded/rowupdated/rowremoved + // // TODO when we keep more rows loaded than the user is aware of, check if a given row is actually in the + // // viewport + // // here + // for (Integer index : mergeResults.added) { + // CustomEventInit> addedEvent = CustomEventInit.create(); + // addedEvent.setDetail(wrap(vpd.getRows().getAt(index), index)); + // fireEvent(EVENT_ROWADDED, addedEvent); + // } + // for (Integer index : mergeResults.modified) { + // CustomEventInit> 
addedEvent = CustomEventInit.create(); + // addedEvent.setDetail(wrap(vpd.getRows().getAt(index), index)); + // fireEvent(EVENT_ROWUPDATED, addedEvent); + // } + // for (Integer index : mergeResults.removed) { + // CustomEventInit> addedEvent = CustomEventInit.create(); + // addedEvent.setDetail(wrap(vpd.getRows().getAt(index), index)); + // fireEvent(EVENT_ROWREMOVED, addedEvent); + // } + // }, JsRunnable.doNothing()); + // } @Override public TableTicket getHandle() { @@ -1955,7 +1946,7 @@ public void refreshViewport(ClientTableState state, Viewport vp) { } // get the column expected to be in the snapshot - JsArray columns = Js.uncheckedCast(getBinding().getColumns()); + JsArray columns = null;// Js.uncheckedCast(getBinding().getColumns()); Column[] allColumns = state.getColumns(); if (columns == null) { columns = Js.uncheckedCast(allColumns); @@ -2170,7 +2161,7 @@ public int getSubscriptionId() { @Override public void maybeReviveSubscription() { - getBinding().maybeReviveSubscription(); + // getBinding().maybeReviveSubscription(); } } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java b/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java index e5e79d06428..b3c1663fd02 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java @@ -13,14 +13,6 @@ import elemental2.dom.CustomEventInit; import elemental2.dom.DomGlobal; import elemental2.promise.Promise; -import io.deephaven.barrage.flatbuf.BarrageMessageType; -import io.deephaven.barrage.flatbuf.BarrageMessageWrapper; -import io.deephaven.barrage.flatbuf.BarrageSubscriptionOptions; -import io.deephaven.barrage.flatbuf.BarrageSubscriptionRequest; -import io.deephaven.barrage.flatbuf.BarrageUpdateMetadata; -import io.deephaven.barrage.flatbuf.ColumnConversionMode; -import io.deephaven.chunk.ChunkType; -import io.deephaven.extensions.barrage.BarrageSnapshotOptions; import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.browserflight_pb_service.BrowserFlightServiceClient; import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb.FlightData; import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb.HandshakeRequest; @@ -60,10 +52,7 @@ import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.table_pb_service.TableServiceClient; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.ticket_pb.Ticket; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.ticket_pb.TypedTicket; -import io.deephaven.web.client.api.barrage.WebBarrageMessage; -import io.deephaven.web.client.api.barrage.WebBarrageStreamReader; import io.deephaven.web.client.api.barrage.WebBarrageUtils; -import io.deephaven.web.client.api.barrage.def.ColumnDefinition; import io.deephaven.web.client.api.barrage.def.InitialTableDefinition; import io.deephaven.web.client.api.barrage.stream.BiDiStream; import io.deephaven.web.client.api.barrage.stream.HandshakeStreamFactory; @@ -88,10 +77,6 @@ import io.deephaven.web.client.state.ClientTableState; import io.deephaven.web.client.state.HasTableBinding; import io.deephaven.web.client.state.TableReviver; -import io.deephaven.web.shared.data.DeltaUpdates; -import io.deephaven.web.shared.data.RangeSet; -import io.deephaven.web.shared.data.TableSnapshot; -import io.deephaven.web.shared.data.TableSubscriptionRequest; import io.deephaven.web.shared.fu.JsConsumer; import 
io.deephaven.web.shared.fu.JsRunnable; import jsinterop.annotations.JsMethod; @@ -107,14 +92,10 @@ import org.apache.arrow.flatbuf.MetadataVersion; import org.apache.arrow.flatbuf.RecordBatch; import org.apache.arrow.flatbuf.Schema; -import org.gwtproject.nio.TypedArrayHelper; import javax.annotation.Nullable; -import java.io.IOException; -import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; -import java.util.BitSet; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -126,11 +107,6 @@ import java.util.stream.Collectors; import static io.deephaven.web.client.api.CoreClient.EVENT_REFRESH_TOKEN_UPDATED; -import static io.deephaven.web.client.api.barrage.WebBarrageUtils.DeltaUpdatesBuilder; -import static io.deephaven.web.client.api.barrage.WebBarrageUtils.createSnapshot; -import static io.deephaven.web.client.api.barrage.WebBarrageUtils.deltaUpdates; -import static io.deephaven.web.client.api.barrage.WebBarrageUtils.serializeRanges; -import static io.deephaven.web.client.api.barrage.WebBarrageUtils.typedArrayToAlignedLittleEndianByteBuffer; import static io.deephaven.web.client.api.barrage.WebGrpcUtils.CLIENT_OPTIONS; /** @@ -207,7 +183,6 @@ private enum State { private final Set flushable = new HashSet<>(); private final JsSet> logCallbacks = new JsSet<>(); - private final Map> subscriptionStreams = new HashMap<>(); private ResponseStreamWrapper exportNotifications; private JsSet simpleReconnectableInstances = new JsSet<>(); @@ -308,7 +283,7 @@ private void connectToWorker() { ClientTableState[] hasActiveSubs = cache.getAllStates().stream() .peek(cts -> { cts.getHandle().setConnected(false); - cts.setSubscribed(false); + // cts.setSubscribed(false); cts.forActiveLifecycles(item -> { assert !(item instanceof JsTable) || ((JsTable) item).state() == cts @@ -561,52 +536,13 @@ private void subscribeToTerminationNotification() { }); } - // @Override - public void initialSnapshot(TableTicket handle, TableSnapshot snapshot) { - LazyPromise.runLater(() -> { - // notify table that it has a snapshot available to replace viewport rows - // TODO looping in this way is not ideal, means that we're roughly O(n*m), where - // n is the number of rows, and m the number of tables with viewports. - // Instead, we should track all rows here in WorkerConnection, and then - // tell every table who might be interested about the rows it is interested in. 
- if (!cache.get(handle).isPresent()) { - JsLog.debug("Discarding snapshot for ", handle, " : ", snapshot); - } - cache.get(handle).ifPresent(s -> { - s.setSize(snapshot.getTableSize()); - s.forActiveTables(table -> { - table.handleSnapshot(handle, snapshot); - }); - }); - }); - } - - // @Override - public void incrementalUpdates(TableTicket tableHandle, DeltaUpdates updates) { - LazyPromise.runLater(() -> { - // notify table that it has individual row updates - final Optional cts = cache.get(tableHandle); - if (!cts.isPresent()) { - JsLog.debug("Discarding delta for disconnected state ", tableHandle, " : ", updates); - } - JsLog.debug("Delta received", tableHandle, updates); - cts.ifPresent(s -> { - if (!s.isSubscribed()) { - JsLog.debug("Discarding delta for unsubscribed table", tableHandle, updates); - return; - } - s.handleDelta(updates); - }); - }); - } - // @Override public void exportedTableUpdateMessage(TableTicket clientId, long size) { cache.get(clientId).ifPresent(state -> { - if (!state.isSubscribed()) { - // not presently subscribed so this is the only way to be informed of size changes - state.setSize(size); - } + // if (!state.isSubscribed()) { + // // not presently subscribed so this is the only way to be informed of size changes + // state.setSize(size); + // } }); } @@ -1321,7 +1257,7 @@ public StateCache getCache() { } /** - * Schedules a deferred command to check the given state for active tables and adjust viewports accordingly. + * Schedules a deferred command to check the given state for active tables. */ public void scheduleCheck(ClientTableState state) { if (flushable.isEmpty()) { @@ -1346,214 +1282,25 @@ public void releaseTicket(Ticket ticket) { sessionServiceClient.release(releaseRequest, metadata, null); } - - /** - * For those calls where we don't really care what happens - */ - private static final Callback DONOTHING_CALLBACK = new Callback() { - @Override - public void onSuccess(Void value) { - // Do nothing. - } - - @Override - public void onFailure(String error) { - JsLog.error("Callback failed: " + error); - } - }; - private void flush() { - // LATER: instead of running a bunch of serial operations, - // condense these all into a single batch operation. - // All three server calls made by this method are _only_ called by this method, - // so we can reasonably merge all three into a single batched operation. ArrayList statesToFlush = new ArrayList<>(flushable); flushable.clear(); - for (ClientTableState state : statesToFlush) { - if (state.hasNoSubscriptions()) { - // state may be retained if it is held by at least one paused binding; - // it is either an unsubscribed active table, an interim state for an - // active table, or a pending rollback for an operation that has not - // yet completed (we leave orphaned nodes paused until a request completes). 
- if (state.isSubscribed()) { - state.setSubscribed(false); + if (state.isEmpty()) { + // completely empty; perform release + final ClientTableState.ResolutionState previousState = state.getResolution(); + state.setResolution(ClientTableState.ResolutionState.RELEASED); + // state.setSubscribed(false); + if (previousState != ClientTableState.ResolutionState.RELEASED) { + cache.release(state); + + JsLog.debug("Releasing state", state, LazyString.of(state.getHandle())); + // don't send a release message to the server if the table isn't really there if (state.getHandle().isConnected()) { - BiDiStream stream = subscriptionStreams.remove(state); - if (stream != null) { - stream.end(); - stream.cancel(); - } - } - } - - if (state.isEmpty()) { - // completely empty; perform release - final ClientTableState.ResolutionState previousState = state.getResolution(); - state.setResolution(ClientTableState.ResolutionState.RELEASED); - state.setSubscribed(false); - if (previousState != ClientTableState.ResolutionState.RELEASED) { - cache.release(state); - - JsLog.debug("Releasing state", state, LazyString.of(state.getHandle())); - // don't send a release message to the server if the table isn't really there - if (state.getHandle().isConnected()) { - releaseHandle(state.getHandle()); - } + releaseHandle(state.getHandle()); } } - } else { - List vps = new ArrayList<>(); - state.forActiveSubscriptions((table, subscription) -> { - assert table.isActive(state) : "Inactive table has a viewport still attached"; - vps.add(new TableSubscriptionRequest(table.getSubscriptionId(), subscription.getRows(), - subscription.getColumns())); - }); - - boolean isViewport = vps.stream().allMatch(req -> req.getRows() != null); - assert isViewport || vps.stream().noneMatch(req -> req.getRows() != null) - : "All subscriptions to a given handle must be consistently viewport or non-viewport"; - - - BitSet includedColumns = vps.stream().map(TableSubscriptionRequest::getColumns).reduce((bs1, bs2) -> { - BitSet result = new BitSet(); - result.or(bs1); - result.or(bs2); - return result; - }).orElseThrow(() -> new IllegalStateException("Cannot call subscribe with zero subscriptions")); - String[] columnTypes = Arrays.stream(state.getTableDef().getColumns()) - .map(ColumnDefinition::getType) - .toArray(String[]::new); - - state.setSubscribed(true); - - FlatBufferBuilder subscriptionReq = new FlatBufferBuilder(1024); - - int columnsOffset = BarrageSubscriptionRequest.createColumnsVector(subscriptionReq, - includedColumns.toByteArray()); - int viewportOffset = 0; - if (isViewport) { - viewportOffset = BarrageSubscriptionRequest.createViewportVector(subscriptionReq, serializeRanges( - vps.stream().map(TableSubscriptionRequest::getRows).collect(Collectors.toSet()))); - } - // TODO #188 support minUpdateIntervalMs - int serializationOptionsOffset = BarrageSubscriptionOptions - .createBarrageSubscriptionOptions(subscriptionReq, ColumnConversionMode.Stringify, true, 1000, - 0, 0, false); - int tableTicketOffset = BarrageSubscriptionRequest.createTicketVector(subscriptionReq, - Js.uncheckedCast(state.getHandle().getTicket())); - BarrageSubscriptionRequest.startBarrageSubscriptionRequest(subscriptionReq); - BarrageSubscriptionRequest.addColumns(subscriptionReq, columnsOffset); - BarrageSubscriptionRequest.addSubscriptionOptions(subscriptionReq, serializationOptionsOffset); - BarrageSubscriptionRequest.addViewport(subscriptionReq, viewportOffset); - BarrageSubscriptionRequest.addTicket(subscriptionReq, tableTicketOffset); - 
subscriptionReq.finish(BarrageSubscriptionRequest.endBarrageSubscriptionRequest(subscriptionReq)); - - FlightData request = new FlightData(); - request.setAppMetadata( - WebBarrageUtils.wrapMessage(subscriptionReq, BarrageMessageType.BarrageSubscriptionRequest)); - - BiDiStream stream = this.streamFactory().create( - headers -> flightServiceClient.doExchange(headers), - (first, headers) -> browserFlightServiceClient.openDoExchange(first, headers), - (next, headers, c) -> browserFlightServiceClient.nextDoExchange(next, headers, c::apply), - new FlightData()); - - stream.send(request); - stream.onData(new JsConsumer() { - @Override - public void apply(FlightData data) { - try { - WebBarrageMessage webBarrageMessage = - new WebBarrageStreamReader().parseFrom(BarrageSnapshotOptions.builder().build(), - includedColumns, new ChunkType[0], new Class[0], new Class[0], data); - } catch (IOException e) { - throw new RuntimeException(e); - } - ByteBuffer body = typedArrayToAlignedLittleEndianByteBuffer(data.getDataBody_asU8()); - Message headerMessage = Message - .getRootAsMessage(TypedArrayHelper.wrap(data.getDataHeader_asU8())); - if (body.limit() == 0 && headerMessage.headerType() != MessageHeader.RecordBatch) { - // a subscription stream presently ignores schemas and other message types - // TODO hang on to the schema to better handle the now-Utf8 columns - return; - } - RecordBatch header = (RecordBatch) headerMessage.header(new RecordBatch()); - BarrageMessageWrapper barrageMessageWrapper = - BarrageMessageWrapper.getRootAsBarrageMessageWrapper( - TypedArrayHelper.wrap(data.getAppMetadata_asU8())); - if (barrageMessageWrapper.msgType() == BarrageMessageType.None) { - // continue previous message, just read RecordBatch - appendAndMaybeFlush(header, body); - } else { - assert barrageMessageWrapper.msgType() == BarrageMessageType.BarrageUpdateMetadata; - BarrageUpdateMetadata barrageUpdate = BarrageUpdateMetadata.getRootAsBarrageUpdateMetadata( - barrageMessageWrapper.msgPayloadAsByteBuffer()); - startAndMaybeFlush(barrageUpdate.isSnapshot(), header, body, barrageUpdate, isViewport, - columnTypes); - } - } - - private DeltaUpdatesBuilder nextDeltaUpdates; - private DeltaUpdates deferredDeltaUpdates; - - private void appendAndMaybeFlush(RecordBatch header, ByteBuffer body) { - // using existing barrageUpdate, append to the current snapshot/delta - assert nextDeltaUpdates != null; - boolean shouldFlush = nextDeltaUpdates.appendRecordBatch(header, body); - if (shouldFlush) { - DeltaUpdates updates = nextDeltaUpdates.build(); - nextDeltaUpdates = null; - - if (state.getTableDef().getAttributes().isBlinkTable()) { - // blink tables remove all rows from the previous step, if there are no adds this step - // then defer removal until new data arrives -- this makes blink tables GUI friendly - if (updates.getAdded().isEmpty()) { - if (deferredDeltaUpdates != null) { - final RangeSet removed = deferredDeltaUpdates.getRemoved(); - updates.getRemoved().rangeIterator().forEachRemaining(removed::addRange); - } else { - deferredDeltaUpdates = updates; - } - return; - } else if (deferredDeltaUpdates != null) { - assert updates.getRemoved().isEmpty() - : "Blink table received two consecutive remove rowsets"; - updates.setRemoved(deferredDeltaUpdates.getRemoved()); - deferredDeltaUpdates = null; - } - } - incrementalUpdates(state.getHandle(), updates); - } - } - - private void startAndMaybeFlush(boolean isSnapshot, RecordBatch header, ByteBuffer body, - BarrageUpdateMetadata barrageUpdate, boolean isViewport, 
String[] columnTypes) { - if (isSnapshot) { - TableSnapshot snapshot = - createSnapshot(header, body, barrageUpdate, isViewport, columnTypes); - - // for now we always expect snapshots to arrive in a single payload - initialSnapshot(state.getHandle(), snapshot); - } else { - nextDeltaUpdates = deltaUpdates(barrageUpdate, isViewport, columnTypes); - appendAndMaybeFlush(header, body); - } - } - }); - stream.onStatus(err -> { - checkStatus(err); - if (!err.isOk() && !err.isTransportError()) { - state.setResolution(ClientTableState.ResolutionState.FAILED, err.getDetails()); - } - }); - BiDiStream oldStream = subscriptionStreams.put(state, stream); - if (oldStream != null) { - // cancel any old stream, we presently expect a fresh instance - oldStream.end(); - oldStream.cancel(); - } } } } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageMessage.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageMessage.java index 1b26f2ccadb..877c7e09330 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageMessage.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageMessage.java @@ -15,16 +15,16 @@ public class WebBarrageMessage { public static class ModColumnData { public RangeSet rowsModified; - public Class type; - public Class componentType; + // public Class type; + // public Class componentType; public ArrayList> data; - public ChunkType chunkType; + // public ChunkType chunkType; } public static class AddColumnData { - public Class type; - public Class componentType; + // public Class type; + // public Class componentType; public ArrayList> data; - public ChunkType chunkType; + // public ChunkType chunkType; } public long firstSeq = -1; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java index 47a1587f107..cd496cf18b5 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java @@ -101,8 +101,8 @@ public WebBarrageMessage parseFrom(final StreamReaderOptions options, BitSet exp msg.addColumnData = new WebBarrageMessage.AddColumnData[columnTypes.length]; for (int ci = 0; ci < msg.addColumnData.length; ++ci) { msg.addColumnData[ci] = new WebBarrageMessage.AddColumnData(); - msg.addColumnData[ci].type = columnTypes[ci]; - msg.addColumnData[ci].componentType = componentTypes[ci]; + // msg.addColumnData[ci].type = columnTypes[ci]; + // msg.addColumnData[ci].componentType = componentTypes[ci]; msg.addColumnData[ci].data = new ArrayList<>(); // create an initial chunk of the correct size @@ -118,8 +118,8 @@ public WebBarrageMessage parseFrom(final StreamReaderOptions options, BitSet exp msg.modColumnData = new WebBarrageMessage.ModColumnData[metadata.modColumnNodesLength()]; for (int ci = 0; ci < msg.modColumnData.length; ++ci) { msg.modColumnData[ci] = new WebBarrageMessage.ModColumnData(); - msg.modColumnData[ci].type = columnTypes[ci]; - msg.modColumnData[ci].componentType = componentTypes[ci]; + // msg.modColumnData[ci].type = columnTypes[ci]; + // msg.modColumnData[ci].componentType = componentTypes[ci]; msg.modColumnData[ci].data = new ArrayList<>(); final BarrageModColumnMetadata mcd = metadata.modColumnNodes(ci); diff --git 
a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java index 3f752d3796a..1857ed4f1fb 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java @@ -16,7 +16,6 @@ import io.deephaven.web.shared.data.columns.*; import jsinterop.base.Js; import org.apache.arrow.flatbuf.Buffer; -import org.apache.arrow.flatbuf.Field; import org.apache.arrow.flatbuf.FieldNode; import org.apache.arrow.flatbuf.KeyValue; import org.apache.arrow.flatbuf.Message; @@ -86,47 +85,7 @@ public static InitialTableDefinition readTableDefinition(Schema schema) { public static ColumnDefinition[] readColumnDefinitions(Schema schema) { ColumnDefinition[] cols = new ColumnDefinition[(int) schema.fieldsLength()]; for (int i = 0; i < schema.fieldsLength(); i++) { - cols[i] = new ColumnDefinition(); - Field f = schema.fields(i); - Map fieldMetadata = - keyValuePairs("deephaven:", f.customMetadataLength(), f::customMetadata); - cols[i].setName(f.name()); - cols[i].setColumnIndex(i); - cols[i].setType(fieldMetadata.get("type")); - cols[i].setIsSortable("true".equals(fieldMetadata.get("isSortable"))); - cols[i].setStyleColumn("true".equals(fieldMetadata.get("isStyle"))); - cols[i].setFormatColumn("true".equals(fieldMetadata.get("isDateFormat")) - || "true".equals(fieldMetadata.get("isNumberFormat"))); - cols[i].setForRow("true".equals(fieldMetadata.get("isRowStyle"))); - - String formatColumnName = fieldMetadata.get("dateFormatColumn"); - if (formatColumnName == null) { - formatColumnName = fieldMetadata.get("numberFormatColumn"); - } - cols[i].setFormatColumnName(formatColumnName); - - cols[i].setStyleColumnName(fieldMetadata.get("styleColumn")); - - if (fieldMetadata.containsKey("inputtable.isKey")) { - cols[i].setInputTableKeyColumn("true".equals(fieldMetadata.get("inputtable.isKey"))); - } - - cols[i].setDescription(fieldMetadata.get("description")); - - cols[i].setPartitionColumn("true".equals(fieldMetadata.get("isPartitioning"))); - - cols[i].setHierarchicalExpandByColumn( - "true".equals(fieldMetadata.get("hierarchicalTable.isExpandByColumn"))); - cols[i].setHierarchicalRowDepthColumn( - "true".equals(fieldMetadata.get("hierarchicalTable.isRowDepthColumn"))); - cols[i].setHierarchicalRowExpandedColumn( - "true".equals(fieldMetadata.get("hierarchicalTable.isRowExpandedColumn"))); - cols[i].setRollupAggregatedNodeColumn( - "true".equals(fieldMetadata.get("rollupTable.isAggregatedNodeColumn"))); - cols[i].setRollupConstituentNodeColumn( - "true".equals(fieldMetadata.get("rollupTable.isConstituentNodeColumn"))); - cols[i].setRollupGroupByColumn("true".equals(fieldMetadata.get("rollupTable.isGroupByColumn"))); - cols[i].setRollupAggregationInputColumn(fieldMetadata.get("rollupTable.aggregationInputColumnName")); + cols[i] = new ColumnDefinition(i, schema.fields(i)); } return cols; } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java index c24001c2c2a..5c1fc2400e9 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java @@ -3,14 +3,18 @@ // package 
io.deephaven.web.client.api.barrage.data; +import com.google.flatbuffers.FlatBufferBuilder; +import io.deephaven.barrage.flatbuf.BarrageSubscriptionRequest; import io.deephaven.chunk.Chunk; import io.deephaven.chunk.attributes.Values; +import io.deephaven.web.client.api.barrage.CompressedRangeSetReader; import io.deephaven.web.client.api.barrage.WebBarrageMessage; import io.deephaven.web.client.api.barrage.def.InitialTableDefinition; import io.deephaven.web.client.state.ClientTableState; import io.deephaven.web.shared.data.Range; import io.deephaven.web.shared.data.RangeSet; import io.deephaven.web.shared.data.ShiftedRange; +import org.jetbrains.annotations.Nullable; import java.util.Arrays; import java.util.BitSet; @@ -19,7 +23,55 @@ import java.util.PrimitiveIterator; import java.util.TreeMap; +/** + * In contrast to the server implementation, the JS API holds the "table" as distinct from the "subscription", so that + * developers are acutely aware of extra async costs in requesting data, and can clearly indicate how much data is + * requested. This class represents a barrage subscription for the JS API, and exposes access to the data presently + * available on the client. + */ public abstract class WebBarrageSubscription { + + public static final boolean COLUMNS_AS_LIST = false; + public static final int MAX_MESSAGE_SIZE = 10_000_000; + public static final int BATCH_SIZE = 100_000; + + public static WebBarrageSubscription subscribe(ClientTableState cts, ViewportChangedHandler viewportChangedHandler, + DataChangedHandler dataChangedHandler) { + + if (cts.getTableDef().getAttributes().isBlinkTable()) { + return new BlinkImpl(cts, viewportChangedHandler, dataChangedHandler); + } + return new RedirectedImpl(cts, viewportChangedHandler, dataChangedHandler); + } + + public static FlatBufferBuilder subscriptionRequest(byte[] tableTicket, BitSet columns, @Nullable RangeSet viewport, + io.deephaven.extensions.barrage.BarrageSubscriptionOptions options) { + FlatBufferBuilder sub = new FlatBufferBuilder(1024); + int colOffset = BarrageSubscriptionRequest.createColumnsVector(sub, columns.toByteArray()); + int viewportOffset = 0; + if (viewport != null) { + viewportOffset = + BarrageSubscriptionRequest.createViewportVector(sub, CompressedRangeSetReader.writeRange(viewport)); + } + int optionsOffset = options.appendTo(sub); + int tableTicketOffset = BarrageSubscriptionRequest.createTicketVector(sub, tableTicket); + BarrageSubscriptionRequest.addColumns(sub, colOffset); + BarrageSubscriptionRequest.addViewport(sub, viewportOffset); + BarrageSubscriptionRequest.addSubscriptionOptions(sub, optionsOffset); + BarrageSubscriptionRequest.addTicket(sub, tableTicketOffset); + sub.finish(BarrageSubscriptionRequest.endBarrageSubscriptionRequest(sub)); + + return sub; + } + + public interface ViewportChangedHandler { + void onServerViewportChanged(RangeSet serverViewport, BitSet serverColumns, boolean serverReverseViewport); + } + public interface DataChangedHandler { + void onDataChanged(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet totalMods, ShiftedRange[] shifted, + BitSet modifiedColumnSet); + } + public interface WebDataSink { void fillChunk(Chunk data, PrimitiveIterator.OfLong destIterator); @@ -27,6 +79,8 @@ default void ensureCapacity(long size) {} } protected final ClientTableState state; + protected final ViewportChangedHandler viewportChangedHandler; + protected final DataChangedHandler dataChangedHandler; protected final RangeSet currentRowSet = RangeSet.empty(); protected long capacity = 0; @@ 
-36,22 +90,23 @@ default void ensureCapacity(long size) {}
 protected BitSet serverColumns;
 protected boolean serverReverseViewport;
 
-    public WebBarrageSubscription(ClientTableState state) {
+    public WebBarrageSubscription(ClientTableState state, ViewportChangedHandler viewportChangedHandler,
+            DataChangedHandler dataChangedHandler) {
         this.state = state;
         destSources = new WebDataSink[state.getTableDef().getColumns().length];
+        this.viewportChangedHandler = viewportChangedHandler;
+        this.dataChangedHandler = dataChangedHandler;
     }
 
-    protected abstract void applyUpdates(WebBarrageMessage message);
+    public abstract void applyUpdates(WebBarrageMessage message);
 
     protected void updateServerViewport(RangeSet viewport, BitSet columns, boolean reverseViewport) {
         serverViewport = viewport;
         serverColumns = columns == null || columns.cardinality() == numColumns() ? null : columns;
         serverReverseViewport = reverseViewport;
-
-        // TODO viewport change event?
     }
 
-    private int numColumns() {
+    protected int numColumns() {
         return getDefinition().getColumns().length;
     }
 
@@ -63,6 +118,8 @@ public RangeSet getCurrentRowSet() {
         return currentRowSet;
     }
 
+    public abstract T getData(long key, int col);
+
     protected boolean isSubscribedColumn(int ii) {
         return serverColumns == null || serverColumns.get(ii);
     }
@@ -74,13 +131,14 @@ enum Mode {
 
         private final Mode mode;
 
-        public BlinkImpl(ClientTableState state) {
-            super(state);
-            mode = Mode.APPEND;
+        public BlinkImpl(ClientTableState state, ViewportChangedHandler viewportChangedHandler,
+                DataChangedHandler dataChangedHandler) {
+            super(state, viewportChangedHandler, dataChangedHandler);
+            mode = Mode.BLINK;
         }
 
         @Override
-        protected void applyUpdates(WebBarrageMessage message) {
+        public void applyUpdates(WebBarrageMessage message) {
             if (message.isSnapshot) {
                 updateServerViewport(message.snapshotRowSet, message.snapshotColumns, message.snapshotRowSetIsReversed);
             }
@@ -115,6 +173,12 @@ protected void applyUpdates(WebBarrageMessage message) {
                 assert !destIterator.hasNext();
             }
         }
+
+        if (message.isSnapshot) {
+            viewportChangedHandler.onServerViewportChanged(serverViewport, serverColumns, serverReverseViewport);
+        }
+        dataChangedHandler.onDataChanged(message.rowsAdded, message.rowsRemoved, RangeSet.empty(), message.shifted,
+                new BitSet(0));
     }
 }
 
@@ -122,12 +186,13 @@ public static class RedirectedImpl extends WebBarrageSubscription {
     private RangeSet freeset = new RangeSet();
     private final TreeMap redirectedIndexes = new TreeMap<>();
 
-        public RedirectedImpl(ClientTableState state) {
-            super(state);
+        public RedirectedImpl(ClientTableState state, ViewportChangedHandler viewportChangedHandler,
+                DataChangedHandler dataChangedHandler) {
+            super(state, viewportChangedHandler, dataChangedHandler);
         }
 
         @Override
-        protected void applyUpdates(WebBarrageMessage message) {
+        public void applyUpdates(WebBarrageMessage message) {
             if (message.isSnapshot) {
                 updateServerViewport(message.snapshotRowSet, message.snapshotColumns, message.snapshotRowSetIsReversed);
             }
@@ -150,7 +215,7 @@ protected void applyUpdates(WebBarrageMessage message) {
             // Apply shifts
 
             // Shift moved rows in the redir index
             boolean hasReverseShift = false;
             final ShiftedRange[] shiftedRanges = message.shifted;
             RangeSetBulkHelper currentRowsetShifter =
                     new RangeSetBulkHelper(currentRowSet, RangeSetBulkHelper.Operation.APPEND);
@@ -241,12 +306,15 @@ protected void applyUpdates(WebBarrageMessage message) {
                 }
             }
 
+            BitSet modifiedColumnSet = new BitSet(numColumns());
             for (int ii = 0; 
ii < message.modColumnData.length; ii++) {
                 WebBarrageMessage.ModColumnData column = message.modColumnData[ii];
                 if (column.rowsModified.isEmpty()) {
                     continue;
                 }
 
+                modifiedColumnSet.set(ii);
+
                 PrimitiveIterator.OfLong destIterator = column.rowsModified.indexIterator();
                 for (int j = 0; j < column.data.size(); j++) {
                     Chunk chunk = column.data.get(j);
@@ -260,13 +328,19 @@ protected void applyUpdates(WebBarrageMessage message) {
                 newPopulated.rangeIterator().forEachRemaining(newPopulated::removeRange);
                 freeRows(populatedRows);
             }
+
+            if (message.isSnapshot) {
+                viewportChangedHandler.onServerViewportChanged(serverViewport, serverColumns, serverReverseViewport);
+            }
+            dataChangedHandler.onDataChanged(message.rowsAdded, message.rowsRemoved, totalMods, message.shifted,
+                    modifiedColumnSet);
         }
 
         private RangeSet getFreeRows(long size) {
             if (size <= 0) {
                 return RangeSet.empty();
             }
             boolean needsResizing = false;
             final RangeSet result;
             if (capacity == 0) {
                 capacity = Long.highestOneBit(Math.max(size * 2, 8));
diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/def/ColumnDefinition.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/def/ColumnDefinition.java
index 604f80fcd77..76b04376ea5 100644
--- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/def/ColumnDefinition.java
+++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/def/ColumnDefinition.java
@@ -4,108 +4,101 @@
 package io.deephaven.web.client.api.barrage.def;
 
 import io.deephaven.web.client.api.Column;
+import org.apache.arrow.flatbuf.Field;
 
 import java.util.Map;
 
+import static io.deephaven.web.client.api.barrage.WebBarrageUtils.keyValuePairs;
+
 public class ColumnDefinition {
-    private int columnIndex;
-    private String name;
-    private String type;
-
-    private boolean isSortable;
-
-    private String styleColumn;
-    private String formatColumn;
-
-    private boolean isStyleColumn;
-    private boolean isFormatColumn;
-    private boolean isNumberFormatColumn;
-    private boolean isPartitionColumn;
-    private boolean isHierarchicalExpandByColumn;
-    private boolean isHierarchicalRowDepthColumn;
-    private boolean isHierarchicalRowExpandedColumn;
-    private boolean isRollupAggregatedNodeColumn;
-    private boolean isRollupConstituentNodeColumn;
-    private boolean isRollupGroupByColumn;
-    private String rollupAggregationInputColumn;
-
-    // Indicates that this is a style column for the row
-    private boolean forRow;
-    private boolean isInputTableKeyColumn;
-    private String description;
+    private final Field field;
+    private final int columnIndex;
+    private final String type;
+
+    private final boolean isSortable;
+
+    private final String styleColumn;
+    private final String formatColumn;
+
+    private final boolean isStyleColumn;
+    private final boolean isFormatColumn;
+    private final boolean isPartitionColumn;
+    private final boolean isHierarchicalExpandByColumn;
+    private final boolean isHierarchicalRowDepthColumn;
+    private final boolean isHierarchicalRowExpandedColumn;
+    private final boolean isRollupAggregatedNodeColumn;
+    private final boolean isRollupConstituentNodeColumn;
+    private final boolean isRollupGroupByColumn;
+    private final String rollupAggregationInputColumn;
+
+    // Indicates that this is a style column for the whole row
+    private final boolean forRow;
+    private final boolean isInputTableKeyColumn;
+    private final String description;
+
+    public ColumnDefinition(int index, Field field) {
+        Map fieldMetadata =
+                
keyValuePairs("deephaven:", field.customMetadataLength(), field::customMetadata); + this.field = field; + columnIndex = index; + type = fieldMetadata.get("type"); + isSortable = "true".equals(fieldMetadata.get("isSortable")); + isStyleColumn = "true".equals(fieldMetadata.get("isStyle")); + isFormatColumn = "true".equals(fieldMetadata.get("isDateFormat")) + || "true".equals(fieldMetadata.get("isNumberFormat")); + forRow = "true".equals(fieldMetadata.get("isRowStyle")); + + String formatColumnName = fieldMetadata.get("dateFormatColumn"); + if (formatColumnName == null) { + formatColumnName = fieldMetadata.get("numberFormatColumn"); + } + formatColumn = formatColumnName; - public String getName() { - return name; - } + styleColumn = fieldMetadata.get("styleColumn"); - public int getColumnIndex() { - return columnIndex; + isInputTableKeyColumn = "true".equals(fieldMetadata.get("inputtable.isKey")); + + this.description = fieldMetadata.get("description"); + + isPartitionColumn = "true".equals(fieldMetadata.get("isPartitioning")); + + isHierarchicalExpandByColumn = "true".equals(fieldMetadata.get("hierarchicalTable.isExpandByColumn")); + isHierarchicalRowDepthColumn = "true".equals(fieldMetadata.get("hierarchicalTable.isRowDepthColumn")); + isHierarchicalRowExpandedColumn = "true".equals(fieldMetadata.get("hierarchicalTable.isRowExpandedColumn")); + isRollupAggregatedNodeColumn = "true".equals(fieldMetadata.get("rollupTable.isAggregatedNodeColumn")); + isRollupConstituentNodeColumn = "true".equals(fieldMetadata.get("rollupTable.isConstituentNodeColumn")); + isRollupGroupByColumn = "true".equals(fieldMetadata.get("rollupTable.isGroupByColumn")); + rollupAggregationInputColumn = fieldMetadata.get("rollupTable.aggregationInputColumnName"); } - public void setColumnIndex(int columnIndex) { - this.columnIndex = columnIndex; + public String getName() { + return field.name(); } - public void setName(String name) { - this.name = name; + public int getColumnIndex() { + return columnIndex; } public String getType() { return type; } - public void setType(String type) { - this.type = type; - } - public boolean isSortable() { return isSortable; } - public void setIsSortable(boolean sortable) { - isSortable = sortable; - } - public boolean isStyleColumn() { return isStyleColumn; } - public void setStyleColumn(boolean styleColumn) { - isStyleColumn = styleColumn; - } - public boolean isFormatColumn() { return isFormatColumn; } - public void setFormatColumn(boolean formatColumn) { - isFormatColumn = formatColumn; - } - - /** - * @deprecated Use {@link #isFormatColumn()} - */ - @Deprecated - public boolean isNumberFormatColumn() { - return isNumberFormatColumn; - } - - /** - * @deprecated Use {@link #setFormatColumn(boolean)} - */ - @Deprecated - public void setNumberFormatColumn(boolean numberFormatColumn) { - isNumberFormatColumn = numberFormatColumn; - } - public boolean isPartitionColumn() { return isPartitionColumn; } - public void setPartitionColumn(boolean partitionColumn) { - isPartitionColumn = partitionColumn; - } - public boolean isVisible() { return !isStyleColumn() && !isFormatColumn() && !isRollupConstituentNodeColumn() && !isHierarchicalRowDepthColumn() && !isHierarchicalRowExpandedColumn(); @@ -115,38 +108,18 @@ public boolean isForRow() { return forRow; } - public void setForRow(boolean forRow) { - this.forRow = forRow; - } - public String getFormatColumnName() { return formatColumn; } - public void setFormatColumnName(String formatColumn) { - this.formatColumn = formatColumn; - } - public 
String getStyleColumnName() { return styleColumn; } - public void setStyleColumnName(String styleColumn) { - this.styleColumn = styleColumn; - } - - public void setInputTableKeyColumn(boolean inputTableKeyColumn) { - this.isInputTableKeyColumn = inputTableKeyColumn; - } - public boolean isInputTableKeyColumn() { return isInputTableKeyColumn; } - public void setDescription(String description) { - this.description = description; - } - public String getDescription() { return description; } @@ -161,10 +134,10 @@ public Column makeJsColumn(int index, Map return makeColumn(index, this, - format == null || !format.isNumberFormatColumn() ? null : format.getColumnIndex(), + format == null || !format.isFormatColumn() ? null : format.getColumnIndex(), style == null ? null : style.getColumnIndex(), isPartitionColumn(), - format == null || format.isNumberFormatColumn() ? null : format.getColumnIndex(), + format == null || format.isFormatColumn() ? null : format.getColumnIndex(), getDescription(), isInputTableKeyColumn()); } @@ -181,55 +154,28 @@ public boolean isHierarchicalExpandByColumn() { return isHierarchicalExpandByColumn; } - public void setHierarchicalExpandByColumn(boolean hierarchicalExpandByColumn) { - isHierarchicalExpandByColumn = hierarchicalExpandByColumn; - } - public boolean isHierarchicalRowDepthColumn() { return isHierarchicalRowDepthColumn; } - public void setHierarchicalRowDepthColumn(boolean hierarchicalRowDepthColumn) { - isHierarchicalRowDepthColumn = hierarchicalRowDepthColumn; - } - public boolean isHierarchicalRowExpandedColumn() { return isHierarchicalRowExpandedColumn; } - public void setHierarchicalRowExpandedColumn(boolean hierarchicalRowExpandedColumn) { - isHierarchicalRowExpandedColumn = hierarchicalRowExpandedColumn; - } - public boolean isRollupAggregatedNodeColumn() { return isRollupAggregatedNodeColumn; } - public void setRollupAggregatedNodeColumn(boolean rollupAggregatedNodeColumn) { - isRollupAggregatedNodeColumn = rollupAggregatedNodeColumn; - } - public boolean isRollupConstituentNodeColumn() { return isRollupConstituentNodeColumn; } - public void setRollupConstituentNodeColumn(boolean rollupConstituentNodeColumn) { - isRollupConstituentNodeColumn = rollupConstituentNodeColumn; - } - public boolean isRollupGroupByColumn() { return isRollupGroupByColumn; } - public void setRollupGroupByColumn(boolean rollupGroupByColumn) { - isRollupGroupByColumn = rollupGroupByColumn; - } - public String getRollupAggregationInputColumn() { return rollupAggregationInputColumn; } - public void setRollupAggregationInputColumn(String rollupAggregationInputColumn) { - this.rollupAggregationInputColumn = rollupAggregationInputColumn; - } } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/SubscriptionTableData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/SubscriptionTableData.java index 84e4dc1c758..b6cff055bbf 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/SubscriptionTableData.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/SubscriptionTableData.java @@ -12,10 +12,7 @@ import io.deephaven.web.shared.data.*; import io.deephaven.web.shared.data.columns.ColumnData; import jsinterop.annotations.JsFunction; -import jsinterop.annotations.JsIgnore; -import jsinterop.annotations.JsMethod; import jsinterop.annotations.JsProperty; -import jsinterop.annotations.JsType; import jsinterop.base.Any; import jsinterop.base.Js; import jsinterop.base.JsArrayLike; @@ 
-28,6 +25,7 @@ import java.util.TreeMap; import static io.deephaven.web.client.api.subscription.ViewportData.NO_ROW_FORMAT_COLUMN; +@Deprecated public class SubscriptionTableData { @JsFunction private interface ArrayCopy { @@ -493,8 +491,8 @@ public Format getFormat(Column column) { JsArray rowStyle = Js.uncheckedCast(data[rowStyleColumn]); rowColors = rowStyle.getAtAsAny(redirectedIndex).asLong(); } - if (column.getFormatColumnIndex() != null) { - JsArray formatStrings = Js.uncheckedCast(data[column.getFormatColumnIndex()]); + if (column.getFormatStringColumnIndex() != null) { + JsArray formatStrings = Js.uncheckedCast(data[column.getFormatStringColumnIndex()]); numberFormat = formatStrings.getAtAsAny(redirectedIndex).asString(); } if (column.getFormatStringColumnIndex() != null) { @@ -617,8 +615,8 @@ public Format getFormat(long index, Column column) { JsArray rowStyle = Js.uncheckedCast(data[rowStyleColumn]); rowColors = rowStyle.getAtAsAny(redirectedIndex).asLong(); } - if (column.getFormatColumnIndex() != null) { - JsArray formatStrings = Js.uncheckedCast(data[column.getFormatColumnIndex()]); + if (column.getFormatStringColumnIndex() != null) { + JsArray formatStrings = Js.uncheckedCast(data[column.getFormatStringColumnIndex()]); numberFormat = formatStrings.getAtAsAny(redirectedIndex).asString(); } if (column.getFormatStringColumnIndex() != null) { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableSubscription.java index f94aa9a5775..365f007711f 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableSubscription.java @@ -3,18 +3,43 @@ // package io.deephaven.web.client.api.subscription; +import com.google.flatbuffers.FlatBufferBuilder; +import com.vertispan.tsdefs.annotations.TsInterface; import com.vertispan.tsdefs.annotations.TsName; +import com.vertispan.tsdefs.annotations.TsTypeRef; import elemental2.core.JsArray; -import elemental2.promise.Promise; +import elemental2.dom.CustomEventInit; +import io.deephaven.barrage.flatbuf.BarrageMessageType; +import io.deephaven.extensions.barrage.BarrageSubscriptionOptions; +import io.deephaven.extensions.barrage.ColumnConversionMode; +import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb.FlightData; import io.deephaven.web.client.api.Column; +import io.deephaven.web.client.api.Format; import io.deephaven.web.client.api.HasEventHandling; +import io.deephaven.web.client.api.JsRangeSet; import io.deephaven.web.client.api.JsTable; -import io.deephaven.web.shared.data.DeltaUpdates; -import io.deephaven.web.shared.data.TableSnapshot; +import io.deephaven.web.client.api.LongWrapper; +import io.deephaven.web.client.api.TableData; +import io.deephaven.web.client.api.WorkerConnection; +import io.deephaven.web.client.api.barrage.WebBarrageMessage; +import io.deephaven.web.client.api.barrage.WebBarrageStreamReader; +import io.deephaven.web.client.api.barrage.WebBarrageUtils; +import io.deephaven.web.client.api.barrage.data.WebBarrageSubscription; +import io.deephaven.web.client.api.barrage.stream.BiDiStream; +import io.deephaven.web.client.fu.JsSettings; +import io.deephaven.web.client.state.ClientTableState; +import io.deephaven.web.shared.data.RangeSet; +import io.deephaven.web.shared.data.ShiftedRange; import jsinterop.annotations.JsIgnore; -import 
jsinterop.annotations.JsMethod; +import jsinterop.annotations.JsNullable; import jsinterop.annotations.JsProperty; import jsinterop.annotations.JsType; +import jsinterop.base.Any; +import jsinterop.base.Js; +import jsinterop.base.JsArrayLike; + +import java.io.IOException; +import java.util.BitSet; import static io.deephaven.web.client.api.subscription.ViewportData.NO_ROW_FORMAT_COLUMN; @@ -41,51 +66,295 @@ public class TableSubscription extends HasEventHandling { public static final String EVENT_UPDATED = "updated"; - // column defs in this subscription + private final ClientTableState state; + private final int rowStyleColumn; private JsArray columns; - // holder for data - private SubscriptionTableData data; + private BitSet columnBitSet; + private BarrageSubscriptionOptions options; + + private final BiDiStream doExchange; + private final WebBarrageSubscription barrageSubscription; - // table created for this subscription - private Promise copy; + private boolean subscriptionReady; - // copy from the initially given table so we don't need to way @JsIgnore public TableSubscription(JsArray columns, JsTable existingTable, Double updateIntervalMs) { + state = existingTable.state(); + state.retain(this); + this.columns = columns; + rowStyleColumn = existingTable.state().getRowFormatColumn() == null ? NO_ROW_FORMAT_COLUMN + : existingTable.state().getRowFormatColumn().getIndex(); - copy = existingTable.copy(false).then(table -> new Promise<>((resolve, reject) -> { - table.state().onRunning(newState -> { - // TODO handle updateInterval core#188 - table.internalSubscribe(columns, this); + WorkerConnection connection = existingTable.getConnection(); + doExchange = + connection.streamFactory().create( + headers -> connection.flightServiceClient().doExchange(headers), + (first, headers) -> connection.browserFlightServiceClient().openDoExchange(first, headers), + (next, headers, c) -> connection.browserFlightServiceClient().nextDoExchange(next, headers, + c::apply), + new FlightData()); - resolve.onInvoke(table); - }, table::close); - })); + doExchange.onData(this::onFlightData); + // TODO handle stream ending, error - this.columns = columns; - Integer rowStyleColumn = existingTable.state().getRowFormatColumn() == null ? NO_ROW_FORMAT_COLUMN - : existingTable.state().getRowFormatColumn().getIndex(); - this.data = new SubscriptionTableData(columns, rowStyleColumn, this); + changeSubscription(columns, updateIntervalMs); + // TODO going to need "started change" so we don't let data escape when still updating + barrageSubscription = WebBarrageSubscription.subscribe(state, this::onViewportChange, this::onDataChanged); } - // public void changeSubscription(JsArray columns) { - // copy.then(t ->{ - // t.internalSubscribe(columns, this); - // return Promise.resolve(t); - // }); - // this.columns = columns; - // } + private void onDataChanged(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet totalMods, ShiftedRange[] shifted, + BitSet modifiedColumnSet) { + if (!subscriptionReady) { + return; + } + // TODO if this was a snapshot (or subscriptionReady was false for some interval), we probably need to + // notify of the entire table as a single big change - @JsIgnore - public void handleSnapshot(TableSnapshot snapshot) { - data.handleSnapshot(snapshot); + // TODO Rewrite shifts as adds/removed/modifies? in the past we ignored them... 
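+        // Package the applied changes so listeners can read them; rows and cells are resolved
+        // lazily against the subscription's current data rather than copied into the event.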
+ UpdateEventData detail = new UpdateEventData(rowsAdded, rowsRemoved, totalMods, shifted); + CustomEventInit event = CustomEventInit.create(); + event.setDetail(detail); + fireEvent(TableSubscription.EVENT_UPDATED, event); } - @JsIgnore - public void handleDelta(DeltaUpdates delta) { - data.handleDelta(delta); + @TsInterface + @TsName(namespace = "dh") + public class SubscriptionRow implements TableData.Row { + private final long index; + public LongWrapper indexCached; + + public SubscriptionRow(long index) { + this.index = index; + } + + @Override + public LongWrapper getIndex() { + if (indexCached == null) { + indexCached = LongWrapper.of(index); + } + return indexCached; + } + + @Override + public Any get(Column column) { + return barrageSubscription.getData(index, column.getIndex()); + } + + @Override + public Format getFormat(Column column) { + long cellColors = 0; + long rowColors = 0; + String numberFormat = null; + String formatString = null; + if (column.getStyleColumnIndex() != null) { + cellColors = barrageSubscription.getData(index, column.getStyleColumnIndex()); + } + if (rowStyleColumn != NO_ROW_FORMAT_COLUMN) { + rowColors = barrageSubscription.getData(index, rowStyleColumn); + } + if (column.getFormatStringColumnIndex() != null) { + numberFormat = barrageSubscription.getData(index, column.getFormatStringColumnIndex()); + } + if (column.getFormatStringColumnIndex() != null) { + formatString = barrageSubscription.getData(index, column.getFormatStringColumnIndex()); + } + return new Format(cellColors, rowColors, numberFormat, formatString); + } + } + + + @TsInterface + @TsName(name = "SubscriptionTableData", namespace = "dh") + public class UpdateEventData implements TableData { + private final JsRangeSet added; + private final JsRangeSet removed; + private final JsRangeSet modified; + + // cached copy in case it was requested, could be requested again + private JsArray allRows; + + public UpdateEventData(RangeSet added, RangeSet removed, RangeSet modified, ShiftedRange[] shifted) { + this.added = new JsRangeSet(added); + this.removed = new JsRangeSet(removed); + this.modified = new JsRangeSet(modified); + } + + /** + * A lazily computed array of all rows in the entire table + * + * @return {@link SubscriptionRow} array. 
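+     *         The array is computed once on first access and cached on this event; in dev mode its length is
+     *         asserted to match the subscription's current row set.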
+     */
+    @Override
+    public JsArray getRows() {
+        if (allRows == null) {
+            allRows = new JsArray<>();
+            barrageSubscription.getCurrentRowSet().indexIterator().forEachRemaining((long index) -> {
+                allRows.push(new SubscriptionRow(index));
+            });
+            if (JsSettings.isDevMode()) {
+                assert allRows.length == barrageSubscription.getCurrentRowSet().size();
+            }
+        }
+        return allRows;
+    }
+
+    @Override
+    public Row get(int index) {
+        return this.get((long) index);
+    }
+
+    /**
+     * Reads a row object from the table, from which any subscribed column can be read.
+     *
+     * @param index the position of the row in the table
+     * @return {@link SubscriptionRow}
+     */
+    @Override
+    public SubscriptionRow get(long index) {
+        return new SubscriptionRow(index);
+    }
+
+    @Override
+    public Any getData(int index, Column column) {
+        return getData((long) index, column);
+    }
+
+    /**
+     * Reads a specific cell from the table, from the specified row and column.
+     *
+     * @param index the row index to read
+     * @param column the column to read
+     * @return Any
+     */
+    @Override
+    public Any getData(long index, Column column) {
+        return barrageSubscription.getData(index, column.getIndex());
+    }
+
+    /**
+     * Returns the Format to use for a cell from the specified row and column.
+     *
+     * @param index the row index to read
+     * @param column the column to read
+     * @return {@link Format}
+     */
+    @Override
+    public Format getFormat(int index, Column column) {
+        return getFormat((long) index, column);
+    }
+
+    @Override
+    public Format getFormat(long index, Column column) {
+        long cellColors = 0;
+        long rowColors = 0;
+        String numberFormat = null;
+        String formatString = null;
+        if (column.getStyleColumnIndex() != null) {
+            cellColors = barrageSubscription.getData(index, column.getStyleColumnIndex());
+        }
+        if (rowStyleColumn != NO_ROW_FORMAT_COLUMN) {
+            rowColors = barrageSubscription.getData(index, rowStyleColumn);
+        }
+        if (column.getFormatStringColumnIndex() != null) {
+            numberFormat = barrageSubscription.getData(index, column.getFormatStringColumnIndex());
+        }
+        if (column.getFormatStringColumnIndex() != null) {
+            formatString = barrageSubscription.getData(index, column.getFormatStringColumnIndex());
+        }
+        return new Format(cellColors, rowColors, numberFormat, formatString);
+    }
+
+    @Override
+    public JsArray getColumns() {
+        return columns;
+    }
+
+    /**
+     * The ordered set of row indexes added since the last update
+     *
+     * @return dh.RangeSet
+     */
+    @JsProperty
+    public JsRangeSet getAdded() {
+        return added;
+    }
+
+    /**
+     * The ordered set of row indexes removed since the last update
+     *
+     * @return dh.RangeSet
+     */
+    @JsProperty
+    public JsRangeSet getRemoved() {
+        return removed;
+    }
+
+    /**
+     * The ordered set of row indexes updated since the last update
+     *
+     * @return dh.RangeSet
+     */
+    @JsProperty
+    public JsRangeSet getModified() {
+        return modified;
+    }
+
+    @JsProperty
+    public JsRangeSet getFullIndex() {
+        return new JsRangeSet(barrageSubscription.getCurrentRowSet());
+    }
+    }
+
+    private void onViewportChange(RangeSet serverViewport, BitSet serverColumns, boolean serverReverseViewport) {
+        if (serverViewport != null || serverReverseViewport) {
+            throw new IllegalStateException("Not a viewport subscription");
+        }
+        subscriptionReady = serverColumns.equals(this.columnBitSet);
+    }
+
+    private void onFlightData(FlightData data) {
+        WebBarrageStreamReader reader = new WebBarrageStreamReader();
+        WebBarrageMessage message;
+        try {
+            message = reader.parseFrom(options, null, state.chunkTypes(), state.columnTypes(), state.componentTypes(),
+                    data);
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+        if (message != null) {
+            // This payload resulted in an 
+        barrageSubscription.applyUpdates(message);
+    }
+}
+
+    /**
+     * Updates the subscription to use the given columns and update interval.
+     *
+     * @param columns the new columns to subscribe to
+     * @param updateIntervalMs the new update interval, or null/omit to use the default of one second
+     */
+    public void changeSubscription(JsArray<Column> columns, @JsNullable Double updateIntervalMs) {
+        // TODO validate that we can change the update interval
+        this.columns = columns;
+        this.columnBitSet = state.makeBitset(Js.uncheckedCast(columns));
+        this.options = BarrageSubscriptionOptions.builder()
+                .batchSize(WebBarrageSubscription.BATCH_SIZE)
+                .maxMessageSize(WebBarrageSubscription.MAX_MESSAGE_SIZE)
+                .columnConversionMode(ColumnConversionMode.Stringify)
+                .minUpdateIntervalMs(updateIntervalMs == null ? 0 : (int) (double) updateIntervalMs)
+                .columnsAsList(false)
+                .build();
+        FlatBufferBuilder request = WebBarrageSubscription.subscriptionRequest(
+                Js.uncheckedCast(state.getHandle().getTicket()),
+                columnBitSet,
+                null,
+                options);
+        FlightData subscriptionRequest = new FlightData();
+        subscriptionRequest
+                .setAppMetadata(WebBarrageUtils.wrapMessage(request, BarrageMessageType.BarrageSubscriptionRequest));
+        doExchange.send(subscriptionRequest);
    }

    /**
@@ -102,9 +371,7 @@ public JsArray getColumns() {
     * Stops the subscription on the server.
     */
    public void close() {
-        copy.then(table -> {
-            table.close();
-            return Promise.resolve(table);
-        });
+        doExchange.end();
+        doExchange.cancel();
    }
}
diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java
index 294f2826963..e37c77b06d5 100644
--- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java
+++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java
@@ -66,12 +66,15 @@
 *
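The request flow in changeSubscription above is worth reading as a recipe: build the BarrageSubscriptionOptions, encode a BarrageSubscriptionRequest flatbuffer, attach it as app metadata on an otherwise-empty FlightData, and send it down the already-open DoExchange; close() then tears down both directions of that stream. A minimal sketch using only the calls that appear in this patch (ticketBytes and columnBitSet are assumed to be in hand):

    // Subscribe: options -> flatbuffer request -> FlightData app metadata -> DoExchange
    BarrageSubscriptionOptions options = BarrageSubscriptionOptions.builder()
            .batchSize(WebBarrageSubscription.BATCH_SIZE)
            .maxMessageSize(WebBarrageSubscription.MAX_MESSAGE_SIZE)
            .columnConversionMode(ColumnConversionMode.Stringify)
            .minUpdateIntervalMs(1000) // explicit one-second update interval
            .columnsAsList(false)
            .build();
    FlatBufferBuilder request = WebBarrageSubscription.subscriptionRequest(
            ticketBytes, columnBitSet, null /* null viewport subscribes to the full table */, options);
    FlightData flightData = new FlightData();
    flightData.setAppMetadata(WebBarrageUtils.wrapMessage(request, BarrageMessageType.BarrageSubscriptionRequest));
    doExchange.send(flightData);

    // Unsubscribe: half-close the client side of the stream, then cancel the server's
    doExchange.end();
    doExchange.cancel();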

 * Note that if the caller does close an instance, this shuts down the JsTable's use of it (though the JsTable dropping
 * this subscription does not close the instance), providing a way to stop the server from streaming updates to the
 * client.
- *
+ *

* This object serves as a "handle" to a subscription, allowing it to be acted on directly or canceled outright. If you * retain an instance of this, you have two choices - either only use it to call `close()` on it to stop the table's * viewport without creating a new one, or listen directly to this object instead of the table for data events, and * always call `close()` when finished. Calling any method on this object other than close() will result in it * continuing to live on after `setViewport` is called on the original table, or after the table is modified. + * + * + * */ @TsInterface @TsName(namespace = "dh") @@ -133,7 +136,7 @@ public TableViewportSubscription(double firstRow, double lastRow, Column[] colum }); // TODO handle updateInterval core#188 Column[] columnsToSub = table.isBlinkTable() ? Js.uncheckedCast(table.getColumns()) : columns; - table.setInternalViewport(firstRow, lastRow, columnsToSub); + // table.setInternalViewport(firstRow, lastRow, columnsToSub); // Listen for events and refire them on ourselves, optionally on the original table table.addEventListener(JsTable.EVENT_UPDATED, this::refire); @@ -213,7 +216,7 @@ public void setInternalViewport(double firstRow, double lastRow, Column[] column copy.then(table -> { if (!table.isBlinkTable()) { // we only set blink table viewports once; and that's in the constructor - table.setInternalViewport(firstRow, lastRow, columns); + // table.setInternalViewport(firstRow, lastRow, columns); } return Promise.resolve(table); }); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java index 3ca96d1ed2e..82fc7d2ea6e 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java @@ -341,207 +341,207 @@ public JsArray getColumns() { return columns; } - public MergeResults merge(DeltaUpdates updates) { - if (offset == -1 && updates.getIncludedAdditions().size() > 0) { - offset = updates.getIncludedAdditions().getFirstRow(); - } - final MergeResults updated = new MergeResults(); - - // First we remove rows by nulling them out. - updates.getRemoved().indexIterator().forEachRemaining((long removedIndex) -> { - int internalOffset = (int) (removedIndex - offset); - if (internalOffset < 0 || internalOffset >= length) { - return; - } - for (int i = 0; i < data.length; i++) { - JsArray existingColumnData = Js.uncheckedCast(data[i]); - if (existingColumnData == null) { - continue; - } - existingColumnData.setAt(internalOffset, NULL_SENTINEL); - } - updated.removed.add(internalOffset); - }); - - // Now we shift data around. 
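The shift handling being retired here follows memmove discipline: a positive delta moves rows toward higher indexes, so those shifts must be applied right-to-left or a row would be overwritten before it has been moved, and negative-delta shifts are applied left-to-right for the same reason. A standalone sketch of that rule on a plain array (illustrative names only, not the viewport code itself; bounds are assumed valid):

    // Move data[first..last] by delta without clobbering values that still need to move.
    static void applyShift(Object[] data, int first, int last, int delta) {
        if (delta > 0) {
            for (int j = last; j >= first; j--) { // forward shift: walk right-to-left
                data[j + delta] = data[j];
                data[j] = null; // vacate the old slot
            }
        } else if (delta < 0) {
            for (int j = first; j <= last; j++) { // reverse shift: walk left-to-right
                data[j + delta] = data[j];
                data[j] = null;
            }
        }
    }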
- boolean hasReverseShift = false; - final ShiftedRange[] shiftedRanges = updates.getShiftedRanges(); - - // must apply shifts in mem-move semantics; so we shift forward from right to left first - for (int si = shiftedRanges.length - 1; si >= 0; --si) { - final ShiftedRange shiftedRange = shiftedRanges[si]; - final long shiftDelta = shiftedRange.getDelta(); - if (shiftDelta < 0) { - hasReverseShift = true; - continue; - } - - final long beginAsLong = Math.max(shiftedRange.getRange().getFirst() - offset, 0); - final int end = (int) Math.min(shiftedRange.getRange().getLast() - offset, length - 1); - if (end < beginAsLong) { - // this range is out of our viewport - continue; - } - - // long math is expensive; so convert to int early/once - final int begin = (int) beginAsLong; - - // iterate backward and move them forward - for (int j = end; j >= begin; --j) { - for (int i = 0; i < data.length; ++i) { - final JsArray existingColumnData = Js.uncheckedCast(data[i]); - if (existingColumnData == null) { - continue; - } - - final long internalOffsetAsLong = (j + shiftDelta); - if (internalOffsetAsLong >= 0 && internalOffsetAsLong < maxLength) { - // because internalOffsetAsLong is less than maxLen; we know it must be fit in an int - final int internalOffset = (int) internalOffsetAsLong; - updated.added.add(internalOffset); - Any toMove = existingColumnData.getAt(j); - existingColumnData.setAt(internalOffset, toMove); - } - - updated.removed.add(j); - existingColumnData.setAt(j, NULL_SENTINEL); - } - } - } - if (hasReverseShift) { - // then we shift in reverse from left to right - for (int si = 0; si < shiftedRanges.length; ++si) { - final ShiftedRange shiftedRange = shiftedRanges[si]; - final long shiftDelta = shiftedRange.getDelta(); - if (shiftDelta > 0) { - continue; - } - - final long begin = Math.max(shiftedRange.getRange().getFirst() - offset, 0); - final int end = (int) Math.min(shiftedRange.getRange().getLast() - offset, length - 1); - if (end < begin) { - // this range is out of our viewport - continue; - } - - // iterate forward and move them backward (note: since begin is <= end, we now know it fits in an int) - for (int j = (int) begin; j <= end; ++j) { - for (int i = 0; i < data.length; ++i) { - final JsArray existingColumnData = Js.uncheckedCast(data[i]); - if (existingColumnData == null) { - continue; - } - - final long internalOffsetAsLong = j + shiftDelta; - if (internalOffsetAsLong >= 0 && internalOffsetAsLong < maxLength) { - // because internalOffsetAsLong is less than maxLen; we know it must be fit in an int - final int internalOffset = (int) internalOffsetAsLong; - updated.added.add(internalOffset); - existingColumnData.setAt(internalOffset, existingColumnData.getAt(j)); - } - - updated.removed.add(j); - existingColumnData.setAt(j, NULL_SENTINEL); - } - } - } - } - - DeltaUpdates.ColumnModifications[] serializedModifications = updates.getSerializedModifications(); - for (int modifiedColIndex = 0; modifiedColIndex < serializedModifications.length; modifiedColIndex++) { - final DeltaUpdates.ColumnModifications modifiedColumn = serializedModifications[modifiedColIndex]; - final OfLong it = modifiedColumn == null ? 
null : modifiedColumn.getRowsIncluded().indexIterator(); - - if (it == null || !it.hasNext()) { - continue; - } - - // look for a local Column which matches this index so we know how to clean it - final Column column = columns.find((c, i1, i2) -> c.getIndex() == modifiedColumn.getColumnIndex()); - final JsArray updatedColumnData = - Js.uncheckedCast(cleanData(modifiedColumn.getValues().getData(), column)); - final JsArray existingColumnData = Js.uncheckedCast(data[modifiedColumn.getColumnIndex()]); - if (updatedColumnData.length == 0) { - continue; - } - - // for each change provided for this column, replace the values in our store - int i = 0; - while (it.hasNext()) { - long modifiedOffset = it.nextLong(); - int internalOffset = (int) (modifiedOffset - offset); - if (internalOffset < 0 || internalOffset >= maxLength) { - i++; - continue;// data we don't need to see, either meant for another table, or we just sent a viewport - // update - } - existingColumnData.setAt(internalOffset, updatedColumnData.getAtAsAny(i)); - updated.modified.add(internalOffset); - i++; - } - } - - if (!updates.getIncludedAdditions().isEmpty()) { - DeltaUpdates.ColumnAdditions[] serializedAdditions = updates.getSerializedAdditions(); - for (int addedColIndex = 0; addedColIndex < serializedAdditions.length; addedColIndex++) { - DeltaUpdates.ColumnAdditions addedColumn = serializedAdditions[addedColIndex]; - - Column column = columns.find((c, i1, i2) -> c.getIndex() == addedColumn.getColumnIndex()); - final JsArray addedColumnData = - Js.uncheckedCast(cleanData(addedColumn.getValues().getData(), column)); - final JsArray existingColumnData = Js.uncheckedCast(data[addedColumn.getColumnIndex()]); - if (addedColumnData.length == 0) { - continue; - } - - int i = 0; - OfLong it = updates.getIncludedAdditions().indexIterator(); - while (it.hasNext()) { - long addedOffset = it.nextLong(); - int internalOffset = (int) (addedOffset - offset); - if (internalOffset < 0 || internalOffset >= maxLength) { - i++; - continue;// data we don't need to see, either meant for another table, or we just sent a - // viewport update - } - assert internalOffset < existingColumnData.length; - - Any existing = existingColumnData.getAt(internalOffset); - if (existing == NULL_SENTINEL || internalOffset >= length) { - // space was set aside or was left at the end of the array for this value, it is a new addition - updated.added.add(internalOffset); - } else { - // we're overwriting some existing value - updated.modified.add(internalOffset); - } - existingColumnData.setAt(internalOffset, addedColumnData.getAtAsAny(i)); - i++; - } - } - } - - // exclude added items from being marked as modified, since we're hiding shifts from api consumers - updated.modified.removeAll(updated.added); - - // Any position which was both added and removed should instead be marked as modified, this cleans - // up anything excluded above that didn't otherwise make sense - for (Iterator it = updated.removed.iterator(); it.hasNext();) { - int ii = it.next(); - if (updated.added.remove(ii)) { - it.remove(); - updated.modified.add(ii); - } - } - - length = length + updated.added.size() - updated.removed.size(); - assert 0 <= length && length <= maxLength; - - // Viewport footprint should be small enough that we can afford to see if this update corrupted our view of the - // world: - assert !dataContainsNullSentinels(); - - return updated; - } + // public MergeResults merge(DeltaUpdates updates) { + // if (offset == -1 && updates.getIncludedAdditions().size() > 0) { + // 
offset = updates.getIncludedAdditions().getFirstRow(); + // } + // final MergeResults updated = new MergeResults(); + // + // // First we remove rows by nulling them out. + // updates.getRemoved().indexIterator().forEachRemaining((long removedIndex) -> { + // int internalOffset = (int) (removedIndex - offset); + // if (internalOffset < 0 || internalOffset >= length) { + // return; + // } + // for (int i = 0; i < data.length; i++) { + // JsArray existingColumnData = Js.uncheckedCast(data[i]); + // if (existingColumnData == null) { + // continue; + // } + // existingColumnData.setAt(internalOffset, NULL_SENTINEL); + // } + // updated.removed.add(internalOffset); + // }); + // + // // Now we shift data around. + // boolean hasReverseShift = false; + // final ShiftedRange[] shiftedRanges = updates.getShiftedRanges(); + // + // // must apply shifts in mem-move semantics; so we shift forward from right to left first + // for (int si = shiftedRanges.length - 1; si >= 0; --si) { + // final ShiftedRange shiftedRange = shiftedRanges[si]; + // final long shiftDelta = shiftedRange.getDelta(); + // if (shiftDelta < 0) { + // hasReverseShift = true; + // continue; + // } + // + // final long beginAsLong = Math.max(shiftedRange.getRange().getFirst() - offset, 0); + // final int end = (int) Math.min(shiftedRange.getRange().getLast() - offset, length - 1); + // if (end < beginAsLong) { + // // this range is out of our viewport + // continue; + // } + // + // // long math is expensive; so convert to int early/once + // final int begin = (int) beginAsLong; + // + // // iterate backward and move them forward + // for (int j = end; j >= begin; --j) { + // for (int i = 0; i < data.length; ++i) { + // final JsArray existingColumnData = Js.uncheckedCast(data[i]); + // if (existingColumnData == null) { + // continue; + // } + // + // final long internalOffsetAsLong = (j + shiftDelta); + // if (internalOffsetAsLong >= 0 && internalOffsetAsLong < maxLength) { + // // because internalOffsetAsLong is less than maxLen; we know it must be fit in an int + // final int internalOffset = (int) internalOffsetAsLong; + // updated.added.add(internalOffset); + // Any toMove = existingColumnData.getAt(j); + // existingColumnData.setAt(internalOffset, toMove); + // } + // + // updated.removed.add(j); + // existingColumnData.setAt(j, NULL_SENTINEL); + // } + // } + // } + // if (hasReverseShift) { + // // then we shift in reverse from left to right + // for (int si = 0; si < shiftedRanges.length; ++si) { + // final ShiftedRange shiftedRange = shiftedRanges[si]; + // final long shiftDelta = shiftedRange.getDelta(); + // if (shiftDelta > 0) { + // continue; + // } + // + // final long begin = Math.max(shiftedRange.getRange().getFirst() - offset, 0); + // final int end = (int) Math.min(shiftedRange.getRange().getLast() - offset, length - 1); + // if (end < begin) { + // // this range is out of our viewport + // continue; + // } + // + // // iterate forward and move them backward (note: since begin is <= end, we now know it fits in an int) + // for (int j = (int) begin; j <= end; ++j) { + // for (int i = 0; i < data.length; ++i) { + // final JsArray existingColumnData = Js.uncheckedCast(data[i]); + // if (existingColumnData == null) { + // continue; + // } + // + // final long internalOffsetAsLong = j + shiftDelta; + // if (internalOffsetAsLong >= 0 && internalOffsetAsLong < maxLength) { + // // because internalOffsetAsLong is less than maxLen; we know it must be fit in an int + // final int internalOffset = (int) 
internalOffsetAsLong; + // updated.added.add(internalOffset); + // existingColumnData.setAt(internalOffset, existingColumnData.getAt(j)); + // } + // + // updated.removed.add(j); + // existingColumnData.setAt(j, NULL_SENTINEL); + // } + // } + // } + // } + // + // DeltaUpdates.ColumnModifications[] serializedModifications = updates.getSerializedModifications(); + // for (int modifiedColIndex = 0; modifiedColIndex < serializedModifications.length; modifiedColIndex++) { + // final DeltaUpdates.ColumnModifications modifiedColumn = serializedModifications[modifiedColIndex]; + // final OfLong it = modifiedColumn == null ? null : modifiedColumn.getRowsIncluded().indexIterator(); + // + // if (it == null || !it.hasNext()) { + // continue; + // } + // + // // look for a local Column which matches this index so we know how to clean it + // final Column column = columns.find((c, i1, i2) -> c.getIndex() == modifiedColumn.getColumnIndex()); + // final JsArray updatedColumnData = + // Js.uncheckedCast(cleanData(modifiedColumn.getValues().getData(), column)); + // final JsArray existingColumnData = Js.uncheckedCast(data[modifiedColumn.getColumnIndex()]); + // if (updatedColumnData.length == 0) { + // continue; + // } + // + // // for each change provided for this column, replace the values in our store + // int i = 0; + // while (it.hasNext()) { + // long modifiedOffset = it.nextLong(); + // int internalOffset = (int) (modifiedOffset - offset); + // if (internalOffset < 0 || internalOffset >= maxLength) { + // i++; + // continue;// data we don't need to see, either meant for another table, or we just sent a viewport + // // update + // } + // existingColumnData.setAt(internalOffset, updatedColumnData.getAtAsAny(i)); + // updated.modified.add(internalOffset); + // i++; + // } + // } + // + // if (!updates.getIncludedAdditions().isEmpty()) { + // DeltaUpdates.ColumnAdditions[] serializedAdditions = updates.getSerializedAdditions(); + // for (int addedColIndex = 0; addedColIndex < serializedAdditions.length; addedColIndex++) { + // DeltaUpdates.ColumnAdditions addedColumn = serializedAdditions[addedColIndex]; + // + // Column column = columns.find((c, i1, i2) -> c.getIndex() == addedColumn.getColumnIndex()); + // final JsArray addedColumnData = + // Js.uncheckedCast(cleanData(addedColumn.getValues().getData(), column)); + // final JsArray existingColumnData = Js.uncheckedCast(data[addedColumn.getColumnIndex()]); + // if (addedColumnData.length == 0) { + // continue; + // } + // + // int i = 0; + // OfLong it = updates.getIncludedAdditions().indexIterator(); + // while (it.hasNext()) { + // long addedOffset = it.nextLong(); + // int internalOffset = (int) (addedOffset - offset); + // if (internalOffset < 0 || internalOffset >= maxLength) { + // i++; + // continue;// data we don't need to see, either meant for another table, or we just sent a + // // viewport update + // } + // assert internalOffset < existingColumnData.length; + // + // Any existing = existingColumnData.getAt(internalOffset); + // if (existing == NULL_SENTINEL || internalOffset >= length) { + // // space was set aside or was left at the end of the array for this value, it is a new addition + // updated.added.add(internalOffset); + // } else { + // // we're overwriting some existing value + // updated.modified.add(internalOffset); + // } + // existingColumnData.setAt(internalOffset, addedColumnData.getAtAsAny(i)); + // i++; + // } + // } + // } + // + // // exclude added items from being marked as modified, since we're hiding shifts from 
api consumers + // updated.modified.removeAll(updated.added); + // + // // Any position which was both added and removed should instead be marked as modified, this cleans + // // up anything excluded above that didn't otherwise make sense + // for (Iterator it = updated.removed.iterator(); it.hasNext();) { + // int ii = it.next(); + // if (updated.added.remove(ii)) { + // it.remove(); + // updated.modified.add(ii); + // } + // } + // + // length = length + updated.added.size() - updated.removed.size(); + // assert 0 <= length && length <= maxLength; + // + // // Viewport footprint should be small enough that we can afford to see if this update corrupted our view of the + // // world: + // assert !dataContainsNullSentinels(); + // + // return updated; + // } private boolean dataContainsNullSentinels() { for (int i = 0; i < data.length; i++) { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportRow.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportRow.java index ade086f4503..83e9d7e3ab6 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportRow.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportRow.java @@ -73,8 +73,8 @@ public Format getFormat(Column column) { if (rowStyleColumn != null) { rowColors = rowStyleColumn.getAtAsAny(offsetInSnapshot).asLong(); } - if (column.getFormatColumnIndex() != null) { - JsArray formatStrings = Js.uncheckedCast(dataColumns[column.getFormatColumnIndex()]); + if (column.getFormatStringColumnIndex() != null) { + JsArray formatStrings = Js.uncheckedCast(dataColumns[column.getFormatStringColumnIndex()]); numberFormat = formatStrings.getAtAsAny(offsetInSnapshot).asString(); } if (column.getFormatStringColumnIndex() != null) { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/state/ActiveTableBinding.java b/web/client-api/src/main/java/io/deephaven/web/client/state/ActiveTableBinding.java index 5aed4d5c121..722daed1b95 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/state/ActiveTableBinding.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/state/ActiveTableBinding.java @@ -3,11 +3,8 @@ // package io.deephaven.web.client.state; -import io.deephaven.web.client.api.Column; import io.deephaven.web.client.api.JsTable; import io.deephaven.web.client.api.state.HasTableState; -import io.deephaven.web.shared.data.RangeSet; -import io.deephaven.web.shared.data.Viewport; /** * An active binding describes the link between a {@link JsTable} and the {@link ClientTableState} it is currently @@ -60,10 +57,6 @@ public class ActiveTableBinding implements HasTableState { * new table must copy all bindings to the new table. 
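One bookkeeping subtlety in the retired merge: because a shift is recorded internally as a remove at the old slot plus an add at the new one, any position that lands in both sets is collapsed back into a plain modification before the update is reported, so API consumers never observe the synthetic churn. That collapse step in isolation (java.util.Set/Iterator, mirroring the loop in the commented code above):

    static void collapseShifts(Set<Integer> added, Set<Integer> removed, Set<Integer> modified) {
        for (Iterator<Integer> it = removed.iterator(); it.hasNext();) {
            int ii = it.next();
            if (added.remove(ii)) {
                it.remove(); // no longer a removal...
                modified.add(ii); // ...just a value change at the same position
            }
        }
    }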
*/ private PausedTableBinding rollback; - private Viewport viewport; - private RangeSet rows; - private Column[] columns; - private boolean subscriptionPending; private ActiveTableBinding( JsTable table, @@ -234,51 +227,4 @@ private void copyRollbacks(ActiveTableBinding sub) { public PausedTableBinding getPaused() { return paused; } - - public Viewport getSubscription() { - return viewport; - } - - public void setViewport(Viewport viewport) { - this.viewport = viewport; - } - - public RangeSet setDesiredViewport(long firstRow, long lastRow, Column[] columns) { - this.rows = RangeSet.ofRange(firstRow, lastRow); - this.columns = columns; - subscriptionPending = true; - return rows; - } - - public void setDesiredSubscription(Column[] columns) { - assert this.rows == null; - this.columns = columns; - subscriptionPending = true; - } - - public RangeSet getRows() { - return rows; - } - - public Column[] getColumns() { - return columns; - } - - public void setSubscriptionPending(boolean subscriptionPending) { - this.subscriptionPending = subscriptionPending; - } - - public boolean isSubscriptionPending() { - return subscriptionPending; - } - - public void maybeReviveSubscription() { - if (subscriptionPending || viewport != null) { - if (rows != null) { - state.setDesiredViewport(table, rows.getFirstRow(), rows.getLastRow(), columns); - } else { - state.subscribe(table, columns); - } - } - } } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java b/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java index 0afcb0e0b53..65a113a04b3 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java @@ -8,6 +8,7 @@ import elemental2.core.JsSet; import elemental2.core.Uint8Array; import elemental2.promise.Promise; +import io.deephaven.chunk.ChunkType; import io.deephaven.javascript.proto.dhinternal.browserheaders.BrowserHeaders; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.table_pb.ExportedTableCreationResponse; import io.deephaven.web.client.api.*; @@ -26,13 +27,10 @@ import jsinterop.base.Js; import org.apache.arrow.flatbuf.Schema; +import java.time.Instant; import java.util.*; -import java.util.function.BinaryOperator; -import java.util.function.Function; -import java.util.stream.Collector; import java.util.stream.Collectors; -import static io.deephaven.web.client.api.barrage.WebBarrageUtils.keyValuePairs; import static io.deephaven.web.client.fu.JsItr.iterate; /** @@ -61,6 +59,67 @@ * Consider making this a js type with restricted, read-only property access. */ public final class ClientTableState extends TableConfig { + public ChunkType[] chunkTypes() { + // This is roughly ReinterpretUtils.maybeConvertToWritablePrimitiveChunkType, and should + // be rewritten to skip the trip through Class + return Arrays.stream(columnTypes()).map(dataType -> { + if (dataType == Boolean.class || dataType == boolean.class) { + return ChunkType.Byte; + } + // if (dataType == Instant.class) { + // // Note that storing ZonedDateTime as a primitive is lossy on the time zone. 
+ // return ChunkType.Long; + // } + return ChunkType.fromElementType(dataType); + }).toArray(ChunkType[]::new); + } + + public Class[] columnTypes() { + return Arrays.stream(getColumns()) + .map(Column::getType) + .map(t -> { + switch (t) { + case "boolean": + case "java.lang.Boolean": + return boolean.class; + case "char": + case "java.lang.Character": + return char.class; + case "byte": + case "java.lang.Byte": + return byte.class; + case "int": + case "java.lang.Integer": + return int.class; + case "short": + case "java.lang.Short": + return short.class; + case "long": + case "java.lang.Long": + return long.class; + case "java.lang.Double": + case "double": + return double.class; + default: + return Object.class; + } + }) + .toArray(Class[]::new); + } + + public Class[] componentTypes() { + // The only componentType that matters is byte.class + return Arrays.stream(getColumns()).map(Column::getType).map(t -> { + if (!t.endsWith("[]")) { + return null; + } + if (t.equals("io.deephaven.vector.ByteVector[]")) { + return byte.class; + } + return Object.class; + }).toArray(Class[]::new); + } + public enum ResolutionState { /** * Table has been created on the client, but client does not yet have a handle ID referring to the table on the @@ -125,7 +184,6 @@ public enum ResolutionState { // A bit of state management private String failMsg; - private boolean subscribed; private Double queuedSize; // Leftovers from Table.StackEntry @@ -361,10 +419,12 @@ public void setSize(long size) { JsConsumer doSetSize = table -> { long localSize = size; - final ActiveTableBinding binding = getActiveBinding(table); - if (binding != null && table.isBlinkTable() && binding.getRows() != null) { - localSize = Math.min(size, binding.getRows().size()); - } + + // TODO relocate this to the subscription for a blink table + // final ActiveTableBinding binding = getActiveBinding(table); + // if (binding != null && table.isBlinkTable() && binding.getRows() != null) { + // localSize = Math.min(size, binding.getRows().size()); + // } table.setSize(localSize); }; @@ -640,12 +700,10 @@ public Column findColumn(String key) { } /** - * @return true if there are no tables bound to this state. - * - * If a table that had a subscription for this state was orphaned by a pending request, we want to clear the - * subscription immediately so it becomes inert (immediately remove the subscription), but we may need to - * rollback the request, and we don't want to release the handle until the pending request is finished - * (whereupon we will remove the binding). + * @return true if there are no tables bound to this state. If a table that had a subscription for this state was + * orphaned by a pending request, we want to clear the subscription immediately so it becomes inert + * (immediately remove the subscription), but we may need to rollback the request, and we don't want to + * release the handle until the pending request is finished (whereupon we will remove the binding). 
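Taken together, chunkTypes(), columnTypes(), and componentTypes() above translate the schema's type strings into the three parallel arrays that WebBarrageStreamReader.parseFrom consumes; Boolean columns are the one special case, mapping to ChunkType.Byte. A worked example for a hypothetical three-column table, applying the mappings exactly as written:

    // Column.getType():  "long"            "java.lang.String"   "io.deephaven.vector.ByteVector[]"
    // columnTypes():     long.class        Object.class         Object.class
    // chunkTypes():      ChunkType.Long    ChunkType.Object     ChunkType.Object
    // componentTypes():  null              null                 byte.class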
*/ public boolean isEmpty() { return active.size == 0 && paused.size == 0 && retainers.size == 0; @@ -668,14 +726,6 @@ public void unretain(Object retainer) { connection.scheduleCheck(this); } - public boolean isActiveEmpty() { - return active.size == 0; - } - - public boolean hasNoSubscriptions() { - return JsItr.iterate(active.values()).allMatch(binding -> binding.getSubscription() == null); - } - public boolean hasSort(Sort candidate) { return getSorts().contains(candidate); } @@ -725,58 +775,58 @@ public boolean releaseTable(JsTable table) { return had; } - public void setDesiredViewport(JsTable table, long firstRow, long lastRow, Column[] columns) { - touch(); - final ActiveTableBinding sub = active.get(table); - assert sub != null : "You cannot set the desired viewport on a non-active state + table combination"; - final RangeSet rows = sub.setDesiredViewport(firstRow, lastRow, columns); - // let event loop eat multiple viewport sets and only apply the last one (winner of who gets spot in map) - LazyPromise.runLater(() -> { - if (sub.getRows() == rows) { - // winner! now, on to the next hurdle... ensuring we have columns. - // TODO: have an onColumnsReady callback, for cases when we know we're only waiting on - // non-column-modifying operations - onRunning(self -> { - if (sub.getRows() == rows) { - // winner again! - applyViewport(sub); - } - }, JsRunnable.doNothing()); - } - }); - } - - public void subscribe(JsTable table, Column[] columns) { - touch(); - ActiveTableBinding binding = active.get(table); - assert binding != null : "No active binding found for table " + table; - - onRunning(self -> { - binding.setSubscriptionPending(true); - - if (getHandle().equals(table.getHandle())) { - binding.setViewport(new Viewport(null, makeBitset(columns))); - table.getConnection().scheduleCheck(this); - } - }, JsRunnable.doNothing()); - } - - private void applyViewport(ActiveTableBinding sub) { - sub.setSubscriptionPending(false); - final JsTable table = sub.getTable(); - // make sure we're still the tail entry before trying to apply viewport - assert isRunning() : "Do not call this method unless you are in a running state! " + this; - if (getHandle().equals(table.getHandle())) { - final RangeSet rows = sub.getRows(); - Column[] desired = sub.getColumns(); - if (Js.isFalsy(desired)) { - desired = getColumns(); - } - Viewport vp = new Viewport(rows, makeBitset(desired)); - sub.setViewport(vp); - table.refreshViewport(this, vp); - } - } + // private void setDesiredViewport(JsTable table, long firstRow, long lastRow, Column[] columns) { + // touch(); + // final ActiveTableBinding sub = active.get(table); + // assert sub != null : "You cannot set the desired viewport on a non-active state + table combination"; + // final RangeSet rows = sub.setDesiredViewport(firstRow, lastRow, columns); + // // let event loop eat multiple viewport sets and only apply the last one (winner of who gets spot in map) + // LazyPromise.runLater(() -> { + // if (sub.getRows() == rows) { + // // winner! now, on to the next hurdle... ensuring we have columns. + // // TODO: have an onColumnsReady callback, for cases when we know we're only waiting on + // // non-column-modifying operations + // onRunning(self -> { + // if (sub.getRows() == rows) { + // // winner again! 
+ // applyViewport(sub); + // } + // }, JsRunnable.doNothing()); + // } + // }); + // } + + // private void subscribe(JsTable table, Column[] columns) { + // touch(); + // ActiveTableBinding binding = active.get(table); + // assert binding != null : "No active binding found for table " + table; + // + // onRunning(self -> { + // binding.setSubscriptionPending(true); + // + // if (getHandle().equals(table.getHandle())) { + // binding.setViewport(new Viewport(null, makeBitset(columns))); + // table.getConnection().scheduleCheck(this); + // } + // }, JsRunnable.doNothing()); + // } + // + // private void applyViewport(ActiveTableBinding sub) { + // sub.setSubscriptionPending(false); + // final JsTable table = sub.getTable(); + // // make sure we're still the tail entry before trying to apply viewport + // assert isRunning() : "Do not call this method unless you are in a running state! " + this; + // if (getHandle().equals(table.getHandle())) { + // final RangeSet rows = sub.getRows(); + // Column[] desired = sub.getColumns(); + // if (Js.isFalsy(desired)) { + // desired = getColumns(); + // } + // Viewport vp = new Viewport(rows, makeBitset(desired)); + // sub.setViewport(vp); + // table.refreshViewport(this, vp); + // } + // } public BitSet makeBitset(Column[] columns) { BitSet bitSet = new BitSet(getTableDef().getColumns().length); @@ -794,15 +844,15 @@ public MappedIterable getBoundTables() { return iterate(active.keys()).plus(iterate(paused.keys())); } - public void forActiveSubscriptions(JsBiConsumer callback) { - JsItr.forEach(active, (table, binding) -> { - if (binding.getSubscription() != null) { - assert binding.getTable() == table - : "Corrupt binding between " + table + " and " + binding + " in " + active; - callback.apply((JsTable) table, binding.getSubscription()); - } - }); - } + // private void forActiveSubscriptions(JsBiConsumer callback) { + // JsItr.forEach(active, (table, binding) -> { + // if (binding.getSubscription() != null) { + // assert binding.getTable() == table + // : "Corrupt binding between " + table + " and " + binding + " in " + active; + // callback.apply((JsTable) table, binding.getSubscription()); + // } + // }); + // } public void forActiveTables(JsConsumer callback) { JsItr.forEach(active, (table, sub) -> { @@ -828,23 +878,23 @@ public void forActiveLifecycles(JsConsumer callback) { } } - public void handleDelta(DeltaUpdates updates) { - assert size != SIZE_UNINITIALIZED : "Received delta before receiving initial size"; - setSize(size + updates.getAdded().size() - updates.getRemoved().size()); - forActiveSubscriptions((table, subscription) -> { - assert table.getHandle().equals(handle); - // we are the active state of this table, so forward along the delta. - table.handleDelta(this, updates); - }); - } - - public void setSubscribed(boolean subscribed) { - this.subscribed = subscribed; - } - - public boolean isSubscribed() { - return subscribed; - } + // public void handleDelta(DeltaUpdates updates) { + // assert size != SIZE_UNINITIALIZED : "Received delta before receiving initial size"; + // setSize(size + updates.getAdded().size() - updates.getRemoved().size()); + // forActiveSubscriptions((table, subscription) -> { + // assert table.getHandle().equals(handle); + // // we are the active state of this table, so forward along the delta. 
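The retired viewport plumbing above leaned on a simple coalescing trick: record the newest request, defer the work, and only act if the request is still the newest when the deferred task runs. The idiom in miniature (applyViewport stands in for whatever finally ships the request; names are illustrative):

    private RangeSet pendingRows; // the most recent request wins

    void setDesiredViewport(long firstRow, long lastRow) {
        RangeSet rows = RangeSet.ofRange(firstRow, lastRow);
        pendingRows = rows;
        LazyPromise.runLater(() -> {
            if (pendingRows == rows) {
                // still the winner after the event loop settled; apply exactly once
                applyViewport(rows);
            }
        });
    }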
+ // table.handleDelta(this, updates); + // }); + // } + // + // public void setSubscribed(boolean subscribed) { + // this.subscribed = subscribed; + // } + // + // public boolean isSubscribed() { + // return subscribed; + // } @Override public String toString() { @@ -921,9 +971,9 @@ public void unpause(JsTable table) { private void refreshSubscription(ActiveTableBinding sub) { assert active.get(sub.getTable()) == sub; - if (!sub.isSubscriptionPending()) { - sub.maybeReviveSubscription(); - } + // if (!sub.isSubscriptionPending()) { + // sub.maybeReviveSubscription(); + // } } public MappedIterable ancestors() { @@ -935,7 +985,7 @@ public MappedIterable reversed() { } /** - * Look through paused tables to see if any of them have been + * Look through paused tables to see if any of them have been closed. */ public void cleanup() { assert JsItr.iterate(active.keys()).allMatch(t -> !t.isAlive() || t.state() == this) From 70c9d2bd9ccfac74a300058fadb4fd0bdb52ddf1 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 9 Apr 2024 20:02:18 -0500 Subject: [PATCH 022/219] Second half of hack-and-slash - compiles and works for primitives! --- .../io/deephaven/web/client/api/JsTable.java | 26 +- .../web/client/api/JsTotalsTable.java | 7 +- .../api/barrage/WebBarrageStreamReader.java | 38 +- .../barrage/data/WebBarrageSubscription.java | 165 +++++++- .../AbstractTableSubscription.java | 385 ++++++++++++++++++ .../api/subscription/TableSubscription.java | 335 +-------------- .../TableViewportSubscription.java | 373 +++++++++-------- .../client/api/subscription/ViewportData.java | 1 - .../web/super/java/lang/ref/Reference.java | 13 +- .../web/client/api/TotalsTableTestGwt.java | 18 +- 10 files changed, 799 insertions(+), 562 deletions(-) create mode 100644 web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java index be9e4cce22f..e8d6df43df9 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java @@ -479,11 +479,14 @@ public String getDescription() { */ @JsProperty public double getTotalSize() { - TableViewportSubscription subscription = subscriptions.get(getHandle()); - if (subscription != null && subscription.getStatus() == TableViewportSubscription.Status.ACTIVE) { - // only ask the viewport for the size if it is alive and ticking - return subscription.totalSize(); - } + // TODO note to me: I don't think this ever made sense, its not like we held open a subscription for the same + // table + // without the filter... 
+ // TableViewportSubscription subscription = subscriptions.get(getHandle()); + // if (subscription != null && subscription.getStatus() == TableViewportSubscription.Status.ACTIVE) { + // // only ask the viewport for the size if it is alive and ticking + // return subscription.totalSize(); + // } return getHeadState().getSize(); } @@ -676,14 +679,14 @@ public JsArray getCustomColumns() { * Overload for Java (since JS just omits the optional params) */ public TableViewportSubscription setViewport(double firstRow, double lastRow) { - return setViewport(firstRow, lastRow, null, null); + return setViewport(firstRow, lastRow, null, null, null); } /** * Overload for Java (since JS just omits the optional param) */ public TableViewportSubscription setViewport(double firstRow, double lastRow, JsArray columns) { - return setViewport(firstRow, lastRow, columns, null); + return setViewport(firstRow, lastRow, columns, null, null); } /** @@ -702,13 +705,14 @@ public TableViewportSubscription setViewport(double firstRow, double lastRow, Js @JsMethod public TableViewportSubscription setViewport(double firstRow, double lastRow, @JsOptional @JsNullable JsArray columns, - @JsOptional @JsNullable Double updateIntervalMs) { - Column[] columnsCopy = columns != null ? Js.uncheckedCast(columns.slice()) : null; + @JsOptional @JsNullable Double updateIntervalMs, + @JsOptional @JsNullable Boolean isReverseViewport) { + Column[] columnsCopy = columns != null ? Js.uncheckedCast(columns.slice()) : state().getColumns(); ClientTableState currentState = state(); TableViewportSubscription activeSubscription = subscriptions.get(getHandle()); if (activeSubscription != null && activeSubscription.getStatus() != TableViewportSubscription.Status.DONE) { // hasn't finished, lets reuse it - activeSubscription.setInternalViewport(firstRow, lastRow, columnsCopy, updateIntervalMs); + activeSubscription.setInternalViewport(firstRow, lastRow, columnsCopy, updateIntervalMs, isReverseViewport); return activeSubscription; } else { // In the past, we left the old sub going until the new one was ready, then started the new one. 
But now, @@ -2012,7 +2016,7 @@ public void setState(final ClientTableState state) { TableViewportSubscription existingSubscription = subscriptions.remove(was.getHandle()); if (existingSubscription != null && existingSubscription.getStatus() != TableViewportSubscription.Status.DONE) { - JsLog.debug("closing old viewport", state(), existingSubscription.state()); + // JsLog.debug("closing old viewport", state(), existingSubscription.state()); // with the replacement state successfully running, we can shut down the old viewport (unless // something external retained it) existingSubscription.internalClose(); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTotalsTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTotalsTable.java index a3dc0ad6390..96cc6387fed 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTotalsTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTotalsTable.java @@ -17,6 +17,7 @@ import io.deephaven.web.shared.fu.RemoverFn; import jsinterop.annotations.JsIgnore; import jsinterop.annotations.JsMethod; +import jsinterop.annotations.JsNullable; import jsinterop.annotations.JsOptional; import jsinterop.annotations.JsProperty; import jsinterop.base.Js; @@ -65,7 +66,7 @@ public JsTotalsTable(JsTable wrappedTable, String directive, JsArray gro public void refreshViewport() { if (firstRow != null && lastRow != null) { - setViewport(firstRow, lastRow, Js.uncheckedCast(columns), updateIntervalMs); + setViewport(firstRow, lastRow, Js.uncheckedCast(columns), updateIntervalMs, null); } } @@ -108,12 +109,12 @@ public JsTotalsTableConfig getTotalsTableConfig() { */ @JsMethod public void setViewport(double firstRow, double lastRow, @JsOptional JsArray columns, - @JsOptional Double updateIntervalMs) { + @JsOptional Double updateIntervalMs, @JsOptional @JsNullable Boolean isReverseViewport) { this.firstRow = firstRow; this.lastRow = lastRow; this.columns = columns != null ? 
Js.uncheckedCast(columns.slice()) : null; this.updateIntervalMs = updateIntervalMs; - wrappedTable.setViewport(firstRow, lastRow, columns, updateIntervalMs); + wrappedTable.setViewport(firstRow, lastRow, columns, updateIntervalMs, isReverseViewport); } /** diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java index cd496cf18b5..e90ca633eac 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java @@ -4,6 +4,7 @@ package io.deephaven.web.client.api.barrage; import com.google.common.io.LittleEndianDataInputStream; +import elemental2.dom.DomGlobal; import io.deephaven.barrage.flatbuf.BarrageMessageType; import io.deephaven.barrage.flatbuf.BarrageMessageWrapper; import io.deephaven.barrage.flatbuf.BarrageModColumnMetadata; @@ -136,18 +137,35 @@ public WebBarrageMessage parseFrom(final StreamReaderOptions options, BitSet exp } } } - ByteBuffer body = TypedArrayHelper.wrap(flightData.getDataBody_asU8()); - if (!body.hasRemaining()) { - throw new IllegalStateException("Missing body tag"); - } + + // TODO double check if this is the right place if (header == null) { throw new IllegalStateException("Missing metadata header; cannot decode body"); } - - if (header.headerType() != MessageHeader.RecordBatch) { - throw new IllegalStateException("Only know how to decode Schema/BarrageRecordBatch messages"); + byte headerType = header.headerType(); + if (headerType == MessageHeader.Schema) { + // there is no body and our clients do not want to see schema messages + return null; + } + if (headerType != MessageHeader.RecordBatch) { + throw new IllegalStateException("Only know how to decode Schema/RecordBatch messages"); } + ByteBuffer body = TypedArrayHelper.wrap(flightData.getDataBody_asU8()); + // final RecordBatch batch = (RecordBatch) header.header(new RecordBatch()); + // DomGlobal.console.log(headerType, MessageHeader.names[headerType]); + // DomGlobal.console.log("body.limit()", body.limit()); + // DomGlobal.console.log("batch.length()", batch.length()); + // DomGlobal.console.log("batch.buffersLength()", batch.buffersLength()); + // for (int i = 0; i < batch.buffersLength(); i++) { + // DomGlobal.console.log("batch.buffers("+i+").offset()", batch.buffers(i).offset()); + // DomGlobal.console.log("batch.buffers("+i+").length()", batch.buffers(i).length()); + // } + + // if (!body.hasRemaining()) { + // throw new IllegalStateException("Missing body tag"); + // } + // throw an error when no app metadata (snapshots now provide by default) if (msg == null) { throw new IllegalStateException( @@ -244,12 +262,6 @@ public WebBarrageMessage parseFrom(final StreamReaderOptions options, BitSet exp numModRowsRead += batch.length(); } - - if (header.headerType() == MessageHeader.Schema) { - // there is no body and our clients do not want to see schema messages - return null; - } - if (numAddRowsRead == numAddRowsTotal && numModRowsRead == numModRowsTotal) { final WebBarrageMessage retval = msg; msg = null; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java index 5c1fc2400e9..f0df617fb4c 100644 --- 
a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java @@ -4,16 +4,25 @@ package io.deephaven.web.client.api.barrage.data; import com.google.flatbuffers.FlatBufferBuilder; +import elemental2.core.JsArray; +import elemental2.dom.DomGlobal; import io.deephaven.barrage.flatbuf.BarrageSubscriptionRequest; import io.deephaven.chunk.Chunk; +import io.deephaven.chunk.DoubleChunk; +import io.deephaven.chunk.IntChunk; +import io.deephaven.chunk.LongChunk; +import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.web.client.api.barrage.CompressedRangeSetReader; import io.deephaven.web.client.api.barrage.WebBarrageMessage; import io.deephaven.web.client.api.barrage.def.InitialTableDefinition; +import io.deephaven.web.client.fu.JsData; import io.deephaven.web.client.state.ClientTableState; import io.deephaven.web.shared.data.Range; import io.deephaven.web.shared.data.RangeSet; import io.deephaven.web.shared.data.ShiftedRange; +import jsinterop.base.Any; +import jsinterop.base.Js; import org.jetbrains.annotations.Nullable; import java.util.Arrays; @@ -38,14 +47,99 @@ public abstract class WebBarrageSubscription { public static WebBarrageSubscription subscribe(ClientTableState cts, ViewportChangedHandler viewportChangedHandler, DataChangedHandler dataChangedHandler) { + WebDataSink[] dataSinks = new WebDataSink[cts.columnTypes().length]; + for (int i = 0; i < dataSinks.length; i++) { + JsArray arr = JsData.newArray(cts.columnTypes()[i].getCanonicalName()); + switch (cts.chunkTypes()[i]) { + case Boolean: + break; + case Char: + break; + case Byte: + break; + case Short: + break; + case Int: + dataSinks[i] = new WebDataSink() { + @Override + public void fillChunk(Chunk data, PrimitiveIterator.OfLong destIterator) { + IntChunk intChunk = data.asIntChunk(); + int i = 0; + while (destIterator.hasNext()) { + arr.setAt((int) destIterator.nextLong(), Js.asAny(intChunk.get(i++))); + } + } + + @Override + public T get(long position) { + return (T) arr.getAt((int) position); + } + }; + break; + case Long: + dataSinks[i] = new WebDataSink() { + @Override + public void fillChunk(Chunk data, PrimitiveIterator.OfLong destIterator) { + LongChunk longChunk = data.asLongChunk(); + int i = 0; + while (destIterator.hasNext()) { + arr.setAt((int) destIterator.nextLong(), Js.asAny(longChunk.get(i++))); + } + } + + @Override + public T get(long position) { + return (T) arr.getAt((int) position); + } + }; + break; + case Float: + break; + case Double: + dataSinks[i] = new WebDataSink() { + @Override + public void fillChunk(Chunk data, PrimitiveIterator.OfLong destIterator) { + DoubleChunk doubleChunk = data.asDoubleChunk(); + int i = 0; + while (destIterator.hasNext()) { + arr.setAt((int) destIterator.nextLong(), Js.asAny(doubleChunk.get(i++))); + } + } + + @Override + public T get(long position) { + return (T) arr.getAt((int) position); + } + }; + break; + case Object: + dataSinks[i] = new WebDataSink() { + @Override + public void fillChunk(Chunk data, PrimitiveIterator.OfLong destIterator) { + ObjectChunk objectChunk = data.asObjectChunk(); + int i = 0; + while (destIterator.hasNext()) { + arr.setAt((int) destIterator.nextLong(), Js.asAny(objectChunk.get(i++))); + } + } + + @Override + public T get(long position) { + return (T) arr.getAt((int) position); + } + }; + break; + } + } + if 
(cts.getTableDef().getAttributes().isBlinkTable()) { - return new BlinkImpl(cts, viewportChangedHandler, dataChangedHandler); + return new BlinkImpl(cts, viewportChangedHandler, dataChangedHandler, dataSinks); } - return new RedirectedImpl(cts, viewportChangedHandler, dataChangedHandler); + return new RedirectedImpl(cts, viewportChangedHandler, dataChangedHandler, dataSinks); } public static FlatBufferBuilder subscriptionRequest(byte[] tableTicket, BitSet columns, @Nullable RangeSet viewport, - io.deephaven.extensions.barrage.BarrageSubscriptionOptions options) { + io.deephaven.extensions.barrage.BarrageSubscriptionOptions options, boolean isReverseViewport) { FlatBufferBuilder sub = new FlatBufferBuilder(1024); int colOffset = BarrageSubscriptionRequest.createColumnsVector(sub, columns.toByteArray()); int viewportOffset = 0; @@ -55,10 +149,12 @@ public static FlatBufferBuilder subscriptionRequest(byte[] tableTicket, BitSet c } int optionsOffset = options.appendTo(sub); int tableTicketOffset = BarrageSubscriptionRequest.createTicketVector(sub, tableTicket); + BarrageSubscriptionRequest.startBarrageSubscriptionRequest(sub); BarrageSubscriptionRequest.addColumns(sub, colOffset); BarrageSubscriptionRequest.addViewport(sub, viewportOffset); BarrageSubscriptionRequest.addSubscriptionOptions(sub, optionsOffset); BarrageSubscriptionRequest.addTicket(sub, tableTicketOffset); + BarrageSubscriptionRequest.addReverseViewport(sub, isReverseViewport); sub.finish(BarrageSubscriptionRequest.endBarrageSubscriptionRequest(sub)); return sub; @@ -76,6 +172,8 @@ public interface WebDataSink { void fillChunk(Chunk data, PrimitiveIterator.OfLong destIterator); default void ensureCapacity(long size) {} + + T get(long position); } protected final ClientTableState state; @@ -91,9 +189,9 @@ default void ensureCapacity(long size) {} protected boolean serverReverseViewport; public WebBarrageSubscription(ClientTableState state, ViewportChangedHandler viewportChangedHandler, - DataChangedHandler dataChangedHandler) { + DataChangedHandler dataChangedHandler, WebDataSink[] dataSinks) { this.state = state; - destSources = new WebDataSink[state.getTableDef().getColumns().length]; + destSources = dataSinks; this.viewportChangedHandler = viewportChangedHandler; this.dataChangedHandler = dataChangedHandler; } @@ -118,6 +216,10 @@ public RangeSet getCurrentRowSet() { return currentRowSet; } + public RangeSet getServerViewport() { + return serverViewport; + } + public abstract T getData(long key, int col); protected boolean isSubscribedColumn(int ii) { @@ -132,8 +234,8 @@ enum Mode { private final Mode mode; public BlinkImpl(ClientTableState state, ViewportChangedHandler viewportChangedHandler, - DataChangedHandler dataChangedHandler) { - super(state, viewportChangedHandler, dataChangedHandler); + DataChangedHandler dataChangedHandler, WebDataSink[] dataSinks) { + super(state, viewportChangedHandler, dataChangedHandler, dataSinks); mode = Mode.BLINK; } @@ -180,6 +282,11 @@ public void applyUpdates(WebBarrageMessage message) { dataChangedHandler.onDataChanged(message.rowsAdded, message.rowsRemoved, RangeSet.empty(), message.shifted, new BitSet(0)); } + + @Override + public T getData(long key, int col) { + return destSources[col].get(key); + } } public static class RedirectedImpl extends WebBarrageSubscription { @@ -187,8 +294,8 @@ public static class RedirectedImpl extends WebBarrageSubscription { private final TreeMap redirectedIndexes = new TreeMap<>(); public RedirectedImpl(ClientTableState state, ViewportChangedHandler 
viewportChangedHandler, - DataChangedHandler dataChangedHandler) { - super(state, viewportChangedHandler, dataChangedHandler); + DataChangedHandler dataChangedHandler, WebDataSink[] dataSinks) { + super(state, viewportChangedHandler, dataChangedHandler, dataSinks); } @Override @@ -288,6 +395,7 @@ public void applyUpdates(WebBarrageMessage message) { } RangeSet destinationRowSet = getFreeRows(message.rowsIncluded.size()); + DomGlobal.console.log("freeRows", destinationRowSet.toString()); // RangeSet destinationRowSet = new RangeSet(); // message.rowsIncluded.indexIterator().forEachRemaining((long row) -> { // destinationRowSet.addRange(new Range(row, row)); @@ -297,6 +405,7 @@ public void applyUpdates(WebBarrageMessage message) { if (isSubscribedColumn(ii)) { WebBarrageMessage.AddColumnData column = message.addColumnData[ii]; PrimitiveIterator.OfLong destIterator = destinationRowSet.indexIterator(); + for (int j = 0; j < column.data.size(); j++) { Chunk chunk = column.data.get(j); destSources[ii].fillChunk(chunk, destIterator); @@ -304,6 +413,14 @@ public void applyUpdates(WebBarrageMessage message) { assert !destIterator.hasNext(); } } + // Add redirection mappings + PrimitiveIterator.OfLong srcIter = message.rowsIncluded.indexIterator(); + PrimitiveIterator.OfLong destIter = destinationRowSet.indexIterator(); + while (srcIter.hasNext()) { + assert destIter.hasNext(); + redirectedIndexes.put(srcIter.next(), destIter.next()); + } + assert !destIter.hasNext(); } BitSet modifiedColumnSet = new BitSet(numColumns()); @@ -325,7 +442,7 @@ public void applyUpdates(WebBarrageMessage message) { if (serverViewport != null) { assert populatedRows != null; RangeSet newPopulated = currentRowSet.subsetForPositions(serverViewport, serverReverseViewport); - newPopulated.rangeIterator().forEachRemaining(newPopulated::removeRange); + newPopulated.rangeIterator().forEachRemaining(populatedRows::removeRange); freeRows(populatedRows); } @@ -336,6 +453,11 @@ public void applyUpdates(WebBarrageMessage message) { modifiedColumnSet); } + @Override + public T getData(long key, int col) { + return this.destSources[col].get(redirectedIndexes.get(key)); + } + private RangeSet getFreeRows(long size) { if (size <= 0) { return RangeSet.empty(); @@ -351,28 +473,33 @@ private RangeSet getFreeRows(long size) { } else { result = new RangeSet(); Iterator iterator = freeset.rangeIterator(); - int required = (int) Math.min(size, freeset.size()); - while (required > 0) { + int required = (int) size; + while (required > 0 && iterator.hasNext()) { assert iterator.hasNext(); Range next = iterator.next(); - result.addRange( - next.size() < required ? next : new Range(next.getFirst(), next.getFirst() + required - 1)); + Range range = + next.size() < required ? 
next : new Range(next.getFirst(), next.getFirst() + required - 1); + result.addRange(range); + freeset.removeRange(range); required -= (int) next.size(); } - if (freeset.size() < size) { + if (required > 0) { // we need more, allocate extra, return some, grow the freeset for next time long usedSlots = capacity - freeset.size(); long prevCapacity = capacity; do { capacity *= 2; - } while ((capacity - usedSlots) < size); + } while ((capacity - usedSlots) < required); - result.addRange(new Range(prevCapacity, size - 1)); + result.addRange(new Range(prevCapacity, prevCapacity + required - 1)); freeset = new RangeSet(); - freeset.addRange(new Range(size, capacity - 1)); + if (capacity - prevCapacity > required) { + // extra was allocated for next time + freeset.addRange(new Range(prevCapacity + required, capacity - 1)); + } needsResizing = true; } } @@ -381,6 +508,8 @@ private RangeSet getFreeRows(long size) { Arrays.stream(destSources).forEach(s -> s.ensureCapacity(capacity)); } + assert result.size() == size; + return result; } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java new file mode 100644 index 00000000000..00fdb43808e --- /dev/null +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java @@ -0,0 +1,385 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.web.client.api.subscription; + +import com.google.flatbuffers.FlatBufferBuilder; +import com.vertispan.tsdefs.annotations.TsInterface; +import com.vertispan.tsdefs.annotations.TsName; +import elemental2.core.JsArray; +import elemental2.dom.CustomEventInit; +import io.deephaven.barrage.flatbuf.BarrageMessageType; +import io.deephaven.extensions.barrage.BarrageSubscriptionOptions; +import io.deephaven.extensions.barrage.ColumnConversionMode; +import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb.FlightData; +import io.deephaven.web.client.api.Column; +import io.deephaven.web.client.api.Format; +import io.deephaven.web.client.api.HasEventHandling; +import io.deephaven.web.client.api.JsRangeSet; +import io.deephaven.web.client.api.LongWrapper; +import io.deephaven.web.client.api.TableData; +import io.deephaven.web.client.api.WorkerConnection; +import io.deephaven.web.client.api.barrage.WebBarrageMessage; +import io.deephaven.web.client.api.barrage.WebBarrageStreamReader; +import io.deephaven.web.client.api.barrage.WebBarrageUtils; +import io.deephaven.web.client.api.barrage.data.WebBarrageSubscription; +import io.deephaven.web.client.api.barrage.stream.BiDiStream; +import io.deephaven.web.client.fu.JsSettings; +import io.deephaven.web.client.state.ClientTableState; +import io.deephaven.web.shared.data.RangeSet; +import io.deephaven.web.shared.data.ShiftedRange; +import jsinterop.annotations.JsProperty; +import jsinterop.base.Any; +import jsinterop.base.Js; + +import java.io.IOException; +import java.util.BitSet; + +import static io.deephaven.web.client.api.subscription.ViewportData.NO_ROW_FORMAT_COLUMN; + +public abstract class AbstractTableSubscription extends HasEventHandling { + /** + * Indicates that some new data is available on the client, either an initial snapshot or a delta update. 
The + * detail field of the event will contain a TableSubscriptionEventData detailing what has changed, or + * allowing access to the entire range of items currently in the subscribed columns. + */ + public static final String EVENT_UPDATED = "updated"; + + private final ClientTableState state; + private final WorkerConnection connection; + private final int rowStyleColumn; + private JsArray<Column> columns; + private BitSet columnBitSet; + private BarrageSubscriptionOptions options; + + private final BiDiStream<FlightData, FlightData> doExchange; + private final WebBarrageSubscription barrageSubscription; + + private boolean subscriptionReady; + + public AbstractTableSubscription(ClientTableState state, WorkerConnection connection) { + state.retain(this); + this.state = state; + this.connection = connection; + rowStyleColumn = state.getRowFormatColumn() == null ? NO_ROW_FORMAT_COLUMN + : state.getRowFormatColumn().getIndex(); + + doExchange = + connection.streamFactory().create( + headers -> connection.flightServiceClient().doExchange(headers), + (first, headers) -> connection.browserFlightServiceClient().openDoExchange(first, headers), + (next, headers, c) -> connection.browserFlightServiceClient().nextDoExchange(next, headers, + c::apply), + new FlightData()); + + doExchange.onData(this::onFlightData); + // TODO handle stream ending, error + + // TODO going to need "started change" so we don't let data escape when still updating + barrageSubscription = WebBarrageSubscription.subscribe(state, this::onViewportChange, this::onDataChanged); + } + + protected void sendBarrageSubscriptionRequest(RangeSet viewport, JsArray<Column> columns, Double updateIntervalMs, + boolean isReverseViewport) { + this.columns = columns; + this.columnBitSet = state.makeBitset(Js.uncheckedCast(columns)); + // TODO validate that we can change updateInterval + this.options = BarrageSubscriptionOptions.builder() + .batchSize(WebBarrageSubscription.BATCH_SIZE) + .maxMessageSize(WebBarrageSubscription.MAX_MESSAGE_SIZE) + .columnConversionMode(ColumnConversionMode.Stringify) + .minUpdateIntervalMs(updateIntervalMs == null ? 0 : (int) (double) updateIntervalMs) + .columnsAsList(false) + .build(); + FlatBufferBuilder request = WebBarrageSubscription.subscriptionRequest( + Js.uncheckedCast(state.getHandle().getTicket()), + columnBitSet, + viewport, + options, + isReverseViewport); + FlightData subscriptionRequest = new FlightData(); + subscriptionRequest + .setAppMetadata(WebBarrageUtils.wrapMessage(request, BarrageMessageType.BarrageSubscriptionRequest)); + doExchange.send(subscriptionRequest); + } + + protected ClientTableState state() { + return state; + } + + protected WorkerConnection connection() { + return connection; + } + + protected boolean isSubscriptionReady() { + return subscriptionReady; + } + + public double size() { + return barrageSubscription.getCurrentRowSet().size(); + } + + private void onDataChanged(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet totalMods, ShiftedRange[] shifted, + BitSet modifiedColumnSet) { + if (!subscriptionReady) { + return; + } + + // TODO if this was a snapshot (or subscriptionReady was false for some interval), we probably need to + // notify of the entire table as a single big change + + notifyUpdate(rowsAdded, rowsRemoved, totalMods, shifted); + } + + protected void notifyUpdate(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet totalMods, ShiftedRange[] shifted) { + // TODO Rewrite shifts as adds/removes/modifies? in the past we ignored them...
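+        // Editor's sketch of the rewrite suggested above, hypothetical and not wired in: each shift could be
+        // expressed as a remove of the pre-shift keys plus an add of the post-shift keys, assuming ShiftedRange
+        // exposes getRange() and getDelta():
+        //   for (ShiftedRange shift : shifted) {
+        //       Range r = shift.getRange();
+        //       rowsRemoved.addRange(r);
+        //       rowsAdded.addRange(new Range(r.getFirst() + shift.getDelta(), r.getLast() + shift.getDelta()));
+        //   }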
+ UpdateEventData detail = new UpdateEventData(rowsAdded, rowsRemoved, totalMods, shifted); + CustomEventInit event = CustomEventInit.create(); + event.setDetail(detail); + fireEvent(TableSubscription.EVENT_UPDATED, event); + } + + @TsInterface + @TsName(namespace = "dh") + public class SubscriptionRow implements TableData.Row { + private final long index; + public LongWrapper indexCached; + + public SubscriptionRow(long index) { + this.index = index; + } + + @Override + public LongWrapper getIndex() { + if (indexCached == null) { + indexCached = LongWrapper.of(index); + } + return indexCached; + } + + @Override + public Any get(Column column) { + return barrageSubscription.getData(index, column.getIndex()); + } + + @Override + public Format getFormat(Column column) { + long cellColors = 0; + long rowColors = 0; + String numberFormat = null; + String formatString = null; + if (column.getStyleColumnIndex() != null) { + cellColors = barrageSubscription.getData(index, column.getStyleColumnIndex()); + } + if (rowStyleColumn != NO_ROW_FORMAT_COLUMN) { + rowColors = barrageSubscription.getData(index, rowStyleColumn); + } + if (column.getFormatStringColumnIndex() != null) { + numberFormat = barrageSubscription.getData(index, column.getFormatStringColumnIndex()); + } + if (column.getFormatStringColumnIndex() != null) { + formatString = barrageSubscription.getData(index, column.getFormatStringColumnIndex()); + } + return new Format(cellColors, rowColors, numberFormat, formatString); + } + } + + + @TsInterface + @TsName(name = "SubscriptionTableData", namespace = "dh") + public class UpdateEventData implements TableData { + private final JsRangeSet added; + private final JsRangeSet removed; + private final JsRangeSet modified; + + // cached copy in case it was requested, could be requested again + private JsArray allRows; + + // TODO not this + public double offset; + + public UpdateEventData(RangeSet added, RangeSet removed, RangeSet modified, ShiftedRange[] shifted) { + this.added = new JsRangeSet(added); + this.removed = new JsRangeSet(removed); + this.modified = new JsRangeSet(modified); + } + + /** + * The position of the first returned row. + * + * @return double + */ + @JsProperty + public double getOffset() { + return offset; + } + + /** + * A lazily computed array of all rows in the entire table + * + * @return {@link SubscriptionRow} array. 
+ */ + @Override + public JsArray getRows() { + if (allRows == null) { + allRows = new JsArray<>(); + RangeSet positions = barrageSubscription.getCurrentRowSet() + .subsetForPositions(barrageSubscription.getServerViewport(), false); + positions.indexIterator().forEachRemaining((long index) -> { + allRows.push(new SubscriptionRow(index)); + }); + if (JsSettings.isDevMode()) { + assert allRows.length == positions.size(); + } + } + return allRows; + } + + @Override + public Row get(int index) { + return this.get((long) index); + } + + /** + * Reads a row object from the table, from which any subscribed column can be read + * + * @param index + * @return {@link SubscriptionRow} + */ + @Override + public SubscriptionRow get(long index) { + return new SubscriptionRow(index); + } + + @Override + public Any getData(int index, Column column) { + return getData((long) index, column); + } + + /** + * a specific cell from the table, from the specified row and column + * + * @param index + * @param column + * @return Any + */ + @Override + public Any getData(long index, Column column) { + return barrageSubscription.getData(index, column.getIndex()); + } + + /** + * the Format to use for a cell from the specified row and column + * + * @param index + * @param column + * @return {@link Format} + */ + @Override + public Format getFormat(int index, Column column) { + return getFormat((long) index, column); + } + + @Override + public Format getFormat(long index, Column column) { + long cellColors = 0; + long rowColors = 0; + String numberFormat = null; + String formatString = null; + if (column.getStyleColumnIndex() != null) { + cellColors = barrageSubscription.getData(index, column.getStyleColumnIndex()); + } + if (rowStyleColumn != NO_ROW_FORMAT_COLUMN) { + rowColors = barrageSubscription.getData(index, rowStyleColumn); + } + if (column.getFormatStringColumnIndex() != null) { + numberFormat = barrageSubscription.getData(index, column.getFormatStringColumnIndex()); + } + if (column.getFormatStringColumnIndex() != null) { + formatString = barrageSubscription.getData(index, column.getFormatStringColumnIndex()); + } + return new Format(cellColors, rowColors, numberFormat, formatString); + } + + @Override + public JsArray getColumns() { + return columns; + } + + /** + * The ordered set of row indexes added since the last update + * + * @return dh.RangeSet + */ + @JsProperty + public JsRangeSet getAdded() { + return added; + } + + /** + * The ordered set of row indexes removed since the last update + * + * @return dh.RangeSet + */ + @JsProperty + public JsRangeSet getRemoved() { + return removed; + } + + /** + * The ordered set of row indexes updated since the last update + * + * @return dh.RangeSet + */ + @JsProperty + public JsRangeSet getModified() { + return modified; + } + + @JsProperty + public JsRangeSet getFullIndex() { + return new JsRangeSet(barrageSubscription.getCurrentRowSet()); + } + } + + protected void onViewportChange(RangeSet serverViewport, BitSet serverColumns, boolean serverReverseViewport) { + // if (serverViewport != null || serverReverseViewport) { + // throw new IllegalStateException("Not a viewport subscription"); + // } + subscriptionReady = (serverColumns == null && columnBitSet == null) + || (serverColumns == null && columnBitSet.cardinality() == state.getColumns().length) + || (serverColumns != null && serverColumns.equals(this.columnBitSet)); + } + + private void onFlightData(FlightData data) { + WebBarrageStreamReader reader = new WebBarrageStreamReader(); + WebBarrageMessage 
message; + try { + message = reader.parseFrom(options, null, state.chunkTypes(), state.columnTypes(), state.componentTypes(), + data); + } catch (IOException e) { + throw new RuntimeException(e); + } + if (message != null) { + // This payload resulted in an update to the table's contents, inform the subscription + barrageSubscription.applyUpdates(message); + } + } + + /** + * The columns that were subscribed to when this subscription was created + * + * @return {@link Column} + */ + public JsArray getColumns() { + return columns; + } + + /** + * Stops the subscription on the server. + */ + public void close() { + doExchange.end(); + doExchange.cancel(); + } +} diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableSubscription.java index 365f007711f..7ac6903f0ec 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableSubscription.java @@ -3,45 +3,13 @@ // package io.deephaven.web.client.api.subscription; -import com.google.flatbuffers.FlatBufferBuilder; -import com.vertispan.tsdefs.annotations.TsInterface; -import com.vertispan.tsdefs.annotations.TsName; -import com.vertispan.tsdefs.annotations.TsTypeRef; import elemental2.core.JsArray; -import elemental2.dom.CustomEventInit; -import io.deephaven.barrage.flatbuf.BarrageMessageType; -import io.deephaven.extensions.barrage.BarrageSubscriptionOptions; -import io.deephaven.extensions.barrage.ColumnConversionMode; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb.FlightData; import io.deephaven.web.client.api.Column; -import io.deephaven.web.client.api.Format; -import io.deephaven.web.client.api.HasEventHandling; -import io.deephaven.web.client.api.JsRangeSet; import io.deephaven.web.client.api.JsTable; -import io.deephaven.web.client.api.LongWrapper; -import io.deephaven.web.client.api.TableData; -import io.deephaven.web.client.api.WorkerConnection; -import io.deephaven.web.client.api.barrage.WebBarrageMessage; -import io.deephaven.web.client.api.barrage.WebBarrageStreamReader; -import io.deephaven.web.client.api.barrage.WebBarrageUtils; -import io.deephaven.web.client.api.barrage.data.WebBarrageSubscription; -import io.deephaven.web.client.api.barrage.stream.BiDiStream; -import io.deephaven.web.client.fu.JsSettings; -import io.deephaven.web.client.state.ClientTableState; -import io.deephaven.web.shared.data.RangeSet; -import io.deephaven.web.shared.data.ShiftedRange; import jsinterop.annotations.JsIgnore; import jsinterop.annotations.JsNullable; import jsinterop.annotations.JsProperty; import jsinterop.annotations.JsType; -import jsinterop.base.Any; -import jsinterop.base.Js; -import jsinterop.base.JsArrayLike; - -import java.io.IOException; -import java.util.BitSet; - -import static io.deephaven.web.client.api.subscription.ViewportData.NO_ROW_FORMAT_COLUMN; /** * Represents a non-viewport subscription to a table, and all data currently known to be present in the subscribed @@ -56,277 +24,13 @@ * viewports to make it less expensive to compute for large tables. */ @JsType(namespace = "dh") -public class TableSubscription extends HasEventHandling { - - /** - * Indicates that some new data is available on the client, either an initial snapshot or a delta update. 
The - * detail field of the event will contain a TableSubscriptionEventData detailing what has changed, or - * allowing access to the entire range of items currently in the subscribed columns. - */ - public static final String EVENT_UPDATED = "updated"; - - - private final ClientTableState state; - private final int rowStyleColumn; - private JsArray columns; - private BitSet columnBitSet; - private BarrageSubscriptionOptions options; - - private final BiDiStream doExchange; - private final WebBarrageSubscription barrageSubscription; - - private boolean subscriptionReady; +public final class TableSubscription extends AbstractTableSubscription { @JsIgnore public TableSubscription(JsArray columns, JsTable existingTable, Double updateIntervalMs) { - state = existingTable.state(); - state.retain(this); - this.columns = columns; - rowStyleColumn = existingTable.state().getRowFormatColumn() == null ? NO_ROW_FORMAT_COLUMN - : existingTable.state().getRowFormatColumn().getIndex(); - - WorkerConnection connection = existingTable.getConnection(); - doExchange = - connection.streamFactory().create( - headers -> connection.flightServiceClient().doExchange(headers), - (first, headers) -> connection.browserFlightServiceClient().openDoExchange(first, headers), - (next, headers, c) -> connection.browserFlightServiceClient().nextDoExchange(next, headers, - c::apply), - new FlightData()); - - doExchange.onData(this::onFlightData); - // TODO handle stream ending, error + super(existingTable.state(), existingTable.getConnection()); changeSubscription(columns, updateIntervalMs); - - // TODO going to need "started change" so we don't let data escape when still updating - barrageSubscription = WebBarrageSubscription.subscribe(state, this::onViewportChange, this::onDataChanged); - } - - private void onDataChanged(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet totalMods, ShiftedRange[] shifted, - BitSet modifiedColumnSet) { - if (!subscriptionReady) { - return; - } - - // TODO if this was a snapshot (or subscriptionReady was false for some interval), we probably need to - // notify of the entire table as a single big change - - // TODO Rewrite shifts as adds/removed/modifies? in the past we ignored them... 
- UpdateEventData detail = new UpdateEventData(rowsAdded, rowsRemoved, totalMods, shifted); - CustomEventInit event = CustomEventInit.create(); - event.setDetail(detail); - fireEvent(TableSubscription.EVENT_UPDATED, event); - } - - @TsInterface - @TsName(namespace = "dh") - public class SubscriptionRow implements TableData.Row { - private final long index; - public LongWrapper indexCached; - - public SubscriptionRow(long index) { - this.index = index; - } - - @Override - public LongWrapper getIndex() { - if (indexCached == null) { - indexCached = LongWrapper.of(index); - } - return indexCached; - } - - @Override - public Any get(Column column) { - return barrageSubscription.getData(index, column.getIndex()); - } - - @Override - public Format getFormat(Column column) { - long cellColors = 0; - long rowColors = 0; - String numberFormat = null; - String formatString = null; - if (column.getStyleColumnIndex() != null) { - cellColors = barrageSubscription.getData(index, column.getStyleColumnIndex()); - } - if (rowStyleColumn != NO_ROW_FORMAT_COLUMN) { - rowColors = barrageSubscription.getData(index, rowStyleColumn); - } - if (column.getFormatStringColumnIndex() != null) { - numberFormat = barrageSubscription.getData(index, column.getFormatStringColumnIndex()); - } - if (column.getFormatStringColumnIndex() != null) { - formatString = barrageSubscription.getData(index, column.getFormatStringColumnIndex()); - } - return new Format(cellColors, rowColors, numberFormat, formatString); - } - } - - - @TsInterface - @TsName(name = "SubscriptionTableData", namespace = "dh") - public class UpdateEventData implements TableData { - private final JsRangeSet added; - private final JsRangeSet removed; - private final JsRangeSet modified; - - // cached copy in case it was requested, could be requested again - private JsArray allRows; - - public UpdateEventData(RangeSet added, RangeSet removed, RangeSet modified, ShiftedRange[] shifted) { - this.added = new JsRangeSet(added); - this.removed = new JsRangeSet(removed); - this.modified = new JsRangeSet(modified); - } - - /** - * A lazily computed array of all rows in the entire table - * - * @return {@link SubscriptionRow} array. 
- */ - @Override - public JsArray getRows() { - if (allRows == null) { - allRows = new JsArray<>(); - barrageSubscription.getCurrentRowSet().indexIterator().forEachRemaining((long index) -> { - allRows.push(new SubscriptionRow(index)); - }); - if (JsSettings.isDevMode()) { - assert allRows.length == barrageSubscription.getCurrentRowSet().size(); - } - } - return allRows; - } - - @Override - public Row get(int index) { - return this.get((long) index); - } - - /** - * Reads a row object from the table, from which any subscribed column can be read - * - * @param index - * @return {@link SubscriptionRow} - */ - @Override - public SubscriptionRow get(long index) { - return new SubscriptionRow(index); - } - - @Override - public Any getData(int index, Column column) { - return getData((long) index, column); - } - - /** - * a specific cell from the table, from the specified row and column - * - * @param index - * @param column - * @return Any - */ - @Override - public Any getData(long index, Column column) { - return barrageSubscription.getData(index, column.getIndex()); - } - - /** - * the Format to use for a cell from the specified row and column - * - * @param index - * @param column - * @return {@link Format} - */ - @Override - public Format getFormat(int index, Column column) { - return getFormat((long) index, column); - } - - @Override - public Format getFormat(long index, Column column) { - long cellColors = 0; - long rowColors = 0; - String numberFormat = null; - String formatString = null; - if (column.getStyleColumnIndex() != null) { - cellColors = barrageSubscription.getData(index, column.getStyleColumnIndex()); - } - if (rowStyleColumn != NO_ROW_FORMAT_COLUMN) { - rowColors = barrageSubscription.getData(index, rowStyleColumn); - } - if (column.getFormatStringColumnIndex() != null) { - numberFormat = barrageSubscription.getData(index, column.getFormatStringColumnIndex()); - } - if (column.getFormatStringColumnIndex() != null) { - formatString = barrageSubscription.getData(index, column.getFormatStringColumnIndex()); - } - return new Format(cellColors, rowColors, numberFormat, formatString); - } - - @Override - public JsArray getColumns() { - return columns; - } - - /** - * The ordered set of row indexes added since the last update - * - * @return dh.RangeSet - */ - @JsProperty - public JsRangeSet getAdded() { - return added; - } - - /** - * The ordered set of row indexes removed since the last update - * - * @return dh.RangeSet - */ - @JsProperty - public JsRangeSet getRemoved() { - return removed; - } - - /** - * The ordered set of row indexes updated since the last update - * - * @return dh.RangeSet - */ - @JsProperty - public JsRangeSet getModified() { - return modified; - } - - @JsProperty - public JsRangeSet getFullIndex() { - return new JsRangeSet(barrageSubscription.getCurrentRowSet()); - } - } - - private void onViewportChange(RangeSet serverViewport, BitSet serverColumns, boolean serverReverseViewport) { - if (serverViewport != null || serverReverseViewport) { - throw new IllegalStateException("Not a viewport subscription"); - } - subscriptionReady = serverColumns.equals(this.columnBitSet); - } - - private void onFlightData(FlightData data) { - WebBarrageStreamReader reader = new WebBarrageStreamReader(); - WebBarrageMessage message; - try { - message = reader.parseFrom(options, null, state.chunkTypes(), state.columnTypes(), state.componentTypes(), - data); - } catch (IOException e) { - throw new RuntimeException(e); - } - if (message != null) { - // This payload resulted in an 
update to the table's contents, inform the subscription - barrageSubscription.applyUpdates(message); - } } /** @@ -336,42 +40,17 @@ private void onFlightData(FlightData data) { * @param updateIntervalMs the new update interval, or null/omit to use the default of one second */ public void changeSubscription(JsArray columns, @JsNullable Double updateIntervalMs) { - // TODO validate that we can change updateinterval - this.columns = columns; - this.columnBitSet = state.makeBitset(Js.uncheckedCast(columns)); - this.options = BarrageSubscriptionOptions.builder() - .batchSize(WebBarrageSubscription.BATCH_SIZE) - .maxMessageSize(WebBarrageSubscription.MAX_MESSAGE_SIZE) - .columnConversionMode(ColumnConversionMode.Stringify) - .minUpdateIntervalMs(updateIntervalMs == null ? 0 : (int) (double) updateIntervalMs) - .columnsAsList(false) - .build(); - FlatBufferBuilder request = WebBarrageSubscription.subscriptionRequest( - Js.uncheckedCast(state.getHandle().getTicket()), - columnBitSet, - null, - options); - FlightData subscriptionRequest = new FlightData(); - subscriptionRequest - .setAppMetadata(WebBarrageUtils.wrapMessage(request, BarrageMessageType.BarrageSubscriptionRequest)); - doExchange.send(subscriptionRequest); + sendBarrageSubscriptionRequest(null, columns, updateIntervalMs, false); } - /** - * The columns that were subscribed to when this subscription was created - * - * @return {@link Column} - */ @JsProperty + @Override public JsArray getColumns() { - return columns; + return super.getColumns(); } - /** - * Stops the subscription on the server. - */ + @Override public void close() { - doExchange.end(); - doExchange.cancel(); + super.close(); } } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java index e37c77b06d5..78f87c1415a 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java @@ -21,7 +21,6 @@ import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb.FlightData; import io.deephaven.web.client.api.Callbacks; import io.deephaven.web.client.api.Column; -import io.deephaven.web.client.api.HasEventHandling; import io.deephaven.web.client.api.JsRangeSet; import io.deephaven.web.client.api.JsTable; import io.deephaven.web.client.api.TableData; @@ -30,7 +29,9 @@ import io.deephaven.web.client.api.barrage.def.ColumnDefinition; import io.deephaven.web.client.api.barrage.stream.BiDiStream; import io.deephaven.web.client.fu.JsLog; -import io.deephaven.web.client.state.ClientTableState; +import io.deephaven.web.client.fu.LazyPromise; +import io.deephaven.web.shared.data.RangeSet; +import io.deephaven.web.shared.data.ShiftedRange; import io.deephaven.web.shared.data.TableSnapshot; import jsinterop.annotations.JsMethod; import jsinterop.annotations.JsNullable; @@ -78,7 +79,11 @@ */ @TsInterface @TsName(namespace = "dh") -public class TableViewportSubscription extends HasEventHandling { +public class TableViewportSubscription extends AbstractTableSubscription { + + // TODO move to superclass and check on viewport change + private RangeSet serverViewport; + /** * Describes the possible lifecycle of the viewport as far as anything external cares about it */ @@ -103,82 +108,99 @@ public enum Status { private final double refresh; private final JsTable 
original; - private final ClientTableState originalState; - private final Promise copy; - private JsTable realized; - private boolean retained;// if the sub is set up to not close the underlying table once the original table is done - // with it + /** + * true if the original table is still using this subscription and events should be refired on it, otherwise + * false. + */ private boolean originalActive = true; + /** + * true if the developer has called methods directly on the subscription, so the underlying table should not be + * closed once the original table is done with it; otherwise false. + */ + private boolean retained; private Status status = Status.STARTING; + private UpdateEventData viewportData; + public TableViewportSubscription(double firstRow, double lastRow, Column[] columns, Double updateIntervalMs, JsTable existingTable) { + super(existingTable.state(), existingTable.getConnection()); + + setInternalViewport(firstRow, lastRow, columns, updateIntervalMs, null); + refresh = updateIntervalMs == null ? 1000.0 : updateIntervalMs; - // first off, copy the table, and flatten/pUT it, then apply the new viewport to that this.original = existingTable; - this.originalState = original.state(); - copy = existingTable.copy(false).then(table -> new Promise<>((resolve, reject) -> { - // Wait until the state is running to copy it - originalState.onRunning(newState -> { - if (this.status == Status.DONE) { - JsLog.debug("TableViewportSubscription closed before originalState.onRunning completed, ignoring"); - table.close(); - return; - } - table.batch(batcher -> { - batcher.customColumns(newState.getCustomColumns()); - batcher.filter(newState.getFilters()); - batcher.sort(newState.getSorts()); - - batcher.setFlat(true); - }); - // TODO handle updateInterval core#188 - Column[] columnsToSub = table.isBlinkTable() ? Js.uncheckedCast(table.getColumns()) : columns; - // table.setInternalViewport(firstRow, lastRow, columnsToSub); - - // Listen for events and refire them on ourselves, optionally on the original table - table.addEventListener(JsTable.EVENT_UPDATED, this::refire); - table.addEventListener(JsTable.EVENT_ROWADDED, this::refire); - table.addEventListener(JsTable.EVENT_ROWREMOVED, this::refire); - table.addEventListener(JsTable.EVENT_ROWUPDATED, this::refire); - table.addEventListener(JsTable.EVENT_SIZECHANGED, this::refire); - // TODO (core#1181): fix this hack that enables barrage errors to propagate to the UI widget - table.addEventListener(JsTable.EVENT_REQUEST_FAILED, this::refire); - - // Take over for the "parent" table - // Cache original table size so we can tell if we need to notify about a change - double originalSize = newState.getSize(); - realized = table; - status = Status.ACTIVE; - // At this point we're now responsible for notifying of size changes, since we will shortly have a - // viewport, - // a more precise way to track the table size (at least w.r.t. the range of the viewport), so if there - // is any difference in size between "realized" and "original", notify now to finish the transition.
- if (realized.getSize() != originalSize) { - JsLog.debug( - "firing size changed to transition between table managing its own size changes and viewport sub taking over", - realized.getSize()); - CustomEventInit init = CustomEventInit.create(); - init.setDetail(realized.getSize()); - refire(new CustomEvent(JsTable.EVENT_SIZECHANGED, init)); - } - resolve.onInvoke(table); - }, table::close); - })); + } + @Override + protected void notifyUpdate(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet totalMods, ShiftedRange[] shifted) { + // viewport subscriptions are sometimes required to notify of size change events + if (rowsAdded.size() != rowsRemoved.size() && originalActive) { + fireEventWithDetail(JsTable.EVENT_SIZECHANGED, size()); + } + + // TODO fire legacy table row added/updated/modified events + // for (Integer index : mergeResults.added) { + // CustomEventInit> addedEvent = CustomEventInit.create(); + // addedEvent.setDetail(wrap(vpd.getRows().getAt(index), index)); + // fireEvent(EVENT_ROWADDED, addedEvent); + // } + // for (Integer index : mergeResults.modified) { + // CustomEventInit> addedEvent = CustomEventInit.create(); + // addedEvent.setDetail(wrap(vpd.getRows().getAt(index), index)); + // fireEvent(EVENT_ROWUPDATED, addedEvent); + // } + // for (Integer index : mergeResults.removed) { + // CustomEventInit> addedEvent = CustomEventInit.create(); + // addedEvent.setDetail(wrap(vpd.getRows().getAt(index), index)); + // fireEvent(EVENT_ROWREMOVED, addedEvent); + // } + + // TODO Rewrite shifts as adds/removes/modifies? in the past we ignored them... + UpdateEventData detail = new UpdateEventData(rowsAdded, rowsRemoved, totalMods, shifted); + detail.offset = this.serverViewport.getFirstRow(); + this.viewportData = detail; + CustomEventInit<UpdateEventData> event = CustomEventInit.create(); + event.setDetail(detail); + refire(new CustomEvent<>(EVENT_UPDATED, event)); + } + + @Override + public void fireEvent(String type) { + refire(new CustomEvent<>(type)); + } + + @Override + public <T> void fireEventWithDetail(String type, T detail) { + CustomEventInit<T> init = CustomEventInit.create(); + init.setDetail(detail); + refire(new CustomEvent<>(type, init)); + } + + @Override + public <T> void fireEvent(String type, CustomEventInit<T> init) { + refire(new CustomEvent<>(type, init)); + } + + @Override + public <T> void fireEvent(String type, CustomEvent<T> e) { + if (!type.equals(e.type)) { + throw new IllegalArgumentException(type + " != " + e.type); + } + refire(e); + } + + /** + * Utility to fire an event on this object and also optionally on the parent if still active. All {@code fireEvent} + * overloads dispatch to this. + * + * @param e the event to fire + * @param <T> the type of the custom event data + */ + private <T> void refire(CustomEvent<T> e) { + // explicitly calling super.fireEvent to avoid calling ourselves recursively + super.fireEvent(e.type, e); + if (originalActive && state() == original.state()) { + // When these fail to match, it probably means that the original's state was paused, but we're still + // holding on to it.
Since we haven't been internalClose()d yet, that means we're still waiting for @@ -203,23 +225,27 @@ private void retainForExternalUse() { */ @JsMethod public void setViewport(double firstRow, double lastRow, @JsOptional @JsNullable Column[] columns, - @JsOptional @JsNullable Double updateIntervalMs) { + @JsOptional @JsNullable Double updateIntervalMs, + @JsOptional @JsNullable Boolean isReverseViewport) { retainForExternalUse(); - setInternalViewport(firstRow, lastRow, columns, updateIntervalMs); + setInternalViewport(firstRow, lastRow, columns, updateIntervalMs, isReverseViewport); } - public void setInternalViewport(double firstRow, double lastRow, Column[] columns, Double updateIntervalMs) { + public void setInternalViewport(double firstRow, double lastRow, Column[] columns, Double updateIntervalMs, + Boolean isReverseViewport) { if (updateIntervalMs != null && refresh != updateIntervalMs) { throw new IllegalArgumentException( "Can't change refreshIntervalMs on a later call to setViewport, it must be consistent or omitted"); } - copy.then(table -> { - if (!table.isBlinkTable()) { - // we only set blink table viewports once; and that's in the constructor - // table.setInternalViewport(firstRow, lastRow, columns); - } - return Promise.resolve(table); - }); + if (isReverseViewport == null) { + isReverseViewport = false; + } + if (!state().getTableDef().getAttributes().isBlinkTable()) { + // we only set blink table viewports once; and that's in the constructor + serverViewport = RangeSet.ofRange((long) firstRow, (long) lastRow); + this.sendBarrageSubscriptionRequest( + serverViewport, Js.uncheckedCast(columns), updateIntervalMs, isReverseViewport); + } } /** @@ -231,6 +257,8 @@ public void close() { JsLog.warn("TableViewportSubscription.close called on subscription that's already done."); } retained = false; + + // Instead of calling super.close(), we delegate to internalClose() internalClose(); } @@ -250,13 +278,7 @@ public void internalClose() { status = Status.DONE; - // not retained externally, and the original is inactive, mark as "not realized" - realized = null; - - copy.then(table -> { - table.close(); - return Promise.resolve(table); - }); + super.close(); } /** @@ -271,115 +293,114 @@ public Promise getViewportData() { } public Promise getInternalViewportData() { - return copy.then(JsTable::getInternalViewportData); + if (isSubscriptionReady()) { + return Promise.resolve(viewportData); + } + final LazyPromise promise = new LazyPromise<>(); + addEventListenerOneShot(EVENT_UPDATED, ignored -> promise.succeed(viewportData)); + return promise.asPromise(); } public Status getStatus() { - if (realized == null) { - assert status != Status.ACTIVE - : "when the realized table is null, status should only be DONE or STARTING, instead is " + status; - } else { - if (realized.isAlive()) { - assert status == Status.ACTIVE - : "realized table is alive, expected status ACTIVE, instead is " + status; - } else { - assert status == Status.DONE : "realized table is closed, expected status DONE, instead is " + status; - } - } + // if (realized == null) { + // assert status != Status.ACTIVE + // : "when the realized table is null, status should only be DONE or STARTING, instead is " + status; + // } else { + // if (realized.isAlive()) { + // assert status == Status.ACTIVE + // : "realized table is alive, expected status ACTIVE, instead is " + status; + // } else { + // assert status == Status.DONE : "realized table is closed, expected status DONE, instead is " + status; + // } + // } return status; } 
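+    // Editor's note: a hypothetical usage sketch for the promise-based accessors above; rather than
+    // polling getStatus(), callers can await the first delivered viewport, e.g.:
+    //   subscription.getViewportData().then(viewport -> {
+    //       viewport.getRows().forEach((row, i, arr) -> { /* read cells, e.g. viewport.getData(i, column) */ });
+    //       return null;
+    //   });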
public double size() { - assert getStatus() == Status.ACTIVE; - return realized.getSize(); - } - - public double totalSize() { - assert getStatus() == Status.ACTIVE; - return realized.getTotalSize(); + // TODO this is wrong + assert getStatus() != Status.DONE; + return super.size(); } @JsMethod public Promise snapshot(JsRangeSet rows, Column[] columns) { retainForExternalUse(); // TODO #1039 slice rows and drop columns - return copy.then(table -> { - final ClientTableState state = table.lastVisibleState(); - String[] columnTypes = Arrays.stream(state.getTableDef().getColumns()) - .map(ColumnDefinition::getType) - .toArray(String[]::new); - - final BitSet columnBitset = table.lastVisibleState().makeBitset(columns); - return Callbacks.promise(this, callback -> { - WorkerConnection connection = table.getConnection(); - BiDiStream stream = connection.streamFactory().create( - headers -> connection.flightServiceClient().doExchange(headers), - (first, headers) -> connection.browserFlightServiceClient().openDoExchange(first, headers), - (next, headers, c) -> connection.browserFlightServiceClient().nextDoExchange(next, headers, - c::apply), - new FlightData()); - - FlatBufferBuilder doGetRequest = new FlatBufferBuilder(1024); - int columnsOffset = BarrageSnapshotRequest.createColumnsVector(doGetRequest, - columnBitset.toByteArray()); - int viewportOffset = BarrageSnapshotRequest.createViewportVector(doGetRequest, serializeRanges( - Collections.singleton(rows.getRange()))); - int serializationOptionsOffset = BarrageSnapshotOptions - .createBarrageSnapshotOptions(doGetRequest, ColumnConversionMode.Stringify, true, 0, 0); - int tableTicketOffset = - BarrageSnapshotRequest.createTicketVector(doGetRequest, - TypedArrayHelper.wrap(state.getHandle().getTicket())); - BarrageSnapshotRequest.startBarrageSnapshotRequest(doGetRequest); - BarrageSnapshotRequest.addTicket(doGetRequest, tableTicketOffset); - BarrageSnapshotRequest.addColumns(doGetRequest, columnsOffset); - BarrageSnapshotRequest.addSnapshotOptions(doGetRequest, serializationOptionsOffset); - BarrageSnapshotRequest.addViewport(doGetRequest, viewportOffset); - doGetRequest.finish(BarrageSnapshotRequest.endBarrageSnapshotRequest(doGetRequest)); - - FlightData request = new FlightData(); - request.setAppMetadata( - WebBarrageUtils.wrapMessage(doGetRequest, BarrageMessageType.BarrageSnapshotRequest)); - stream.send(request); - stream.end(); - stream.onData(flightData -> { - - Message message = Message.getRootAsMessage(TypedArrayHelper.wrap(flightData.getDataHeader_asU8())); - if (message.headerType() == MessageHeader.Schema) { - // ignore for now, we'll handle this later - return; - } - assert message.headerType() == MessageHeader.RecordBatch; - RecordBatch header = (RecordBatch) message.header(new RecordBatch()); - Uint8Array appMetadataBytes = flightData.getAppMetadata_asU8(); - BarrageUpdateMetadata update = null; - if (appMetadataBytes.length != 0) { - BarrageMessageWrapper barrageMessageWrapper = - BarrageMessageWrapper - .getRootAsBarrageMessageWrapper(TypedArrayHelper.wrap(appMetadataBytes)); - - update = BarrageUpdateMetadata.getRootAsBarrageUpdateMetadata( - barrageMessageWrapper.msgPayloadAsByteBuffer()); - } - TableSnapshot snapshot = WebBarrageUtils.createSnapshot(header, - WebBarrageUtils.typedArrayToAlignedLittleEndianByteBuffer(flightData.getDataBody_asU8()), - update, true, columnTypes); - callback.onSuccess(snapshot); - }); - stream.onStatus(status -> { - if (!status.isOk()) { - callback.onFailure(status.getDetails()); - } - }); - 
}).then(defer()).then(snapshot -> { - SubscriptionTableData pretendSubscription = new SubscriptionTableData(Js.uncheckedCast(columns), - state.getRowFormatColumn() == null ? NO_ROW_FORMAT_COLUMN - : state.getRowFormatColumn().getIndex(), - null); - TableData data = pretendSubscription.handleSnapshot(snapshot); - return Promise.resolve(data); - }).then(defer()); - }); + // final ClientTableState state = original.lastVisibleState(); + String[] columnTypes = Arrays.stream(state().getTableDef().getColumns()) + .map(ColumnDefinition::getType) + .toArray(String[]::new); + + final BitSet columnBitset = state().makeBitset(columns); + return Callbacks.promise(this, callback -> { + WorkerConnection connection = connection(); + BiDiStream stream = connection.streamFactory().create( + headers -> connection.flightServiceClient().doExchange(headers), + (first, headers) -> connection.browserFlightServiceClient().openDoExchange(first, headers), + (next, headers, c) -> connection.browserFlightServiceClient().nextDoExchange(next, headers, + c::apply), + new FlightData()); + + FlatBufferBuilder doGetRequest = new FlatBufferBuilder(1024); + int columnsOffset = BarrageSnapshotRequest.createColumnsVector(doGetRequest, + columnBitset.toByteArray()); + int viewportOffset = BarrageSnapshotRequest.createViewportVector(doGetRequest, serializeRanges( + Collections.singleton(rows.getRange()))); + int serializationOptionsOffset = BarrageSnapshotOptions + .createBarrageSnapshotOptions(doGetRequest, ColumnConversionMode.Stringify, true, 0, 0); + int tableTicketOffset = + BarrageSnapshotRequest.createTicketVector(doGetRequest, + TypedArrayHelper.wrap(state().getHandle().getTicket())); + BarrageSnapshotRequest.startBarrageSnapshotRequest(doGetRequest); + BarrageSnapshotRequest.addTicket(doGetRequest, tableTicketOffset); + BarrageSnapshotRequest.addColumns(doGetRequest, columnsOffset); + BarrageSnapshotRequest.addSnapshotOptions(doGetRequest, serializationOptionsOffset); + BarrageSnapshotRequest.addViewport(doGetRequest, viewportOffset); + doGetRequest.finish(BarrageSnapshotRequest.endBarrageSnapshotRequest(doGetRequest)); + + FlightData request = new FlightData(); + request.setAppMetadata( + WebBarrageUtils.wrapMessage(doGetRequest, BarrageMessageType.BarrageSnapshotRequest)); + stream.send(request); + stream.end(); + stream.onData(flightData -> { + + Message message = Message.getRootAsMessage(TypedArrayHelper.wrap(flightData.getDataHeader_asU8())); + if (message.headerType() == MessageHeader.Schema) { + // ignore for now, we'll handle this later + return; + } + assert message.headerType() == MessageHeader.RecordBatch; + RecordBatch header = (RecordBatch) message.header(new RecordBatch()); + Uint8Array appMetadataBytes = flightData.getAppMetadata_asU8(); + BarrageUpdateMetadata update = null; + if (appMetadataBytes.length != 0) { + BarrageMessageWrapper barrageMessageWrapper = + BarrageMessageWrapper + .getRootAsBarrageMessageWrapper(TypedArrayHelper.wrap(appMetadataBytes)); + + update = BarrageUpdateMetadata.getRootAsBarrageUpdateMetadata( + barrageMessageWrapper.msgPayloadAsByteBuffer()); + } + TableSnapshot snapshot = WebBarrageUtils.createSnapshot(header, + WebBarrageUtils.typedArrayToAlignedLittleEndianByteBuffer(flightData.getDataBody_asU8()), + update, true, columnTypes); + callback.onSuccess(snapshot); + }); + stream.onStatus(status -> { + if (!status.isOk()) { + callback.onFailure(status.getDetails()); + } + }); + }).then(defer()).then(snapshot -> { + SubscriptionTableData pretendSubscription = new 
SubscriptionTableData(Js.uncheckedCast(columns), + state().getRowFormatColumn() == null ? NO_ROW_FORMAT_COLUMN + : state().getRowFormatColumn().getIndex(), + null); + TableData data = pretendSubscription.handleSnapshot(snapshot); + return Promise.resolve(data); + }).then(defer()); } /** diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java index 82fc7d2ea6e..eea16930ffa 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java @@ -279,7 +279,6 @@ public static Object cleanData(Object dataColumn, Column column) { @JsProperty public double getOffset() { return offset; - } @Override diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/Reference.java b/web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/Reference.java index 46448b46c8f..7c83a2f8987 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/Reference.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/Reference.java @@ -11,15 +11,22 @@ public abstract class Reference { } Reference(T referent, ReferenceQueue queue) { - jsWeakRef = new JsWeakRef<>(referent); + if (referent != null) { + jsWeakRef = new JsWeakRef<>(referent); + } } public T get() { - return this.jsWeakRef.deref(); + if (jsWeakRef == null) { + return null; + } + return jsWeakRef.deref(); } public void clear() { - this.jsWeakRef = null; + if (jsWeakRef != null) { + jsWeakRef = null; + } } public boolean isEnqueued() { diff --git a/web/client-api/src/test/java/io/deephaven/web/client/api/TotalsTableTestGwt.java b/web/client-api/src/test/java/io/deephaven/web/client/api/TotalsTableTestGwt.java index 6a6c2d3e706..857cad855c5 100644 --- a/web/client-api/src/test/java/io/deephaven/web/client/api/TotalsTableTestGwt.java +++ b/web/client-api/src/test/java/io/deephaven/web/client/api/TotalsTableTestGwt.java @@ -72,7 +72,7 @@ public void testQueryDefinedConfigs() { .then(totals -> { assertEquals(3, totals.getColumns().length); assertEquals(1, totals.getSize(), DELTA); - totals.setViewport(0, 100, null, null); + totals.setViewport(0, 100, null, null, null); return waitForEvent(totals, JsTable.EVENT_UPDATED, checkTotals(totals, 5, 6., 0, "a1"), 2508); @@ -81,7 +81,7 @@ public void testQueryDefinedConfigs() { .then(totals -> { assertEquals(3, totals.getColumns().length); assertEquals(1, totals.getSize(), DELTA); - totals.setViewport(0, 100, null, null); + totals.setViewport(0, 100, null, null, null); return waitForEvent(totals, JsTable.EVENT_UPDATED, checkTotals(totals, 5, 6.0, 0., "a2"), 2509); @@ -112,7 +112,7 @@ public void ignore_testTotalsOnFilteredTable() { totalTables[0] = totals; assertEquals(3, totals.getColumns().length); assertEquals(1, totals.getSize(), DELTA); - totals.setViewport(0, 100, null, null); + totals.setViewport(0, 100, null, null, null); // confirm the normal totals match the filtered data return waitForEvent(totals, JsTable.EVENT_UPDATED, @@ -123,7 +123,7 @@ public void ignore_testTotalsOnFilteredTable() { totalTables[1] = totals; assertEquals(3, totals.getColumns().length); assertEquals(1, totals.getSize(), DELTA); - totals.setViewport(0, 100, null, null); + totals.setViewport(0, 100, null, null, null); // confirm the grand totals are unchanged return waitForEvent(totals, 
JsTable.EVENT_UPDATED, @@ -238,7 +238,7 @@ public void ignore_testFilteringTotalsTable() { totalTables[0] = totals; assertEquals(4, totals.getColumns().length); assertEquals(2, totals.getSize(), DELTA); - totals.setViewport(0, 100, null, null); + totals.setViewport(0, 100, null, null, null); // confirm the normal totals match the filtered data return waitForEvent(totals, JsTable.EVENT_UPDATED, checkTotals(totals, "a1", @@ -250,7 +250,7 @@ public void ignore_testFilteringTotalsTable() { totalTables[1] = totals; assertEquals(4, totals.getColumns().length); assertEquals(2, totals.getSize(), DELTA); - totals.setViewport(0, 100, null, null); + totals.setViewport(0, 100, null, null, null); // confirm the grand totals include the missing row... return waitForEvent(totals, JsTable.EVENT_UPDATED, checkTotals(totals, "a2", @@ -269,8 +269,8 @@ public void ignore_testFilteringTotalsTable() { totalTables[1].applyFilter(new FilterCondition[] { totalTables[1].findColumn("J__Avg").filter().eq(FilterValue.ofNumber(5.0)) }); - totalTables[0].setViewport(0, 100, null, null); - totalTables[1].setViewport(0, 100, null, null); + totalTables[0].setViewport(0, 100, null, null, null); + totalTables[1].setViewport(0, 100, null, null, null); return promiseAllThen(table, totalPromises[0] = waitForEvent(totalTables[0], JsTable.EVENT_UPDATED, @@ -327,7 +327,7 @@ public void testGroupedTotals() { (JsTotalsTable totals) -> { assertEquals(4, totals.getColumns().length); assertEquals(2, totals.getSize(), DELTA); - totals.setViewport(0, 100, null, null); + totals.setViewport(0, 100, null, null, null); // confirm the grand totals are unchanged return waitForEvent(totals, JsTable.EVENT_UPDATED, update -> { From 0b2f6c2a01e9e0a8bab17dca87c426b116a031ad Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 9 Apr 2024 20:02:54 -0500 Subject: [PATCH 023/219] More proposed server cleanup --- engine/chunk/src/main/java/io/deephaven/chunk/ChunkType.java | 4 ++-- .../extensions/barrage/table/BarrageRedirectedTable.java | 1 - .../io/deephaven/extensions/barrage/table/BarrageTable.java | 2 +- .../extensions/barrage/util/BarrageStreamReader.java | 2 +- 4 files changed, 4 insertions(+), 5 deletions(-) diff --git a/engine/chunk/src/main/java/io/deephaven/chunk/ChunkType.java b/engine/chunk/src/main/java/io/deephaven/chunk/ChunkType.java index 5929d90efd2..91920ba8ee9 100644 --- a/engine/chunk/src/main/java/io/deephaven/chunk/ChunkType.java +++ b/engine/chunk/src/main/java/io/deephaven/chunk/ChunkType.java @@ -22,7 +22,7 @@ public enum ChunkType implements ChunkFactory { Long(new LongChunkFactory()), Float(new FloatChunkFactory()), Double(new DoubleChunkFactory()), - Object(new ObjectChunkFactory()); + Object(new ObjectChunkFactory<>()); // @formatter:on @@ -30,7 +30,7 @@ public enum ChunkType implements ChunkFactory { ChunkType.Boolean, ChunkType.Char, ChunkType.Byte, ChunkType.Short, ChunkType.Int, ChunkType.Long, ChunkType.Float, ChunkType.Double, ChunkType.Object); - public static ChunkType fromElementType(Class elementType) { + public static ChunkType fromElementType(Class elementType) { return fromElementTypeMap.get(elementType); } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/table/BarrageRedirectedTable.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/table/BarrageRedirectedTable.java index fe735a20f5b..ec0c0f9f953 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/table/BarrageRedirectedTable.java +++ 
b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/table/BarrageRedirectedTable.java @@ -30,7 +30,6 @@ import org.jetbrains.annotations.Nullable; import java.util.ArrayDeque; -import java.util.BitSet; import java.util.LinkedHashMap; import java.util.Map; import java.util.concurrent.ScheduledExecutorService; diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/table/BarrageTable.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/table/BarrageTable.java index 118c968e479..d1a6c628159 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/table/BarrageTable.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/table/BarrageTable.java @@ -101,7 +101,7 @@ public interface ViewportChangedCallback { * Due to the asynchronous aspect of this protocol, the client may have multiple requests in-flight and the server * may choose to honor the most recent request and assumes that the client no longer wants earlier but unacked * viewport changes. - * + *
+ * <p>
* The server notifies the client which viewport it is respecting by including it inside of each snapshot. Note that * the server assumes that the client has maintained its state prior to these server-side viewport acks and will not * re-send data that the client should already have within the existing viewport. diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java index 3d749adee21..834cda0ae00 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java @@ -177,7 +177,7 @@ public BarrageMessage safelyParseFrom(final StreamReaderOptions options, } if (header.headerType() != org.apache.arrow.flatbuf.MessageHeader.RecordBatch) { - throw new IllegalStateException("Only know how to decode Schema/BarrageRecordBatch messages"); + throw new IllegalStateException("Only know how to decode Schema/RecordBatch messages"); } // throw an error when no app metadata (snapshots now provide by default) From 588e416f0546b576fd3cd243ab475c7ea2b2f05c Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Wed, 10 Apr 2024 15:27:19 -0500 Subject: [PATCH 024/219] Suggest narrower type for -1/1 --- .../main/java/io/deephaven/engine/rowset/RowSetShiftData.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/engine/rowset/src/main/java/io/deephaven/engine/rowset/RowSetShiftData.java b/engine/rowset/src/main/java/io/deephaven/engine/rowset/RowSetShiftData.java index 9e1c20247b1..c524af02b8a 100644 --- a/engine/rowset/src/main/java/io/deephaven/engine/rowset/RowSetShiftData.java +++ b/engine/rowset/src/main/java/io/deephaven/engine/rowset/RowSetShiftData.java @@ -248,7 +248,7 @@ public void apply(final Callback shiftCallback) { for (int idx = 0; idx < polaritySwapSize; ++idx) { int start = (idx == 0) ? 0 : polaritySwapIndices.get(idx - 1); int end = polaritySwapIndices.get(idx) - 1; - final long dir = getShiftDelta(start) > 0 ? -1 : 1; + final int dir = getShiftDelta(start) > 0 ? -1 : 1; if (dir < 0) { final int tmp = start; start = end; @@ -272,7 +272,7 @@ public void unapply(final Callback shiftCallback) { for (int idx = 0; idx < polaritySwapSize; ++idx) { int start = (idx == 0) ? 0 : polaritySwapIndices.get(idx - 1); int end = polaritySwapIndices.get(idx) - 1; - final long dir = getShiftDelta(start) > 0 ? 1 : -1; + final int dir = getShiftDelta(start) > 0 ? 
1 : -1; if (dir < 0) { final int tmp = start; start = end; From 8b96b9bd4b375d7a191ce564a9b040425d6eb73f Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Wed, 10 Apr 2024 15:28:00 -0500 Subject: [PATCH 025/219] Continue trying to make apis more consistent --- .../io/deephaven/web/client/api/JsTable.java | 110 +++--- .../deephaven/web/client/api/TableData.java | 38 +- .../api/barrage/WebBarrageStreamReader.java | 17 +- .../barrage/data/WebBarrageSubscription.java | 2 + .../AbstractTableSubscription.java | 42 +- .../subscription/SubscriptionTableData.java | 5 + .../client/api/subscription/ViewportData.java | 373 +----------------- .../web/client/api/tree/JsTreeTable.java | 32 +- .../api/subscription/ViewportTestGwt.java | 15 +- 9 files changed, 178 insertions(+), 456 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java index e8d6df43df9..d20cf2f6ac2 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java @@ -1930,61 +1930,61 @@ public WorkerConnection getConnection() { return workerConnection; } - public void refreshViewport(ClientTableState state, Viewport vp) { - assert state() == state : "Called refreshViewport with wrong state (" + state + " instead of " + state() + ")"; - assert state.getResolution() == ClientTableState.ResolutionState.RUNNING - : "Do not call refreshViewport for a state that is not running! (" + state + ")"; - - currentViewportData = null; // ignore any deltas for past viewports - workerConnection.scheduleCheck(state); - // now that we've made sure the server knows, if we already know that the viewport is beyond what exists, we - // can go ahead and fire an update event. We're in the onResolved call, so we know the handle has resolved - // and if size is not -1, then we've already at least gotten the initial snapshot (otherwise, that snapshot - // will be here soon, and will fire its own event) - if (state.getSize() != ClientTableState.SIZE_UNINITIALIZED && state.getSize() <= vp.getRows().getFirstRow()) { - JsLog.debug("Preparing to send a 'fake' update event since " + state.getSize() + "<=" - + vp.getRows().getFirstRow(), state); - LazyPromise.runLater(() -> { - if (state != state()) { - return; - } - - // get the column expected to be in the snapshot - JsArray columns = null;// Js.uncheckedCast(getBinding().getColumns()); - Column[] allColumns = state.getColumns(); - if (columns == null) { - columns = Js.uncheckedCast(allColumns); - } - // build an array of empty column data for this snapshot - Object[] dataColumns = new Object[allColumns.length]; - - for (int i = 0; i < columns.length; i++) { - Column c = columns.getAt(i); - dataColumns[c.getIndex()] = JsData.newArray(c.getType()); - if (c.getFormatStringColumnIndex() != null) { - dataColumns[c.getFormatStringColumnIndex()] = JsData.newArray("java.lang.String"); - } - if (c.getStyleColumnIndex() != null) { - dataColumns[c.getStyleColumnIndex()] = JsData.newArray("long"); - } - } - if (currentState.getRowFormatColumn() != null) { - dataColumns[currentState.getRowFormatColumn().getIndex()] = JsData.newArray("long"); - } - - ViewportData data = new ViewportData(RangeSet.empty(), dataColumns, columns, - currentState.getRowFormatColumn() == null ? 
NO_ROW_FORMAT_COLUMN - : currentState.getRowFormatColumn().getIndex(), - 0); - this.currentViewportData = data; - CustomEventInit updatedEvent = CustomEventInit.create(); - updatedEvent.setDetail(data); - JsLog.debug("Sending 'fake' update event since " + state.getSize() + "<=" + vp.getRows().getFirstRow(), - vp, state); - fireEvent(EVENT_UPDATED, updatedEvent); - }); - } - } +// public void refreshViewport(ClientTableState state, Viewport vp) { +// assert state() == state : "Called refreshViewport with wrong state (" + state + " instead of " + state() + ")"; +// assert state.getResolution() == ClientTableState.ResolutionState.RUNNING +// : "Do not call refreshViewport for a state that is not running! (" + state + ")"; +// +// currentViewportData = null; // ignore any deltas for past viewports +// workerConnection.scheduleCheck(state); +// // now that we've made sure the server knows, if we already know that the viewport is beyond what exists, we +// // can go ahead and fire an update event. We're in the onResolved call, so we know the handle has resolved +// // and if size is not -1, then we've already at least gotten the initial snapshot (otherwise, that snapshot +// // will be here soon, and will fire its own event) +// if (state.getSize() != ClientTableState.SIZE_UNINITIALIZED && state.getSize() <= vp.getRows().getFirstRow()) { +// JsLog.debug("Preparing to send a 'fake' update event since " + state.getSize() + "<=" +// + vp.getRows().getFirstRow(), state); +// LazyPromise.runLater(() -> { +// if (state != state()) { +// return; +// } +// +// // get the column expected to be in the snapshot +// JsArray columns = null;// Js.uncheckedCast(getBinding().getColumns()); +// Column[] allColumns = state.getColumns(); +// if (columns == null) { +// columns = Js.uncheckedCast(allColumns); +// } +// // build an array of empty column data for this snapshot +// Object[] dataColumns = new Object[allColumns.length]; +// +// for (int i = 0; i < columns.length; i++) { +// Column c = columns.getAt(i); +// dataColumns[c.getIndex()] = JsData.newArray(c.getType()); +// if (c.getFormatStringColumnIndex() != null) { +// dataColumns[c.getFormatStringColumnIndex()] = JsData.newArray("java.lang.String"); +// } +// if (c.getStyleColumnIndex() != null) { +// dataColumns[c.getStyleColumnIndex()] = JsData.newArray("long"); +// } +// } +// if (currentState.getRowFormatColumn() != null) { +// dataColumns[currentState.getRowFormatColumn().getIndex()] = JsData.newArray("long"); +// } +// +// ViewportData data = new ViewportData(RangeSet.empty(), dataColumns, columns, +// currentState.getRowFormatColumn() == null ? 
NO_ROW_FORMAT_COLUMN +// : currentState.getRowFormatColumn().getIndex(), +// 0); +// this.currentViewportData = data; +// CustomEventInit updatedEvent = CustomEventInit.create(); +// updatedEvent.setDetail(data); +// JsLog.debug("Sending 'fake' update event since " + state.getSize() + "<=" + vp.getRows().getFirstRow(), +// vp, state); +// fireEvent(EVENT_UPDATED, updatedEvent); +// }); +// } +// } public boolean isActive(ClientTableState state) { return currentState == state; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/TableData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/TableData.java index 5018db59ab7..739b49d26e4 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/TableData.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/TableData.java @@ -17,13 +17,24 @@ import jsinterop.base.Js; /** - * Common interface for various ways of accessing table data and formatting. - * + * Common interface for various ways of accessing table data and formatting for viewport or non-viewport subscriptions on tables, + * data in trees, and snapshots. + *
<p>
+ * Generally speaking, it is more efficient to access data in column-major order, rather than iterating through + * each Row and accessing all columns that it holds. The {@link #getRows()} accessor can be useful to read row data, + * but may incur other costs - it is likely faster to access data by columns using {@link #getData(RowPositionUnion, Column)}. + */ +/* * Java note: this interface contains some extra overloads that aren't available in JS. Implementations are expected to * implement only abstract methods, and default methods present in this interface will dispatch accordingly. */ @TsName(namespace = "dh") public interface TableData { + public static final int NO_ROW_FORMAT_COLUMN = -1; + + /** + * TS type union to allow either "int" or "LongWrapper" to be passed as an argument for various methods. + */ @TsUnion @JsType(name = "?", namespace = JsPackage.GLOBAL, isNative = true) interface RowPositionUnion { @@ -50,6 +61,21 @@ default int asInt() { } } + @JsProperty + JsRangeSet getFullIndex(); + + @JsProperty + JsRangeSet getAdded(); + + @JsProperty + JsRangeSet getRemoved(); + + @JsProperty + JsRangeSet getModified(); + +// @JsProperty +// JsShiftData getShifts(); + @JsProperty JsArray getColumns(); @@ -92,8 +118,14 @@ default Format getFormat(RowPositionUnion index, Column column) { Format getFormat(long index, Column column); + /** + * The position of the first returned row, null if this data is not for a viewport. + */ + @JsProperty + Double getOffset(); + @TsName(namespace = "dh") - public interface Row { + interface Row { @JsProperty LongWrapper getIndex(); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java index e90ca633eac..06f6625e085 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java @@ -4,7 +4,6 @@ package io.deephaven.web.client.api.barrage; import com.google.common.io.LittleEndianDataInputStream; -import elemental2.dom.DomGlobal; import io.deephaven.barrage.flatbuf.BarrageMessageType; import io.deephaven.barrage.flatbuf.BarrageMessageWrapper; import io.deephaven.barrage.flatbuf.BarrageModColumnMetadata; @@ -40,7 +39,6 @@ public class WebBarrageStreamReader { private static final int MAX_CHUNK_SIZE = Integer.MAX_VALUE - 8; - // record progress in reading private long numAddRowsRead = 0; private long numAddRowsTotal = 0; @@ -151,20 +149,6 @@ public WebBarrageMessage parseFrom(final StreamReaderOptions options, BitSet exp throw new IllegalStateException("Only know how to decode Schema/RecordBatch messages"); } - ByteBuffer body = TypedArrayHelper.wrap(flightData.getDataBody_asU8()); - // final RecordBatch batch = (RecordBatch) header.header(new RecordBatch()); - // DomGlobal.console.log(headerType, MessageHeader.names[headerType]); - // DomGlobal.console.log("body.limit()", body.limit()); - // DomGlobal.console.log("batch.length()", batch.length()); - // DomGlobal.console.log("batch.buffersLength()", batch.buffersLength()); - // for (int i = 0; i < batch.buffersLength(); i++) { - // DomGlobal.console.log("batch.buffers("+i+").offset()", batch.buffers(i).offset()); - // DomGlobal.console.log("batch.buffers("+i+").length()", batch.buffers(i).length()); - // } - - // if (!body.hasRemaining()) { - // throw new IllegalStateException("Missing body tag"); - // } // throw an error 
when no app metadata (snapshots now provide by default) if (msg == null) { @@ -174,6 +158,7 @@ public WebBarrageMessage parseFrom(final StreamReaderOptions options, BitSet exp final RecordBatch batch = (RecordBatch) header.header(new RecordBatch()); msg.length = batch.length(); + ByteBuffer body = TypedArrayHelper.wrap(flightData.getDataBody_asU8()); final LittleEndianDataInputStream ois = new LittleEndianDataInputStream(new ByteBufferInputStream(body)); final Iterator fieldNodeIter = diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java index f0df617fb4c..0ceb9c6bcf3 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java @@ -279,6 +279,7 @@ public void applyUpdates(WebBarrageMessage message) { if (message.isSnapshot) { viewportChangedHandler.onServerViewportChanged(serverViewport, serverColumns, serverReverseViewport); } + state.setSize(message.rowsAdded.size()); dataChangedHandler.onDataChanged(message.rowsAdded, message.rowsRemoved, RangeSet.empty(), message.shifted, new BitSet(0)); } @@ -449,6 +450,7 @@ public void applyUpdates(WebBarrageMessage message) { if (message.isSnapshot) { viewportChangedHandler.onServerViewportChanged(serverViewport, serverColumns, serverReverseViewport); } + state.setSize(currentRowSet.size()); dataChangedHandler.onDataChanged(message.rowsAdded, message.rowsRemoved, totalMods, message.shifted, modifiedColumnSet); } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java index 00fdb43808e..cbca8970dc4 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java @@ -8,6 +8,7 @@ import com.vertispan.tsdefs.annotations.TsName; import elemental2.core.JsArray; import elemental2.dom.CustomEventInit; +import elemental2.dom.DomGlobal; import io.deephaven.barrage.flatbuf.BarrageMessageType; import io.deephaven.extensions.barrage.BarrageSubscriptionOptions; import io.deephaven.extensions.barrage.ColumnConversionMode; @@ -127,13 +128,16 @@ private void onDataChanged(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet to // TODO if this was a snapshot (or subscriptionReady was false for some interval), we probably need to // notify of the entire table as a single big change - notifyUpdate(rowsAdded, rowsRemoved, totalMods, shifted); } protected void notifyUpdate(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet totalMods, ShiftedRange[] shifted) { - // TODO Rewrite shifts as adds/removed/modifies? in the past we ignored them... - UpdateEventData detail = new UpdateEventData(rowsAdded, rowsRemoved, totalMods, shifted); + UpdateEventData detail = new UpdateEventData( + transformRowsetForConsumer(rowsAdded), + transformRowsetForConsumer(rowsRemoved), + transformRowsetForConsumer(totalMods), + barrageSubscription.getServerViewport() != null ? 
null : shifted + ); CustomEventInit event = CustomEventInit.create(); event.setDetail(detail); fireEvent(TableSubscription.EVENT_UPDATED, event); @@ -171,7 +175,7 @@ public Format getFormat(Column column) { if (column.getStyleColumnIndex() != null) { cellColors = barrageSubscription.getData(index, column.getStyleColumnIndex()); } - if (rowStyleColumn != NO_ROW_FORMAT_COLUMN) { + if (rowStyleColumn != TableData.NO_ROW_FORMAT_COLUMN) { rowColors = barrageSubscription.getData(index, rowStyleColumn); } if (column.getFormatStringColumnIndex() != null) { @@ -195,7 +199,7 @@ public class UpdateEventData implements TableData { // cached copy in case it was requested, could be requested again private JsArray allRows; - // TODO not this + // TODO expose this property only if this is a viewport public double offset; public UpdateEventData(RangeSet added, RangeSet removed, RangeSet modified, ShiftedRange[] shifted) { @@ -210,7 +214,7 @@ public UpdateEventData(RangeSet added, RangeSet removed, RangeSet modified, Shif * @return double */ @JsProperty - public double getOffset() { + public Double getOffset() { return offset; } @@ -223,8 +227,9 @@ public double getOffset() { public JsArray getRows() { if (allRows == null) { allRows = new JsArray<>(); - RangeSet positions = barrageSubscription.getCurrentRowSet() - .subsetForPositions(barrageSubscription.getServerViewport(), false); + RangeSet rowSet = barrageSubscription.getCurrentRowSet(); + RangeSet positions = transformRowsetForConsumer(rowSet); + DomGlobal.console.log(rowSet, positions); positions.indexIterator().forEachRemaining((long index) -> { allRows.push(new SubscriptionRow(index)); }); @@ -311,7 +316,7 @@ public JsArray getColumns() { * * @return dh.RangeSet */ - @JsProperty + @Override public JsRangeSet getAdded() { return added; } @@ -321,7 +326,7 @@ public JsRangeSet getAdded() { * * @return dh.RangeSet */ - @JsProperty + @Override public JsRangeSet getRemoved() { return removed; } @@ -331,17 +336,30 @@ public JsRangeSet getRemoved() { * * @return dh.RangeSet */ - @JsProperty + @Override public JsRangeSet getModified() { return modified; } - @JsProperty + @Override public JsRangeSet getFullIndex() { return new JsRangeSet(barrageSubscription.getCurrentRowSet()); } } + /** + * If a viewport is in use, transforms the given rowset to position space based on + * that viewport. 
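+ * <p>
+ * Illustrative sketch with made-up row numbers, assuming (per the description above) that
+ * {@code subsetForPositions} maps this rowset's row keys to their zero-based positions within the viewport:
+ * <pre>{@code
+ * RangeSet viewport = RangeSet.ofRange(100, 109); // rows the server viewport covers
+ * RangeSet touched = RangeSet.ofRange(103, 104); // rows changed by an update
+ * // a consumer would observe positions {3, 4} rather than the raw row keys
+ * RangeSet positions = touched.subsetForPositions(viewport, false);
+ * }</pre>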
+ * @param rowSet the rowset to possibly transform + * @return a transformed rowset + */ + private RangeSet transformRowsetForConsumer(RangeSet rowSet) { + if (barrageSubscription.getServerViewport() != null) { + return rowSet.subsetForPositions(barrageSubscription.getServerViewport(), false);//TODO reverse + } + return rowSet; + } + protected void onViewportChange(RangeSet serverViewport, BitSet serverColumns, boolean serverReverseViewport) { // if (serverViewport != null || serverReverseViewport) { // throw new IllegalStateException("Not a viewport subscription"); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/SubscriptionTableData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/SubscriptionTableData.java index b6cff055bbf..fb20e30494d 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/SubscriptionTableData.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/SubscriptionTableData.java @@ -665,5 +665,10 @@ public JsRangeSet getModified() { public JsRangeSet getFullIndex() { return new JsRangeSet(index); } + + @Override + public Double getOffset() { + return null; + } } } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java index eea16930ffa..48064ae10d2 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java @@ -23,23 +23,31 @@ import java.util.Set; /** - * Contains data in the current viewport. Also contains the offset to this data, so that the actual row number may be - * determined. Do not assume that the first row in `rows` is the first visible row, because extra rows may be provided + * Extends {@link TableData}, but only contains data in the current viewport. The only API change from TableData is that + * ViewportData also contains the offset to this data, so that the actual row number may be + * determined. + *
<p>
Do not assume that the first row in `rows` is the first visible row, because extra rows may be provided * for easier scrolling without going to the server. */ @TsInterface @TsName(namespace = "dh") -public class ViewportData implements TableData { - private static final Any NULL_SENTINEL = Js.asAny(new JsObject()); +public interface ViewportData extends TableData { + + /** + * The position of the first returned row, null if this data is not for a viewport. + */ + @JsProperty + Double getOffset(); /** * Clean the data at the provided index */ @JsFunction - private interface DataCleaner { + interface DataCleaner { void clean(JsArray data, int index); } + @Deprecated public static final int NO_ROW_FORMAT_COLUMN = -1; public class MergeResults { @@ -48,75 +56,6 @@ public class MergeResults { public Set removed = new HashSet<>(); } - private long offset; - private int length; - private final int maxLength; - private JsArray rows; - private final JsArray columns; - - private final Object[] data; - - private final int rowFormatColumn; - - public ViewportData(RangeSet includedRows, Object[] dataColumns, JsArray columns, int rowFormatColumn, - long maxLength) { - assert maxLength <= Integer.MAX_VALUE; - this.maxLength = (int) maxLength; - - Iterator rangeIterator = includedRows.rangeIterator(); - data = new Object[dataColumns.length]; - if (rangeIterator.hasNext()) { - Range range = rangeIterator.next(); - assert !rangeIterator.hasNext() : "Snapshot only supports one range"; - - offset = range.getFirst(); - length = (int) (range.getLast() - range.getFirst() + 1); - assert length == range.size(); - } else { - offset = -1; - } - - // Clean data for requested columns, and provide format column data as well, if any - for (int i = 0; i < columns.length; i++) { - Column c = columns.getAt(i); - int index = c.getIndex(); - if (dataColumns[index] == null) { - // no data for this column, not requested in viewport - continue; - } - data[index] = cleanData(dataColumns[index], c); - if (c.getStyleColumnIndex() != null) { - data[c.getStyleColumnIndex()] = dataColumns[c.getStyleColumnIndex()]; - } - if (c.getFormatStringColumnIndex() != null) { - data[c.getFormatStringColumnIndex()] = dataColumns[c.getFormatStringColumnIndex()]; - } - } - - // Handle row format column, if any - this.rowFormatColumn = rowFormatColumn; - if (rowFormatColumn != NO_ROW_FORMAT_COLUMN) { - data[rowFormatColumn] = dataColumns[rowFormatColumn]; - } - - // Grow all columns to match the size of the viewport, if necessary - if (length < maxLength) { - for (int i = 0; i < data.length; i++) { - if (data[i] != null) { - JsArray existingColumnData = Js.uncheckedCast(data[i]); - existingColumnData.length = this.maxLength; - existingColumnData.fill(NULL_SENTINEL, length, this.maxLength); - } - } - } - - rows = new JsArray<>(); - for (int i = 0; i < length; i++) { - rows.push(new ViewportRow(i, data, data[rowFormatColumn])); - } - this.columns = JsObject.freeze(Js.uncheckedCast(columns.slice())); - } - private static DataCleaner getDataCleanerForColumnType(String columnType) { switch (columnType) { case "int": @@ -270,290 +209,4 @@ public static Object cleanData(Object dataColumn, Column column) { } } } - - /** - * The index of the first returned row - * - * @return double - */ - @JsProperty - public double getOffset() { - return offset; - } - - @Override - public Row get(long index) { - return getRows().getAt((int) index); - } - - @Override - public Row get(int index) { - return getRows().getAt(index); - } - - @Override - public Any getData(int 
index, Column column) { - return getRows().getAt(index).get(column); - } - - @Override - public Any getData(long index, Column column) { - return getRows().getAt((int) index).get(column); - } - - @Override - public Format getFormat(int index, Column column) { - return getRows().getAt(index).getFormat(column); - } - - @Override - public Format getFormat(long index, Column column) { - return getRows().getAt((int) index).getFormat(column); - } - - /** - * An array of rows of data - * - * @return {@link ViewportRow} array. - */ - @Override - @JsProperty - public JsArray getRows() { - if (rows.length != length) { - rows = new JsArray<>(); - for (int i = 0; i < length; i++) { - rows.push(new ViewportRow(i, data, data[rowFormatColumn])); - } - JsObject.freeze(rows); - } - return rows; - } - - /** - * A list of columns describing the data types in each row - * - * @return {@link Column} array. - */ - @Override - @JsProperty - public JsArray getColumns() { - return columns; - } - - // public MergeResults merge(DeltaUpdates updates) { - // if (offset == -1 && updates.getIncludedAdditions().size() > 0) { - // offset = updates.getIncludedAdditions().getFirstRow(); - // } - // final MergeResults updated = new MergeResults(); - // - // // First we remove rows by nulling them out. - // updates.getRemoved().indexIterator().forEachRemaining((long removedIndex) -> { - // int internalOffset = (int) (removedIndex - offset); - // if (internalOffset < 0 || internalOffset >= length) { - // return; - // } - // for (int i = 0; i < data.length; i++) { - // JsArray existingColumnData = Js.uncheckedCast(data[i]); - // if (existingColumnData == null) { - // continue; - // } - // existingColumnData.setAt(internalOffset, NULL_SENTINEL); - // } - // updated.removed.add(internalOffset); - // }); - // - // // Now we shift data around. 
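A minimal standalone sketch of the mem-move ordering that the commented-out loops below rely on; the helper name, the {first, last, delta} encoding, and the Object[] column stand-in are all invented for illustration:

static void applyShiftsSketch(Object[] column, long[][] shifts) {
    // Forward shifts move data toward higher indexes: walk right to left so each
    // source cell is copied before anything overwrites it.
    for (int si = shifts.length - 1; si >= 0; --si) {
        long first = shifts[si][0], last = shifts[si][1], delta = shifts[si][2];
        if (delta <= 0) {
            continue;
        }
        for (long j = last; j >= first; --j) {
            column[(int) (j + delta)] = column[(int) j];
            column[(int) j] = null;
        }
    }
    // Reverse shifts move data toward lower indexes: walk left to right,
    // for the same reason.
    for (long[] shift : shifts) {
        long first = shift[0], last = shift[1], delta = shift[2];
        if (delta >= 0) {
            continue;
        }
        for (long j = first; j <= last; ++j) {
            column[(int) (j + delta)] = column[(int) j];
            column[(int) j] = null;
        }
    }
}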
- // boolean hasReverseShift = false; - // final ShiftedRange[] shiftedRanges = updates.getShiftedRanges(); - // - // // must apply shifts in mem-move semantics; so we shift forward from right to left first - // for (int si = shiftedRanges.length - 1; si >= 0; --si) { - // final ShiftedRange shiftedRange = shiftedRanges[si]; - // final long shiftDelta = shiftedRange.getDelta(); - // if (shiftDelta < 0) { - // hasReverseShift = true; - // continue; - // } - // - // final long beginAsLong = Math.max(shiftedRange.getRange().getFirst() - offset, 0); - // final int end = (int) Math.min(shiftedRange.getRange().getLast() - offset, length - 1); - // if (end < beginAsLong) { - // // this range is out of our viewport - // continue; - // } - // - // // long math is expensive; so convert to int early/once - // final int begin = (int) beginAsLong; - // - // // iterate backward and move them forward - // for (int j = end; j >= begin; --j) { - // for (int i = 0; i < data.length; ++i) { - // final JsArray existingColumnData = Js.uncheckedCast(data[i]); - // if (existingColumnData == null) { - // continue; - // } - // - // final long internalOffsetAsLong = (j + shiftDelta); - // if (internalOffsetAsLong >= 0 && internalOffsetAsLong < maxLength) { - // // because internalOffsetAsLong is less than maxLen; we know it must be fit in an int - // final int internalOffset = (int) internalOffsetAsLong; - // updated.added.add(internalOffset); - // Any toMove = existingColumnData.getAt(j); - // existingColumnData.setAt(internalOffset, toMove); - // } - // - // updated.removed.add(j); - // existingColumnData.setAt(j, NULL_SENTINEL); - // } - // } - // } - // if (hasReverseShift) { - // // then we shift in reverse from left to right - // for (int si = 0; si < shiftedRanges.length; ++si) { - // final ShiftedRange shiftedRange = shiftedRanges[si]; - // final long shiftDelta = shiftedRange.getDelta(); - // if (shiftDelta > 0) { - // continue; - // } - // - // final long begin = Math.max(shiftedRange.getRange().getFirst() - offset, 0); - // final int end = (int) Math.min(shiftedRange.getRange().getLast() - offset, length - 1); - // if (end < begin) { - // // this range is out of our viewport - // continue; - // } - // - // // iterate forward and move them backward (note: since begin is <= end, we now know it fits in an int) - // for (int j = (int) begin; j <= end; ++j) { - // for (int i = 0; i < data.length; ++i) { - // final JsArray existingColumnData = Js.uncheckedCast(data[i]); - // if (existingColumnData == null) { - // continue; - // } - // - // final long internalOffsetAsLong = j + shiftDelta; - // if (internalOffsetAsLong >= 0 && internalOffsetAsLong < maxLength) { - // // because internalOffsetAsLong is less than maxLen; we know it must be fit in an int - // final int internalOffset = (int) internalOffsetAsLong; - // updated.added.add(internalOffset); - // existingColumnData.setAt(internalOffset, existingColumnData.getAt(j)); - // } - // - // updated.removed.add(j); - // existingColumnData.setAt(j, NULL_SENTINEL); - // } - // } - // } - // } - // - // DeltaUpdates.ColumnModifications[] serializedModifications = updates.getSerializedModifications(); - // for (int modifiedColIndex = 0; modifiedColIndex < serializedModifications.length; modifiedColIndex++) { - // final DeltaUpdates.ColumnModifications modifiedColumn = serializedModifications[modifiedColIndex]; - // final OfLong it = modifiedColumn == null ? 
null : modifiedColumn.getRowsIncluded().indexIterator(); - // - // if (it == null || !it.hasNext()) { - // continue; - // } - // - // // look for a local Column which matches this index so we know how to clean it - // final Column column = columns.find((c, i1, i2) -> c.getIndex() == modifiedColumn.getColumnIndex()); - // final JsArray updatedColumnData = - // Js.uncheckedCast(cleanData(modifiedColumn.getValues().getData(), column)); - // final JsArray existingColumnData = Js.uncheckedCast(data[modifiedColumn.getColumnIndex()]); - // if (updatedColumnData.length == 0) { - // continue; - // } - // - // // for each change provided for this column, replace the values in our store - // int i = 0; - // while (it.hasNext()) { - // long modifiedOffset = it.nextLong(); - // int internalOffset = (int) (modifiedOffset - offset); - // if (internalOffset < 0 || internalOffset >= maxLength) { - // i++; - // continue;// data we don't need to see, either meant for another table, or we just sent a viewport - // // update - // } - // existingColumnData.setAt(internalOffset, updatedColumnData.getAtAsAny(i)); - // updated.modified.add(internalOffset); - // i++; - // } - // } - // - // if (!updates.getIncludedAdditions().isEmpty()) { - // DeltaUpdates.ColumnAdditions[] serializedAdditions = updates.getSerializedAdditions(); - // for (int addedColIndex = 0; addedColIndex < serializedAdditions.length; addedColIndex++) { - // DeltaUpdates.ColumnAdditions addedColumn = serializedAdditions[addedColIndex]; - // - // Column column = columns.find((c, i1, i2) -> c.getIndex() == addedColumn.getColumnIndex()); - // final JsArray addedColumnData = - // Js.uncheckedCast(cleanData(addedColumn.getValues().getData(), column)); - // final JsArray existingColumnData = Js.uncheckedCast(data[addedColumn.getColumnIndex()]); - // if (addedColumnData.length == 0) { - // continue; - // } - // - // int i = 0; - // OfLong it = updates.getIncludedAdditions().indexIterator(); - // while (it.hasNext()) { - // long addedOffset = it.nextLong(); - // int internalOffset = (int) (addedOffset - offset); - // if (internalOffset < 0 || internalOffset >= maxLength) { - // i++; - // continue;// data we don't need to see, either meant for another table, or we just sent a - // // viewport update - // } - // assert internalOffset < existingColumnData.length; - // - // Any existing = existingColumnData.getAt(internalOffset); - // if (existing == NULL_SENTINEL || internalOffset >= length) { - // // space was set aside or was left at the end of the array for this value, it is a new addition - // updated.added.add(internalOffset); - // } else { - // // we're overwriting some existing value - // updated.modified.add(internalOffset); - // } - // existingColumnData.setAt(internalOffset, addedColumnData.getAtAsAny(i)); - // i++; - // } - // } - // } - // - // // exclude added items from being marked as modified, since we're hiding shifts from api consumers - // updated.modified.removeAll(updated.added); - // - // // Any position which was both added and removed should instead be marked as modified, this cleans - // // up anything excluded above that didn't otherwise make sense - // for (Iterator it = updated.removed.iterator(); it.hasNext();) { - // int ii = it.next(); - // if (updated.added.remove(ii)) { - // it.remove(); - // updated.modified.add(ii); - // } - // } - // - // length = length + updated.added.size() - updated.removed.size(); - // assert 0 <= length && length <= maxLength; - // - // // Viewport footprint should be small enough that we can 
afford to see if this update corrupted our view of the - // // world: - // assert !dataContainsNullSentinels(); - // - // return updated; - // } - - private boolean dataContainsNullSentinels() { - for (int i = 0; i < data.length; i++) { - JsArray existingColumnData = Js.uncheckedCast(data[i]); - if (existingColumnData == null) { - continue; - } - for (int j = 0; j < length; ++j) { - if (existingColumnData.getAt(j) == NULL_SENTINEL) { - return true; - } - } - } - return false; - } } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java index e4e76075c1a..13ddf31f7f5 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java @@ -32,6 +32,7 @@ import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.ticket_pb.TypedTicket; import io.deephaven.web.client.api.*; import io.deephaven.web.client.api.barrage.WebBarrageUtils; +import io.deephaven.web.client.api.barrage.data.WebBarrageSubscription; import io.deephaven.web.client.api.barrage.def.ColumnDefinition; import io.deephaven.web.client.api.barrage.def.InitialTableDefinition; import io.deephaven.web.client.api.barrage.stream.BiDiStream; @@ -231,6 +232,35 @@ private TreeViewportData(double offset, long viewportSize, double treeSize, Colu } } + @Override + public JsRangeSet getFullIndex() { + return new JsRangeSet(RangeSet.ofRange((long) offset, (long) (offset + rows.length - 1))); + } + + /** + * Always returns empty for TreeTable. + */ + @Override + public JsRangeSet getAdded() { + return new JsRangeSet(RangeSet.empty()); + } + + /** + * Always returns empty for TreeTable. + */ + @Override + public JsRangeSet getRemoved() { + return new JsRangeSet(RangeSet.empty()); + } + + /** + * Always returns empty for TreeTable. 
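+ * (Each update arrives as a complete viewport snapshot in this implementation, so per-row deltas are not tracked.)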
+ */ + @Override + public JsRangeSet getModified() { + return new JsRangeSet(RangeSet.empty()); + } + @Override public Row get(long index) { return getRows().getAt((int) index); @@ -262,7 +292,7 @@ public Format getFormat(long index, Column column) { } @JsProperty - public double getOffset() { + public Double getOffset() { return offset; } diff --git a/web/client-api/src/test/java/io/deephaven/web/client/api/subscription/ViewportTestGwt.java b/web/client-api/src/test/java/io/deephaven/web/client/api/subscription/ViewportTestGwt.java index 19e1a1ddf4f..8a1959fa671 100644 --- a/web/client-api/src/test/java/io/deephaven/web/client/api/subscription/ViewportTestGwt.java +++ b/web/client-api/src/test/java/io/deephaven/web/client/api/subscription/ViewportTestGwt.java @@ -59,28 +59,28 @@ public void testViewportOnStaticTable() { // table has 100 rows, go through each page of 25, make sure the offset and length is sane table.setViewport(0, 24, null); return assertUpdateReceived(table, viewport -> { - assertEquals(0, (long) viewport.getOffset()); + assertEquals(0d, viewport.getOffset()); assertEquals(25, viewport.getRows().length); }, 2100); }) .then(table -> { table.setViewport(25, 49, null); return assertUpdateReceived(table, viewport -> { - assertEquals(25, (long) viewport.getOffset()); + assertEquals(25d, viewport.getOffset()); assertEquals(25, viewport.getRows().length); }, 2101); }) .then(table -> { table.setViewport(50, 74, null); return assertUpdateReceived(table, viewport -> { - assertEquals(50, (long) viewport.getOffset()); + assertEquals(50d, viewport.getOffset()); assertEquals(25, viewport.getRows().length); }, 2102); }) .then(table -> { table.setViewport(75, 99, null); return assertUpdateReceived(table, viewport -> { - assertEquals(75, (long) viewport.getOffset()); + assertEquals(75d, viewport.getOffset()); assertEquals(25, viewport.getRows().length); }, 2103); }) @@ -361,7 +361,7 @@ public void testRapidChangingViewport() { table.setViewport(0, 10, null); table.setViewport(5, 14, null); return assertUpdateReceived(table, viewport -> { - assertEquals(5, (int) viewport.getOffset()); + assertEquals(5d, viewport.getOffset()); assertEquals(10, (int) viewport.getRows().length); }, 1008); }) @@ -373,7 +373,7 @@ public void testRapidChangingViewport() { .then(table -> { table.setViewport(6, 14, null); return assertUpdateReceived(table, viewport -> { - assertEquals(6, (int) viewport.getOffset()); + assertEquals(6d, viewport.getOffset()); assertEquals(9, (int) viewport.getRows().length); }, 1009); }) @@ -387,10 +387,7 @@ public void testRapidChangingViewport() { table.setViewport(7, 17, null); return assertUpdateReceived(table, ignored -> { }, 1010) - .then(waitFor(JsTable.DEBOUNCE_TIME * 2)) .then(t -> { - // force the debounce to be processed - t.processSnapshot(); t.getViewportData().then(vp -> { // assertEquals(7, (int) vp.getOffset()); assertEquals(11, (int) vp.getRows().length); From 07f3c088ceab941e268ddbcdd7f71245e0140534 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 16 Apr 2024 15:27:04 -0500 Subject: [PATCH 026/219] Draft sharing with tree --- .../AbstractTableSubscription.java | 35 +- .../web/client/api/tree/JsTreeTable.java | 689 +++++++++--------- .../client/api/HierarchicalTableTestGwt.java | 2 +- 3 files changed, 373 insertions(+), 353 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java index 
cbca8970dc4..10f8af94411 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java @@ -6,9 +6,9 @@ import com.google.flatbuffers.FlatBufferBuilder; import com.vertispan.tsdefs.annotations.TsInterface; import com.vertispan.tsdefs.annotations.TsName; +import com.vertispan.tsdefs.annotations.TsTypeRef; import elemental2.core.JsArray; import elemental2.dom.CustomEventInit; -import elemental2.dom.DomGlobal; import io.deephaven.barrage.flatbuf.BarrageMessageType; import io.deephaven.extensions.barrage.BarrageSubscriptionOptions; import io.deephaven.extensions.barrage.ColumnConversionMode; @@ -25,6 +25,7 @@ import io.deephaven.web.client.api.barrage.WebBarrageUtils; import io.deephaven.web.client.api.barrage.data.WebBarrageSubscription; import io.deephaven.web.client.api.barrage.stream.BiDiStream; +import io.deephaven.web.client.api.barrage.stream.ResponseStreamWrapper; import io.deephaven.web.client.fu.JsSettings; import io.deephaven.web.client.state.ClientTableState; import io.deephaven.web.shared.data.RangeSet; @@ -32,12 +33,11 @@ import jsinterop.annotations.JsProperty; import jsinterop.base.Any; import jsinterop.base.Js; +import org.jetbrains.annotations.NotNull; import java.io.IOException; import java.util.BitSet; -import static io.deephaven.web.client.api.subscription.ViewportData.NO_ROW_FORMAT_COLUMN; - public abstract class AbstractTableSubscription extends HasEventHandling { /** * Indicates that some new data is available on the client, either an initial snapshot or a delta update. The @@ -54,7 +54,7 @@ public abstract class AbstractTableSubscription extends HasEventHandling { private BarrageSubscriptionOptions options; private final BiDiStream doExchange; - private final WebBarrageSubscription barrageSubscription; + protected final WebBarrageSubscription barrageSubscription; private boolean subscriptionReady; @@ -62,7 +62,7 @@ public AbstractTableSubscription(ClientTableState state, WorkerConnection connec state.retain(this); this.state = state; this.connection = connection; - rowStyleColumn = state.getRowFormatColumn() == null ? NO_ROW_FORMAT_COLUMN + rowStyleColumn = state.getRowFormatColumn() == null ? 
TableData.NO_ROW_FORMAT_COLUMN : state.getRowFormatColumn().getIndex(); doExchange = @@ -75,6 +75,7 @@ public AbstractTableSubscription(ClientTableState state, WorkerConnection connec doExchange.onData(this::onFlightData); // TODO handle stream ending, error + doExchange.onEnd(this::onStreamEnd); // TODO going to need "started change" so we don't let data escape when still updating barrageSubscription = WebBarrageSubscription.subscribe(state, this::onViewportChange, this::onDataChanged); @@ -83,7 +84,7 @@ public AbstractTableSubscription(ClientTableState state, WorkerConnection connec protected void sendBarrageSubscriptionRequest(RangeSet viewport, JsArray columns, Double updateIntervalMs, boolean isReverseViewport) { this.columns = columns; - this.columnBitSet = state.makeBitset(Js.uncheckedCast(columns)); + this.columnBitSet = makeColumnBitset(columns); // TODO validate that we can change updateinterval this.options = BarrageSubscriptionOptions.builder() .batchSize(WebBarrageSubscription.BATCH_SIZE) @@ -104,6 +105,10 @@ protected void sendBarrageSubscriptionRequest(RangeSet viewport, JsArray doExchange.send(subscriptionRequest); } + protected BitSet makeColumnBitset(JsArray columns) { + return state.makeBitset(Js.uncheckedCast(columns)); + } + protected ClientTableState state() { return state; } @@ -146,7 +151,7 @@ protected void notifyUpdate(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet t @TsInterface @TsName(namespace = "dh") public class SubscriptionRow implements TableData.Row { - private final long index; + protected final long index; public LongWrapper indexCached; public SubscriptionRow(long index) { @@ -224,14 +229,13 @@ public Double getOffset() { * @return {@link SubscriptionRow} array. */ @Override - public JsArray getRows() { + public JsArray<@TsTypeRef(SubscriptionRow.class) ? 
extends SubscriptionRow> getRows() { if (allRows == null) { allRows = new JsArray<>(); RangeSet rowSet = barrageSubscription.getCurrentRowSet(); RangeSet positions = transformRowsetForConsumer(rowSet); - DomGlobal.console.log(rowSet, positions); positions.indexIterator().forEachRemaining((long index) -> { - allRows.push(new SubscriptionRow(index)); + allRows.push(makeRow(index)); }); if (JsSettings.isDevMode()) { assert allRows.length == positions.size(); @@ -240,6 +244,10 @@ public JsArray getRows() { return allRows; } + protected SubscriptionRow makeRow(long index) { + return new SubscriptionRow(index); + } + @Override public Row get(int index) { return this.get((long) index); @@ -253,7 +261,7 @@ public Row get(int index) { */ @Override public SubscriptionRow get(long index) { - return new SubscriptionRow(index); + return makeRow(index); } @Override @@ -384,6 +392,11 @@ private void onFlightData(FlightData data) { } } + protected void onStreamEnd(ResponseStreamWrapper.Status status) { + // TODO handle stream end/error + } + + /** * The columns that were subscribed to when this subscription was created * diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java index 13ddf31f7f5..8d03f0a6682 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java @@ -11,6 +11,7 @@ import elemental2.core.JsArray; import elemental2.core.JsObject; import elemental2.core.Uint8Array; +import elemental2.dom.CustomEvent; import elemental2.dom.CustomEventInit; import elemental2.dom.DomGlobal; import elemental2.promise.IThenable; @@ -32,20 +33,20 @@ import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.ticket_pb.TypedTicket; import io.deephaven.web.client.api.*; import io.deephaven.web.client.api.barrage.WebBarrageUtils; -import io.deephaven.web.client.api.barrage.data.WebBarrageSubscription; import io.deephaven.web.client.api.barrage.def.ColumnDefinition; import io.deephaven.web.client.api.barrage.def.InitialTableDefinition; import io.deephaven.web.client.api.barrage.stream.BiDiStream; +import io.deephaven.web.client.api.barrage.stream.ResponseStreamWrapper; import io.deephaven.web.client.api.filter.FilterCondition; import io.deephaven.web.client.api.impl.TicketAndPromise; import io.deephaven.web.client.api.lifecycle.HasLifecycle; +import io.deephaven.web.client.api.subscription.AbstractTableSubscription; import io.deephaven.web.client.api.subscription.ViewportData; -import io.deephaven.web.client.api.subscription.ViewportRow; -import io.deephaven.web.client.api.tree.JsTreeTable.TreeViewportData.TreeRow; import io.deephaven.web.client.api.widget.JsWidget; import io.deephaven.web.client.fu.JsItr; import io.deephaven.web.client.fu.JsLog; import io.deephaven.web.client.fu.LazyPromise; +import io.deephaven.web.client.state.ClientTableState; import io.deephaven.web.shared.data.*; import io.deephaven.web.shared.data.columns.ColumnData; import javaemul.internal.annotations.DoNotAutobox; @@ -63,13 +64,13 @@ import org.apache.arrow.flatbuf.RecordBatch; import org.apache.arrow.flatbuf.Schema; import org.gwtproject.nio.TypedArrayHelper; +import org.jetbrains.annotations.NotNull; import java.util.*; import java.util.function.Function; import java.util.stream.Collectors; import static io.deephaven.web.client.api.barrage.WebBarrageUtils.serializeRanges; -import static 
io.deephaven.web.client.api.subscription.ViewportData.NO_ROW_FORMAT_COLUMN; /** * Behaves like a {@link JsTable} externally, but data, state, and viewports are managed by an entirely different @@ -108,7 +109,7 @@ * roll-up table, the totals only include leaf nodes (as non-leaf nodes are generated through grouping the contents of * the original table). Roll-ups also have the {@link JsRollupConfig#includeConstituents} property, indicating that a * {@link Column} in the tree may have a {@link Column#getConstituentType()} property reflecting that the type of cells - * where {@link TreeRow#hasChildren()} is false will be different from usual. + * where {@link TreeSubscription.TreeRow#hasChildren()} is false will be different from usual. * */ @JsType(namespace = "dh", name = "TreeTable") @@ -126,245 +127,6 @@ public class JsTreeTable extends HasLifecycle implements ServerObject { private static final double ACTION_EXPAND_WITH_DESCENDENTS = 0b011; private static final double ACTION_COLLAPSE = 0b100; - @TsInterface - @TsName(namespace = "dh") - public class TreeViewportData implements TableData { - private final Boolean[] expandedColumn; - private final int[] depthColumn; - private final double offset; - private final double treeSize; - - private final JsArray columns; - private final JsArray rows; - - private TreeViewportData(double offset, long viewportSize, double treeSize, ColumnData[] dataColumns, - Column[] columns) { - this.offset = offset; - this.treeSize = treeSize; - this.columns = JsObject.freeze(Js.cast(Js.>uncheckedCast(columns).slice())); - - // Unlike ViewportData, assume that we own this copy of the data and can mutate at will. As such, - // we'll just clean the data that the requested columns know about for now. - // TODO to improve this, we can have synthetic columns to handle data that wasn't requested/expected, - // and then can share code with ViewportData - Object[] data = new Object[dataColumns.length]; - - expandedColumn = Js.uncheckedCast( - ViewportData.cleanData(dataColumns[rowExpandedCol.getIndex()].getData(), rowExpandedCol)); - depthColumn = Js.uncheckedCast( - ViewportData.cleanData(dataColumns[rowDepthCol.getIndex()].getData(), rowDepthCol)); - - int constituentDepth = keyColumns.length + 2; - - // Without modifying this.columns (copied and frozen), make sure our key columns are present - // in the list of columns that we will copy data for the viewport - keyColumns.forEach((col, p1, p2) -> { - if (this.columns.indexOf(col) == -1) { - columns[columns.length] = col; - } - return null; - }); - - for (int i = 0; i < columns.length; i++) { - Column c = columns[i]; - int index = c.getIndex(); - - // clean the data, since it will be exposed to the client - data[index] = ViewportData.cleanData(dataColumns[index].getData(), c); - if (c.getStyleColumnIndex() != null) { - data[c.getStyleColumnIndex()] = dataColumns[c.getStyleColumnIndex()].getData(); - } - if (c.getFormatStringColumnIndex() != null) { - data[c.getFormatStringColumnIndex()] = dataColumns[c.getFormatStringColumnIndex()].getData(); - } - - // if there is a matching constituent column array, clean it and copy from it - Column sourceColumn = sourceColumns.get(c.getName()); - if (sourceColumn != null) { - ColumnData constituentColumn = dataColumns[sourceColumn.getIndex()]; - if (constituentColumn != null) { - JsArray cleanConstituentColumn = - Js.uncheckedCast(ViewportData.cleanData(constituentColumn.getData(), sourceColumn)); - // Overwrite the data with constituent values, if any - // We use 
cleanConstituentColumn to find max item rather than data[index], since we - // are okay stopping at the last constituent value, in case the server sends shorter - // arrays. - for (int rowIndex = 0; rowIndex < cleanConstituentColumn.length; rowIndex++) { - if (depthColumn[rowIndex] == constituentDepth) - Js.asArrayLike(data[index]).setAt(rowIndex, cleanConstituentColumn.getAt(rowIndex)); - } - - if (sourceColumn.getStyleColumnIndex() != null) { - assert c.getStyleColumnIndex() != null; - ColumnData styleData = dataColumns[sourceColumn.getStyleColumnIndex()]; - if (styleData != null) { - JsArray styleArray = Js.cast(styleData.getData()); - for (int rowIndex = 0; rowIndex < styleArray.length; rowIndex++) { - if (depthColumn[rowIndex] == constituentDepth) - Js.asArrayLike(data[c.getStyleColumnIndex()]).setAt(rowIndex, - styleArray.getAt(rowIndex)); - } - } - } - if (sourceColumn.getFormatStringColumnIndex() != null) { - assert c.getFormatStringColumnIndex() != null; - ColumnData formatData = dataColumns[sourceColumn.getFormatStringColumnIndex()]; - if (formatData != null) { - JsArray formatArray = Js.cast(formatData.getData()); - for (int rowIndex = 0; rowIndex < formatArray.length; rowIndex++) { - if (depthColumn[rowIndex] == constituentDepth) { - Js.asArrayLike(data[c.getFormatStringColumnIndex()]).setAt(rowIndex, - formatArray.getAt(rowIndex)); - } - } - } - } - } - } - } - if (rowFormatColumn != NO_ROW_FORMAT_COLUMN) { - data[rowFormatColumn] = dataColumns[rowFormatColumn].getData(); - } - - rows = new JsArray<>(); - for (int i = 0; i < viewportSize; i++) { - rows.push(new TreeRow(i, data, data[rowFormatColumn])); - } - } - - @Override - public JsRangeSet getFullIndex() { - return new JsRangeSet(RangeSet.ofRange((long) offset, (long) (offset + rows.length - 1))); - } - - /** - * Always returns empty for TreeTable. - */ - @Override - public JsRangeSet getAdded() { - return new JsRangeSet(RangeSet.empty()); - } - - /** - * Always returns empty for TreeTable. - */ - @Override - public JsRangeSet getRemoved() { - return new JsRangeSet(RangeSet.empty()); - } - - /** - * Always returns empty for TreeTable. - */ - @Override - public JsRangeSet getModified() { - return new JsRangeSet(RangeSet.empty()); - } - - @Override - public Row get(long index) { - return getRows().getAt((int) index); - } - - @Override - public Row get(int index) { - return getRows().getAt((int) index); - } - - @Override - public Any getData(int index, Column column) { - return getRows().getAt(index).get(column); - } - - @Override - public Any getData(long index, Column column) { - return getRows().getAt((int) index).get(column); - } - - @Override - public Format getFormat(int index, Column column) { - return getRows().getAt(index).getFormat(column); - } - - @Override - public Format getFormat(long index, Column column) { - return getRows().getAt((int) index).getFormat(column); - } - - @JsProperty - public Double getOffset() { - return offset; - } - - @JsProperty - public JsArray getColumns() { - return columns; - } - - @JsProperty - public JsArray getRows() { - return rows; - } - - public double getTreeSize() { - return treeSize; - } - - /** - * Row implementation that also provides additional read-only properties. represents visible rows in the table, - * but with additional properties to reflect the tree structure. 
- */ - @TsInterface - @TsName(namespace = "dh") - public class TreeRow extends ViewportRow { - public TreeRow(int offsetInSnapshot, Object[] dataColumns, Object rowStyleColumn) { - super(offsetInSnapshot, dataColumns, rowStyleColumn); - } - - /** - * True if this node is currently expanded to show its children; false otherwise. Those children will be the - * rows below this one with a greater depth than this one - * - * @return boolean - */ - @JsProperty(name = "isExpanded") - public boolean isExpanded() { - return expandedColumn[offsetInSnapshot] == Boolean.TRUE; - } - - /** - * True if this node has children and can be expanded; false otherwise. Note that this value may change when - * the table updates, depending on the table's configuration - * - * @return boolean - */ - @JsProperty(name = "hasChildren") - public boolean hasChildren() { - return expandedColumn[offsetInSnapshot] != null; - } - - /** - * The number of levels above this node; zero for top level nodes. Generally used by the UI to indent the - * row and its expand/collapse icon - * - * @return int - */ - @JsProperty(name = "depth") - public int depth() { - return depthColumn[offsetInSnapshot]; - } - - public void appendKeyData(Object[][] keyTableData, double action) { - int i; - for (i = 0; i < keyColumns.length; i++) { - Js.>cast(keyTableData[i]).push(keyColumns.getAt(i).get(this)); - } - Js.>cast(keyTableData[i++]).push((double) depth()); - Js.>cast(keyTableData[i++]).push(action); - } - } - } - /** * Ordered series of steps that must be performed when changes are made to the table. When any change is applied, * all subsequent steps must be performed as well. @@ -404,7 +166,7 @@ private enum RebuildStep { private Promise keyTable; private TicketAndPromise viewTicket; - private Promise> stream; + private Promise stream; // the "next" set of filters/sorts that we'll use. these either are "==" to the above fields, or are scheduled // to replace them soon. @@ -417,7 +179,7 @@ private enum RebuildStep { private Column[] columns; private int updateInterval = 1000; - private TreeViewportData currentViewportData; + private TreeSubscription.TreeViewportData currentViewportData; private boolean alwaysFireNextEvent = false; @@ -630,6 +392,242 @@ private TicketAndPromise makeView(TicketAndPromise prevTicket) { return viewTicket; } + class TreeSubscription extends AbstractTableSubscription { + @TsInterface + @TsName(namespace = "dh") + public class TreeViewportData extends AbstractTableSubscription.UpdateEventData { + private final double treeSize; + +// private final JsArray columns; + private final int constituentDepth; + + private TreeViewportData(RangeSet added, RangeSet removed, RangeSet modified, ShiftedRange[] shifted) { + super(added, removed, modified, shifted); + + +// this.offset = offset; + this.treeSize = barrageSubscription.getCurrentRowSet().size(); + constituentDepth = keyColumns.length + 2; +// this.columns = JsObject.freeze(Js.cast(Js.>uncheckedCast(columns).slice())); +// +// // Unlike ViewportData, assume that we own this copy of the data and can mutate at will. As such, +// // we'll just clean the data that the requested columns know about for now. 
+// // TODO to improve this, we can have synthetic columns to handle data that wasn't requested/expected, +// // and then can share code with ViewportData +// Object[] data = new Object[dataColumns.length]; +// +// +// // Without modifying this.columns (copied and frozen), make sure our key columns are present +// // in the list of columns that we will copy data for the viewport +// keyColumns.forEach((col, p1, p2) -> { +// if (this.columns.indexOf(col) == -1) { +// columns[columns.length] = col; +// } +// return null; +// }); + } + +// /** +// * Always returns empty for TreeTable. +// */ +// @Override +// public JsRangeSet getAdded() { +// return new JsRangeSet(RangeSet.empty()); +// } +// +// /** +// * Always returns empty for TreeTable. +// */ +// @Override +// public JsRangeSet getRemoved() { +// return new JsRangeSet(RangeSet.empty()); +// } +// +// /** +// * Always returns empty for TreeTable. +// */ +// @Override +// public JsRangeSet getModified() { +// return new JsRangeSet(RangeSet.empty()); +// } + + @Override + public Any getData(int index, Column column) { + Column sourceColumn = sourceColumns.get(column.getName()); + if (sourceColumn == null) { + // no constituent column, call super + return super.getData(index, column); + } + if (barrageSubscription.getData(index, rowDepthCol.getIndex()) != constituentDepth) { + // not at constituent depth, call super + return super.getData(index, column); + } + // read source col instead + return super.getData(index, sourceColumn); + } + + @Override + public Any getData(long index, Column column) { + Column sourceColumn = sourceColumns.get(column.getName()); + if (sourceColumn == null) { + // no constituent column, call super + return super.getData(index, column); + } + if (barrageSubscription.getData(index, rowDepthCol.getIndex()) != constituentDepth) { + // not at constituent depth, call super + return super.getData(index, column); + } + // read source col instead + return super.getData(index, sourceColumn); + } + + @Override + public Format getFormat(int index, Column column) { + Column sourceColumn = sourceColumns.get(column.getName()); + if (sourceColumn == null) { + // no constituent column, call super + return super.getFormat(index, column); + } + if (barrageSubscription.getData(index, rowDepthCol.getIndex()) != constituentDepth) { + // not at constituent depth, call super + return super.getFormat(index, column); + } + // read source col instead + return super.getFormat(index, sourceColumn); + } + + @Override + public Format getFormat(long index, Column column) { + Column sourceColumn = sourceColumns.get(column.getName()); + if (sourceColumn == null) { + // no constituent column, call super + return super.getFormat(index, column); + } + if (barrageSubscription.getData(index, rowDepthCol.getIndex()) != constituentDepth) { + // not at constituent depth, call super + return super.getFormat(index, column); + } + // read source col instead + return super.getFormat(index, sourceColumn); + } + +// @JsProperty +// public JsArray getColumns() { +// // This looks like its superclass, but we're actually returning a different field +// return columns; +// } + + @JsProperty + @Override + public JsArray getRows() { + return (JsArray) super.getRows(); + } + + @Override + protected SubscriptionRow makeRow(long index) { + return new TreeRow(index); + } + + public double getTreeSize() { + return treeSize; + } + } + + /** + * Row implementation that also provides additional read-only properties. 
represents visible rows in the table, + * but with additional properties to reflect the tree structure. + */ + @TsInterface + @TsName(namespace = "dh") + public class TreeRow extends SubscriptionRow { + + public TreeRow(long index) { + super(index); + } + + /** + * True if this node is currently expanded to show its children; false otherwise. Those children will be the + * rows below this one with a greater depth than this one. + * + * @return boolean + */ + @JsProperty(name = "isExpanded") + public boolean isExpanded() { + return barrageSubscription.getData(index, rowExpandedCol.getIndex()) == Boolean.TRUE; + } + + /** + * True if this node has children and can be expanded; false otherwise. Note that this value may change when + * the table updates, depending on the table's configuration. + * + * @return boolean + */ + @JsProperty(name = "hasChildren") + public boolean hasChildren() { + return barrageSubscription.getData(index, rowExpandedCol.getIndex()) != null; + } + + /** + * The number of levels above this node; zero for top level nodes. Generally used by the UI to indent the + * row and its expand/collapse icon. + * + * @return int + */ + @JsProperty(name = "depth") + public int depth() { + return barrageSubscription.getData(index, rowDepthCol.getIndex()); + } + + public void appendKeyData(Object[][] keyTableData, double action) { + int i; + for (i = 0; i < keyColumns.length; i++) { + Js.>cast(keyTableData[i]).push(keyColumns.getAt(i).get(this)); + } + Js.>cast(keyTableData[i++]).push((double) depth()); + Js.>cast(keyTableData[i++]).push(action); + } + } + + // TODO move to superclass and check on viewport change + private RangeSet serverViewport; + + public TreeSubscription(ClientTableState state, WorkerConnection connection) { + super(state, connection); + } + + @Override + protected BitSet makeColumnBitset(JsArray columns) { + return makeColumnSubscriptionBitset(); + } + + @Override + protected void onStreamEnd(ResponseStreamWrapper.Status status) { + super.onStreamEnd(status); + JsTreeTable.this.stream = null; + if (!status.isOk()) { + failureHandled(status.getDetails()); + } + } + + public void setViewport(double firstRow, double lastRow, @JsOptional @JsNullable JsArray columns, + @JsNullable @JsOptional Double updateInterval) { + + //TODO + + sendBarrageSubscriptionRequest(RangeSet.ofRange((long) firstRow, (long) lastRow), Js.uncheckedCast(columns), updateInterval, false); + } + + @Override + protected void notifyUpdate(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet totalMods, ShiftedRange[] shifted) { + // TODO Rewrite shifts as adds/removed/modifies? in the past we ignored them... + TreeViewportData detail = new TreeViewportData(rowsAdded, rowsRemoved, totalMods, shifted); + detail.offset = this.serverViewport.getFirstRow(); + CustomEventInit event = CustomEventInit.create(); + event.setDetail(detail); + fireEvent(EVENT_UPDATED, event); + } + } + private void replaceSubscription(RebuildStep step) { // Perform steps required to remove the existing intermediate tickets. // Fall-through between steps is deliberate. 
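The deliberate fall-through means that invalidating any step also redoes every step after it, so derived tickets and the active subscription never outlive the state they were built from. A minimal self-contained sketch of the idiom; every step name except SUBSCRIPTION is invented for illustration:

enum RebuildStepSketch { FILTER, SORT, SUBSCRIPTION }

static void invalidateFrom(RebuildStepSketch step) {
    // No break statements: entering at any case also executes every later case.
    switch (step) {
        case FILTER:
            System.out.println("release filter ticket");
            // deliberate fall-through
        case SORT:
            System.out.println("release sort ticket");
            // deliberate fall-through
        case SUBSCRIPTION:
            System.out.println("close subscription stream");
    }
}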
@@ -652,15 +650,14 @@ private void replaceSubscription(RebuildStep step) { case SUBSCRIPTION: if (stream != null) { stream.then(stream -> { - stream.end(); - stream.cancel(); + stream.close(); return null; }); stream = null; } } - Promise> stream = Promise.resolve(defer()) + Promise stream = Promise.resolve(defer()) .then(ignore -> { makeKeyTable(); TicketAndPromise filter = prepareFilter(); @@ -686,87 +683,98 @@ private void replaceSubscription(RebuildStep step) { columnsBitset, range, alwaysFireEvent); - BiDiStream doExchange = - connection.streamFactory().create( - headers -> connection.flightServiceClient().doExchange(headers), - (first, headers) -> connection.browserFlightServiceClient().openDoExchange(first, - headers), - (next, headers, c) -> connection.browserFlightServiceClient().nextDoExchange(next, - headers, - c::apply), - new FlightData()); - - FlightData subscriptionRequestWrapper = new FlightData(); - FlatBufferBuilder doGetRequest = new FlatBufferBuilder(1024); - int columnsOffset = BarrageSubscriptionRequest.createColumnsVector(doGetRequest, - columnsBitset.toByteArray()); - int viewportOffset = BarrageSubscriptionRequest.createViewportVector(doGetRequest, - serializeRanges(Collections.singleton(range))); - int serializationOptionsOffset = BarrageSubscriptionOptions - .createBarrageSubscriptionOptions(doGetRequest, ColumnConversionMode.Stringify, true, - updateInterval, 0, 0, false); - int tableTicketOffset = - BarrageSubscriptionRequest.createTicketVector(doGetRequest, - Js.uncheckedCast(viewTicket.ticket().getTicket_asU8())); - BarrageSubscriptionRequest.startBarrageSubscriptionRequest(doGetRequest); - BarrageSubscriptionRequest.addTicket(doGetRequest, tableTicketOffset); - BarrageSubscriptionRequest.addColumns(doGetRequest, columnsOffset); - BarrageSubscriptionRequest.addSubscriptionOptions(doGetRequest, serializationOptionsOffset); - BarrageSubscriptionRequest.addViewport(doGetRequest, viewportOffset); - doGetRequest.finish(BarrageSubscriptionRequest.endBarrageSubscriptionRequest(doGetRequest)); - - subscriptionRequestWrapper.setAppMetadata( - WebBarrageUtils.wrapMessage(doGetRequest, BarrageMessageType.BarrageSubscriptionRequest)); - doExchange.send(subscriptionRequestWrapper); - - String[] columnTypes = Arrays.stream(tableDefinition.getColumns()) - .map(ColumnDefinition::getType) - .toArray(String[]::new); - doExchange.onStatus(status -> { - if (!status.isOk()) { - failureHandled(status.getDetails()); - } - }); - doExchange.onEnd(status -> { - this.stream = null; - }); - doExchange.onData(flightData -> { - Message message = - Message.getRootAsMessage(TypedArrayHelper.wrap(flightData.getDataHeader_asU8())); - if (message.headerType() == MessageHeader.Schema) { - // ignore for now, we'll handle this later - return; - } - assert message.headerType() == MessageHeader.RecordBatch; - RecordBatch header = (RecordBatch) message.header(new RecordBatch()); - Uint8Array appMetadataBytes = flightData.getAppMetadata_asU8(); - BarrageUpdateMetadata update = null; - if (appMetadataBytes.length != 0) { - BarrageMessageWrapper barrageMessageWrapper = - BarrageMessageWrapper - .getRootAsBarrageMessageWrapper(TypedArrayHelper.wrap(appMetadataBytes)); - - update = BarrageUpdateMetadata.getRootAsBarrageUpdateMetadata( - barrageMessageWrapper.msgPayloadAsByteBuffer()); - } - TableSnapshot snapshot = WebBarrageUtils.createSnapshot(header, - WebBarrageUtils - .typedArrayToAlignedLittleEndianByteBuffer(flightData.getDataBody_asU8()), - update, true, columnTypes); - - final RangeSet 
includedRows = snapshot.getIncludedRows(); - double offset = firstRow; - assert includedRows.isEmpty() || Js.asInt(offset) == includedRows.getFirstRow(); - TreeViewportData vd = new TreeViewportData( - offset, - includedRows.isEmpty() ? 0 : includedRows.size(), - snapshot.getTableSize(), - snapshot.getDataColumns(), - queryColumns); - - handleUpdate(nextSort, nextFilters, vd, alwaysFireEvent); + + //TODO pass in the current ticket instead of state + TreeSubscription subscription = new TreeSubscription(null, connection); + + subscription.addEventListener(TreeSubscription.EVENT_UPDATED, (CustomEvent data) -> { + TreeSubscription.TreeViewportData detail = (TreeSubscription.TreeViewportData) data.detail; + + handleUpdate(nextSort, nextFilters, detail, alwaysFireEvent); }); - return Promise.resolve(doExchange); + return Promise.resolve(subscription); + +// BiDiStream doExchange = +// connection.streamFactory().create( +// headers -> connection.flightServiceClient().doExchange(headers), +// (first, headers) -> connection.browserFlightServiceClient().openDoExchange(first, +// headers), +// (next, headers, c) -> connection.browserFlightServiceClient().nextDoExchange(next, +// headers, +// c::apply), +// new FlightData()); +// +// FlightData subscriptionRequestWrapper = new FlightData(); +// FlatBufferBuilder doGetRequest = new FlatBufferBuilder(1024); +// int columnsOffset = BarrageSubscriptionRequest.createColumnsVector(doGetRequest, +// columnsBitset.toByteArray()); +// int viewportOffset = BarrageSubscriptionRequest.createViewportVector(doGetRequest, +// serializeRanges(Collections.singleton(range))); +// int serializationOptionsOffset = BarrageSubscriptionOptions +// .createBarrageSubscriptionOptions(doGetRequest, ColumnConversionMode.Stringify, true, +// updateInterval, 0, 0, false); +// int tableTicketOffset = +// BarrageSubscriptionRequest.createTicketVector(doGetRequest, +// Js.uncheckedCast(viewTicket.ticket().getTicket_asU8())); +// BarrageSubscriptionRequest.startBarrageSubscriptionRequest(doGetRequest); +// BarrageSubscriptionRequest.addTicket(doGetRequest, tableTicketOffset); +// BarrageSubscriptionRequest.addColumns(doGetRequest, columnsOffset); +// BarrageSubscriptionRequest.addSubscriptionOptions(doGetRequest, serializationOptionsOffset); +// BarrageSubscriptionRequest.addViewport(doGetRequest, viewportOffset); +// doGetRequest.finish(BarrageSubscriptionRequest.endBarrageSubscriptionRequest(doGetRequest)); +// +// subscriptionRequestWrapper.setAppMetadata( +// WebBarrageUtils.wrapMessage(doGetRequest, BarrageMessageType.BarrageSubscriptionRequest)); +// doExchange.send(subscriptionRequestWrapper); +// +// String[] columnTypes = Arrays.stream(tableDefinition.getColumns()) +// .map(ColumnDefinition::getType) +// .toArray(String[]::new); +// doExchange.onStatus(status -> { +// if (!status.isOk()) { +// failureHandled(status.getDetails()); +// } +// }); +// doExchange.onEnd(status -> { +// this.stream = null; +// }); +// doExchange.onData(flightData -> { +// Message message = +// Message.getRootAsMessage(TypedArrayHelper.wrap(flightData.getDataHeader_asU8())); +// if (message.headerType() == MessageHeader.Schema) { +// // ignore for now, we'll handle this later +// return; +// } +// assert message.headerType() == MessageHeader.RecordBatch; +// RecordBatch header = (RecordBatch) message.header(new RecordBatch()); +// Uint8Array appMetadataBytes = flightData.getAppMetadata_asU8(); +// BarrageUpdateMetadata update = null; +// if (appMetadataBytes.length != 0) { +// BarrageMessageWrapper 
barrageMessageWrapper = +// BarrageMessageWrapper +// .getRootAsBarrageMessageWrapper(TypedArrayHelper.wrap(appMetadataBytes)); +// +// update = BarrageUpdateMetadata.getRootAsBarrageUpdateMetadata( +// barrageMessageWrapper.msgPayloadAsByteBuffer()); +// } +// TableSnapshot snapshot = WebBarrageUtils.createSnapshot(header, +// WebBarrageUtils +// .typedArrayToAlignedLittleEndianByteBuffer(flightData.getDataBody_asU8()), +// update, true, columnTypes); +// +// final RangeSet includedRows = snapshot.getIncludedRows(); +// double offset = firstRow; +// assert includedRows.isEmpty() || Js.asInt(offset) == includedRows.getFirstRow(); +// TreeSubscription.TreeViewportData vd = new TreeSubscription.TreeViewportData( +// offset, +// includedRows.size(), +// snapshot.getTableSize(), +// snapshot.getDataColumns(), +// queryColumns); +// +// handleUpdate(nextSort, nextFilters, vd, alwaysFireEvent); +// }); +// return Promise.resolve(doExchange); }); stream.catch_(err -> { // if this is the active attempt at a subscription, report the error @@ -789,7 +797,7 @@ private IThenable.ThenOnFulfilledCallbackFn defer() { } private void handleUpdate(List nextSort, List nextFilters, - TreeViewportData viewportData, boolean alwaysFireEvent) { + TreeSubscription.TreeViewportData viewportData, boolean alwaysFireEvent) { JsLog.debug("tree table response arrived", viewportData); if (closed) { // ignore @@ -805,7 +813,7 @@ private void handleUpdate(List nextSort, List nextFilters this.filters = nextFilters; if (fireEvent) { - CustomEventInit updatedEvent = CustomEventInit.create(); + CustomEventInit updatedEvent = CustomEventInit.create(); updatedEvent.setDetail(viewportData); fireEvent(EVENT_UPDATED, updatedEvent); } @@ -887,7 +895,7 @@ static RowReferenceUnion of(@DoNotAutobox Object o) { @JsOverlay default boolean isTreeRow() { - return this instanceof TreeRow; + return this instanceof TreeSubscription.TreeRow; } @JsOverlay @@ -897,7 +905,7 @@ default boolean isNumber() { @JsOverlay @TsUnionMember - default TreeRow asTreeRow() { + default TreeSubscription.TreeRow asTreeRow() { return Js.cast(this); } @@ -928,9 +936,9 @@ public void setExpanded(RowReferenceUnion row, boolean isExpanded, @JsOptional B action = ACTION_EXPAND; } - final TreeRow r; + final TreeSubscription.TreeRow r; if (row.isNumber()) { - r = currentViewportData.rows.getAt((int) (row.asNumber() - currentViewportData.offset)); + r = currentViewportData.getRows().getAt((int) (row.asNumber() - currentViewportData.offset)); } else if (row.isTreeRow()) { r = row.asTreeRow(); } else { @@ -957,9 +965,9 @@ public void collapseAll() { * @return boolean */ public boolean isExpanded(RowReferenceUnion row) { - final TreeRow r; + final TreeSubscription.TreeRow r; if (row.isNumber()) { - r = currentViewportData.rows.getAt((int) (row.asNumber() - currentViewportData.offset)); + r = currentViewportData.getRows().getAt((int) (row.asNumber() - currentViewportData.offset)); } else if (row.isTreeRow()) { r = row.asTreeRow(); } else { @@ -980,8 +988,8 @@ public void setViewport(double firstRow, double lastRow, @JsOptional @JsNullable replaceSubscription(RebuildStep.SUBSCRIPTION); } - public Promise getViewportData() { - LazyPromise promise = new LazyPromise<>(); + public Promise getViewportData() { + LazyPromise promise = new LazyPromise<>(); if (currentViewportData == null) { // only one of these two will fire, and when they do, they'll remove both handlers. 
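The "only one of these two will fire" comment above is the crux of getViewportData(): resolve immediately when a snapshot is already cached, otherwise arm one-shot handlers and let the next update settle the promise. A rough sketch of that shape follows, using CompletableFuture in place of LazyPromise and omitting the failure arm for brevity; all names here are illustrative.

    import java.util.concurrent.CompletableFuture;
    import java.util.function.Consumer;

    // Illustrative sketch of the resolve-now-or-on-next-update pattern;
    // CompletableFuture stands in for LazyPromise, and the failure arm
    // that the real code also installs is omitted for brevity.
    final class ViewportCache<T> {
        private T current;                  // last delivered snapshot, if any
        private Consumer<T> pendingResolve; // armed while a caller is waiting

        CompletableFuture<T> get() {
            CompletableFuture<T> promise = new CompletableFuture<>();
            if (current != null) {
                promise.complete(current);  // fast path: data already cached
            } else {
                pendingResolve = promise::complete; // settle on next update
            }
            return promise;
        }

        void onUpdate(T data) {
            current = data;
            if (pendingResolve != null) {
                Consumer<T> resolve = pendingResolve;
                pendingResolve = null;      // one-shot: handler removes itself
                resolve.accept(data);
            }
        }
    }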
@@ -1044,8 +1052,7 @@ public void close() { } if (stream != null) { stream.then(stream -> { - stream.end(); - stream.cancel(); + stream.close(); return null; }); stream = null; diff --git a/web/client-api/src/test/java/io/deephaven/web/client/api/HierarchicalTableTestGwt.java b/web/client-api/src/test/java/io/deephaven/web/client/api/HierarchicalTableTestGwt.java index 3e244ba9cd8..209e2b40be7 100644 --- a/web/client-api/src/test/java/io/deephaven/web/client/api/HierarchicalTableTestGwt.java +++ b/web/client-api/src/test/java/io/deephaven/web/client/api/HierarchicalTableTestGwt.java @@ -69,7 +69,7 @@ public void testRefreshingTreeTable() { // Wait for the table to tick such that the first row has children return waitForEventWhere(treeTable, JsTreeTable.EVENT_UPDATED, - (CustomEvent d) -> d.detail.getTreeSize() == 1 + (CustomEvent d) -> d.detail.getTreeSize() == 1 && d.detail.getRows().getAt(0).hasChildren(), 10001).then(data -> { treeTable.expand(JsTreeTable.RowReferenceUnion.of(0), null); From adadaffc9e7bb1338507546b470465c42bbc0ccd Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Thu, 18 Apr 2024 11:59:02 -0500 Subject: [PATCH 027/219] Basic working tree subscriptions --- .../io/deephaven/web/client/api/JsTable.java | 110 ++++---- .../deephaven/web/client/api/TableData.java | 14 +- .../barrage/data/WebBarrageSubscription.java | 33 +++ .../AbstractTableSubscription.java | 12 +- .../client/api/subscription/ViewportData.java | 8 +- .../web/client/api/tree/JsTreeTable.java | 264 ++++++------------ .../web/client/state/ClientTableState.java | 6 +- 7 files changed, 199 insertions(+), 248 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java index d20cf2f6ac2..2606c924a75 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java @@ -1930,61 +1930,61 @@ public WorkerConnection getConnection() { return workerConnection; } -// public void refreshViewport(ClientTableState state, Viewport vp) { -// assert state() == state : "Called refreshViewport with wrong state (" + state + " instead of " + state() + ")"; -// assert state.getResolution() == ClientTableState.ResolutionState.RUNNING -// : "Do not call refreshViewport for a state that is not running! (" + state + ")"; -// -// currentViewportData = null; // ignore any deltas for past viewports -// workerConnection.scheduleCheck(state); -// // now that we've made sure the server knows, if we already know that the viewport is beyond what exists, we -// // can go ahead and fire an update event. 
We're in the onResolved call, so we know the handle has resolved -// // and if size is not -1, then we've already at least gotten the initial snapshot (otherwise, that snapshot -// // will be here soon, and will fire its own event) -// if (state.getSize() != ClientTableState.SIZE_UNINITIALIZED && state.getSize() <= vp.getRows().getFirstRow()) { -// JsLog.debug("Preparing to send a 'fake' update event since " + state.getSize() + "<=" -// + vp.getRows().getFirstRow(), state); -// LazyPromise.runLater(() -> { -// if (state != state()) { -// return; -// } -// -// // get the column expected to be in the snapshot -// JsArray columns = null;// Js.uncheckedCast(getBinding().getColumns()); -// Column[] allColumns = state.getColumns(); -// if (columns == null) { -// columns = Js.uncheckedCast(allColumns); -// } -// // build an array of empty column data for this snapshot -// Object[] dataColumns = new Object[allColumns.length]; -// -// for (int i = 0; i < columns.length; i++) { -// Column c = columns.getAt(i); -// dataColumns[c.getIndex()] = JsData.newArray(c.getType()); -// if (c.getFormatStringColumnIndex() != null) { -// dataColumns[c.getFormatStringColumnIndex()] = JsData.newArray("java.lang.String"); -// } -// if (c.getStyleColumnIndex() != null) { -// dataColumns[c.getStyleColumnIndex()] = JsData.newArray("long"); -// } -// } -// if (currentState.getRowFormatColumn() != null) { -// dataColumns[currentState.getRowFormatColumn().getIndex()] = JsData.newArray("long"); -// } -// -// ViewportData data = new ViewportData(RangeSet.empty(), dataColumns, columns, -// currentState.getRowFormatColumn() == null ? NO_ROW_FORMAT_COLUMN -// : currentState.getRowFormatColumn().getIndex(), -// 0); -// this.currentViewportData = data; -// CustomEventInit updatedEvent = CustomEventInit.create(); -// updatedEvent.setDetail(data); -// JsLog.debug("Sending 'fake' update event since " + state.getSize() + "<=" + vp.getRows().getFirstRow(), -// vp, state); -// fireEvent(EVENT_UPDATED, updatedEvent); -// }); -// } -// } + // public void refreshViewport(ClientTableState state, Viewport vp) { + // assert state() == state : "Called refreshViewport with wrong state (" + state + " instead of " + state() + ")"; + // assert state.getResolution() == ClientTableState.ResolutionState.RUNNING + // : "Do not call refreshViewport for a state that is not running! (" + state + ")"; + // + // currentViewportData = null; // ignore any deltas for past viewports + // workerConnection.scheduleCheck(state); + // // now that we've made sure the server knows, if we already know that the viewport is beyond what exists, we + // // can go ahead and fire an update event. 
We're in the onResolved call, so we know the handle has resolved + // // and if size is not -1, then we've already at least gotten the initial snapshot (otherwise, that snapshot + // // will be here soon, and will fire its own event) + // if (state.getSize() != ClientTableState.SIZE_UNINITIALIZED && state.getSize() <= vp.getRows().getFirstRow()) { + // JsLog.debug("Preparing to send a 'fake' update event since " + state.getSize() + "<=" + // + vp.getRows().getFirstRow(), state); + // LazyPromise.runLater(() -> { + // if (state != state()) { + // return; + // } + // + // // get the column expected to be in the snapshot + // JsArray columns = null;// Js.uncheckedCast(getBinding().getColumns()); + // Column[] allColumns = state.getColumns(); + // if (columns == null) { + // columns = Js.uncheckedCast(allColumns); + // } + // // build an array of empty column data for this snapshot + // Object[] dataColumns = new Object[allColumns.length]; + // + // for (int i = 0; i < columns.length; i++) { + // Column c = columns.getAt(i); + // dataColumns[c.getIndex()] = JsData.newArray(c.getType()); + // if (c.getFormatStringColumnIndex() != null) { + // dataColumns[c.getFormatStringColumnIndex()] = JsData.newArray("java.lang.String"); + // } + // if (c.getStyleColumnIndex() != null) { + // dataColumns[c.getStyleColumnIndex()] = JsData.newArray("long"); + // } + // } + // if (currentState.getRowFormatColumn() != null) { + // dataColumns[currentState.getRowFormatColumn().getIndex()] = JsData.newArray("long"); + // } + // + // ViewportData data = new ViewportData(RangeSet.empty(), dataColumns, columns, + // currentState.getRowFormatColumn() == null ? NO_ROW_FORMAT_COLUMN + // : currentState.getRowFormatColumn().getIndex(), + // 0); + // this.currentViewportData = data; + // CustomEventInit updatedEvent = CustomEventInit.create(); + // updatedEvent.setDetail(data); + // JsLog.debug("Sending 'fake' update event since " + state.getSize() + "<=" + vp.getRows().getFirstRow(), + // vp, state); + // fireEvent(EVENT_UPDATED, updatedEvent); + // }); + // } + // } public boolean isActive(ClientTableState state) { return currentState == state; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/TableData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/TableData.java index 739b49d26e4..8167f23b5cc 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/TableData.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/TableData.java @@ -17,12 +17,12 @@ import jsinterop.base.Js; /** - * Common interface for various ways of accessing table data and formatting for viewport or non-viewport subscriptions on tables, - * data in trees, and snapshots. + * Common interface for various ways of accessing table data and formatting for viewport or non-viewport subscriptions + * on tables, data in trees, and snapshots. *
<p>
- * Generally speaking, it is more efficient to access data in column-major order, rather than iterating through - * each Row and accessing all columns that it holds. The {@link #getRows()} accessor can be useful to read row data, - * but may incur other costs - it is likely faster to access data by columns using {@link #getData(RowPositionUnion, Column)}. + * Generally speaking, it is more efficient to access data in column-major order, rather than iterating through each Row + * and accessing all columns that it holds. The {@link #getRows()} accessor can be useful to read row data, but may + * incur other costs - it is likely faster to access data by columns using {@link #getData(RowPositionUnion, Column)}. */ /* * Java note: this interface contains some extra overloads that aren't available in JS. Implementations are expected to @@ -73,8 +73,8 @@ default int asInt() { @JsProperty JsRangeSet getModified(); -// @JsProperty -// JsShiftData getShifts(); + // @JsProperty + // JsShiftData getShifts(); @JsProperty JsArray getColumns(); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java index 0ceb9c6bcf3..29f4025c6b5 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java @@ -7,11 +7,13 @@ import elemental2.core.JsArray; import elemental2.dom.DomGlobal; import io.deephaven.barrage.flatbuf.BarrageSubscriptionRequest; +import io.deephaven.chunk.ByteChunk; import io.deephaven.chunk.Chunk; import io.deephaven.chunk.DoubleChunk; import io.deephaven.chunk.IntChunk; import io.deephaven.chunk.LongChunk; import io.deephaven.chunk.ObjectChunk; +import io.deephaven.chunk.ShortChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.web.client.api.barrage.CompressedRangeSetReader; import io.deephaven.web.client.api.barrage.WebBarrageMessage; @@ -56,8 +58,39 @@ public static WebBarrageSubscription subscribe(ClientTableState cts, ViewportCha case Char: break; case Byte: + dataSinks[i] = new WebDataSink() { + @Override + public void fillChunk(Chunk data, PrimitiveIterator.OfLong destIterator) { + ByteChunk byteChunk = data.asByteChunk(); + int i = 0; + while (destIterator.hasNext()) { + arr.setAt((int) destIterator.nextLong(), Js.asAny(byteChunk.get(i++))); + } + } + + @Override + public T get(long position) { + return (T) arr.getAt((int) position); + } + }; break; case Short: + dataSinks[i] = new WebDataSink() { + @Override + public void fillChunk(Chunk data, PrimitiveIterator.OfLong destIterator) { + ShortChunk shortChunk = data.asShortChunk(); + int i = 0; + while (destIterator.hasNext()) { + arr.setAt((int) destIterator.nextLong(), Js.asAny(shortChunk.get(i++))); + } + } + + @Override + public T get(long position) { + return (T) arr.getAt((int) position); + } + }; + break; case Int: dataSinks[i] = new WebDataSink() { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java index 10f8af94411..80de0cd6f5f 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java 
@@ -33,7 +33,6 @@ import jsinterop.annotations.JsProperty; import jsinterop.base.Any; import jsinterop.base.Js; -import org.jetbrains.annotations.NotNull; import java.io.IOException; import java.util.BitSet; @@ -141,8 +140,7 @@ protected void notifyUpdate(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet t transformRowsetForConsumer(rowsAdded), transformRowsetForConsumer(rowsRemoved), transformRowsetForConsumer(totalMods), - barrageSubscription.getServerViewport() != null ? null : shifted - ); + barrageSubscription.getServerViewport() != null ? null : shifted); CustomEventInit event = CustomEventInit.create(); event.setDetail(detail); fireEvent(TableSubscription.EVENT_UPDATED, event); @@ -356,14 +354,14 @@ public JsRangeSet getFullIndex() { } /** - * If a viewport is in use, transforms the given rowset to position space based on - * that viewport. + * If a viewport is in use, transforms the given rowset to position space based on that viewport. + * * @param rowSet the rowset to possibly transform * @return a transformed rowset */ private RangeSet transformRowsetForConsumer(RangeSet rowSet) { if (barrageSubscription.getServerViewport() != null) { - return rowSet.subsetForPositions(barrageSubscription.getServerViewport(), false);//TODO reverse + return rowSet.subsetForPositions(barrageSubscription.getServerViewport(), false);// TODO reverse } return rowSet; } @@ -373,7 +371,7 @@ protected void onViewportChange(RangeSet serverViewport, BitSet serverColumns, b // throw new IllegalStateException("Not a viewport subscription"); // } subscriptionReady = (serverColumns == null && columnBitSet == null) - || (serverColumns == null && columnBitSet.cardinality() == state.getColumns().length) + || (serverColumns == null && columnBitSet.cardinality() == state.getTableDef().getColumns().length) || (serverColumns != null && serverColumns.equals(this.columnBitSet)); } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java index 48064ae10d2..abda3f2bc49 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java @@ -24,10 +24,10 @@ /** * Extends {@link TableData}, but only contains data in the current viewport. The only API change from TableData is that - * ViewportData also contains the offset to this data, so that the actual row number may be - * determined. - *
<p>
Do not assume that the first row in `rows` is the first visible row, because extra rows may be provided - * for easier scrolling without going to the server. + * ViewportData also contains the offset to this data, so that the actual row number may be determined. + *
<p>
+ * Do not assume that the first row in `rows` is the first visible row, because extra rows may be provided for easier + * scrolling without going to the server. */ @TsInterface @TsName(namespace = "dh") diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java index 8d03f0a6682..f669a23f4d8 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java @@ -3,7 +3,6 @@ // package io.deephaven.web.client.api.tree; -import com.google.flatbuffers.FlatBufferBuilder; import com.vertispan.tsdefs.annotations.TsInterface; import com.vertispan.tsdefs.annotations.TsName; import com.vertispan.tsdefs.annotations.TsUnion; @@ -16,39 +15,31 @@ import elemental2.dom.DomGlobal; import elemental2.promise.IThenable; import elemental2.promise.Promise; -import io.deephaven.barrage.flatbuf.BarrageMessageType; -import io.deephaven.barrage.flatbuf.BarrageMessageWrapper; -import io.deephaven.barrage.flatbuf.BarrageSubscriptionOptions; -import io.deephaven.barrage.flatbuf.BarrageSubscriptionRequest; -import io.deephaven.barrage.flatbuf.BarrageUpdateMetadata; -import io.deephaven.barrage.flatbuf.ColumnConversionMode; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb.FlightData; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.hierarchicaltable_pb.HierarchicalTableApplyRequest; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.hierarchicaltable_pb.HierarchicalTableDescriptor; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.hierarchicaltable_pb.HierarchicalTableSourceExportRequest; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.hierarchicaltable_pb.HierarchicalTableViewKeyTableDescriptor; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.hierarchicaltable_pb.HierarchicalTableViewRequest; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.table_pb.Condition; +import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.table_pb.ExportedTableCreationResponse; +import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.table_pb.TableReference; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.ticket_pb.Ticket; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.ticket_pb.TypedTicket; import io.deephaven.web.client.api.*; import io.deephaven.web.client.api.barrage.WebBarrageUtils; import io.deephaven.web.client.api.barrage.def.ColumnDefinition; import io.deephaven.web.client.api.barrage.def.InitialTableDefinition; -import io.deephaven.web.client.api.barrage.stream.BiDiStream; import io.deephaven.web.client.api.barrage.stream.ResponseStreamWrapper; import io.deephaven.web.client.api.filter.FilterCondition; import io.deephaven.web.client.api.impl.TicketAndPromise; import io.deephaven.web.client.api.lifecycle.HasLifecycle; import io.deephaven.web.client.api.subscription.AbstractTableSubscription; -import io.deephaven.web.client.api.subscription.ViewportData; import io.deephaven.web.client.api.widget.JsWidget; import io.deephaven.web.client.fu.JsItr; import io.deephaven.web.client.fu.JsLog; import io.deephaven.web.client.fu.LazyPromise; import io.deephaven.web.client.state.ClientTableState; import io.deephaven.web.shared.data.*; -import io.deephaven.web.shared.data.columns.ColumnData; import 
javaemul.internal.annotations.DoNotAutobox; import jsinterop.annotations.JsIgnore; import jsinterop.annotations.JsNullable; @@ -59,12 +50,7 @@ import jsinterop.annotations.JsType; import jsinterop.base.Any; import jsinterop.base.Js; -import org.apache.arrow.flatbuf.Message; -import org.apache.arrow.flatbuf.MessageHeader; -import org.apache.arrow.flatbuf.RecordBatch; import org.apache.arrow.flatbuf.Schema; -import org.gwtproject.nio.TypedArrayHelper; -import org.jetbrains.annotations.NotNull; import java.util.*; import java.util.function.Function; @@ -143,7 +129,6 @@ private enum RebuildStep { private final InitialTableDefinition tableDefinition; private final Column[] visibleColumns; private final Map columnsByName = new HashMap<>(); - private final int rowFormatColumn; private final Map sourceColumns; private final JsArray keyColumns = new JsArray<>(); private Column rowDepthCol; @@ -247,7 +232,6 @@ public JsTreeTable(WorkerConnection workerConnection, JsWidget widget) { columnDefsByName.get(true).get(definition.getRollupAggregationInputColumn()).getType()); } } - this.rowFormatColumn = rowFormatColumn; this.groupedColumns = JsObject.freeze(groupedColumns); sourceColumns = columnDefsByName.get(false).values().stream() @@ -398,58 +382,58 @@ class TreeSubscription extends AbstractTableSubscription { public class TreeViewportData extends AbstractTableSubscription.UpdateEventData { private final double treeSize; -// private final JsArray columns; + // private final JsArray columns; private final int constituentDepth; private TreeViewportData(RangeSet added, RangeSet removed, RangeSet modified, ShiftedRange[] shifted) { super(added, removed, modified, shifted); -// this.offset = offset; + // this.offset = offset; this.treeSize = barrageSubscription.getCurrentRowSet().size(); constituentDepth = keyColumns.length + 2; -// this.columns = JsObject.freeze(Js.cast(Js.>uncheckedCast(columns).slice())); -// -// // Unlike ViewportData, assume that we own this copy of the data and can mutate at will. As such, -// // we'll just clean the data that the requested columns know about for now. -// // TODO to improve this, we can have synthetic columns to handle data that wasn't requested/expected, -// // and then can share code with ViewportData -// Object[] data = new Object[dataColumns.length]; -// -// -// // Without modifying this.columns (copied and frozen), make sure our key columns are present -// // in the list of columns that we will copy data for the viewport -// keyColumns.forEach((col, p1, p2) -> { -// if (this.columns.indexOf(col) == -1) { -// columns[columns.length] = col; -// } -// return null; -// }); + // this.columns = JsObject.freeze(Js.cast(Js.>uncheckedCast(columns).slice())); + // + // // Unlike ViewportData, assume that we own this copy of the data and can mutate at will. As such, + // // we'll just clean the data that the requested columns know about for now. + // // TODO to improve this, we can have synthetic columns to handle data that wasn't requested/expected, + // // and then can share code with ViewportData + // Object[] data = new Object[dataColumns.length]; + // + // + // // Without modifying this.columns (copied and frozen), make sure our key columns are present + // // in the list of columns that we will copy data for the viewport + // keyColumns.forEach((col, p1, p2) -> { + // if (this.columns.indexOf(col) == -1) { + // columns[columns.length] = col; + // } + // return null; + // }); } -// /** -// * Always returns empty for TreeTable. 
-// */ -// @Override -// public JsRangeSet getAdded() { -// return new JsRangeSet(RangeSet.empty()); -// } -// -// /** -// * Always returns empty for TreeTable. -// */ -// @Override -// public JsRangeSet getRemoved() { -// return new JsRangeSet(RangeSet.empty()); -// } -// -// /** -// * Always returns empty for TreeTable. -// */ -// @Override -// public JsRangeSet getModified() { -// return new JsRangeSet(RangeSet.empty()); -// } + // /** + // * Always returns empty for TreeTable. + // */ + // @Override + // public JsRangeSet getAdded() { + // return new JsRangeSet(RangeSet.empty()); + // } + // + // /** + // * Always returns empty for TreeTable. + // */ + // @Override + // public JsRangeSet getRemoved() { + // return new JsRangeSet(RangeSet.empty()); + // } + // + // /** + // * Always returns empty for TreeTable. + // */ + // @Override + // public JsRangeSet getModified() { + // return new JsRangeSet(RangeSet.empty()); + // } @Override public Any getData(int index, Column column) { @@ -511,17 +495,17 @@ public Format getFormat(long index, Column column) { return super.getFormat(index, sourceColumn); } -// @JsProperty -// public JsArray getColumns() { -// // This looks like its superclass, but we're actually returning a different field -// return columns; -// } + // @JsProperty + // public JsArray getColumns() { + // // This looks like its superclass, but we're actually returning a different field + // return columns; + // } - @JsProperty - @Override - public JsArray getRows() { - return (JsArray) super.getRows(); - } + // @JsProperty + // @Override + // public JsArray getRows() { + // return (JsArray) super.getRows(); + // } @Override protected SubscriptionRow makeRow(long index) { @@ -575,7 +559,7 @@ public boolean hasChildren() { */ @JsProperty(name = "depth") public int depth() { - return barrageSubscription.getData(index, rowDepthCol.getIndex()); + return Js.coerceToInt(barrageSubscription.getData(index, rowDepthCol.getIndex())); } public void appendKeyData(Object[][] keyTableData, double action) { @@ -597,7 +581,9 @@ public TreeSubscription(ClientTableState state, WorkerConnection connection) { @Override protected BitSet makeColumnBitset(JsArray columns) { - return makeColumnSubscriptionBitset(); + BitSet requested = super.makeColumnBitset(columns); + requested.or(makeColumnSubscriptionBitset()); + return requested; } @Override @@ -609,16 +595,17 @@ protected void onStreamEnd(ResponseStreamWrapper.Status status) { } } - public void setViewport(double firstRow, double lastRow, @JsOptional @JsNullable JsArray columns, - @JsNullable @JsOptional Double updateInterval) { + public void setViewport(double firstRow, double lastRow, JsArray columns, Double updateInterval) { + // TODO + serverViewport = RangeSet.ofRange((long) firstRow, (long) lastRow); - //TODO - - sendBarrageSubscriptionRequest(RangeSet.ofRange((long) firstRow, (long) lastRow), Js.uncheckedCast(columns), updateInterval, false); + sendBarrageSubscriptionRequest(RangeSet.ofRange((long) firstRow, (long) lastRow), Js.uncheckedCast(columns), + updateInterval, false); } @Override - protected void notifyUpdate(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet totalMods, ShiftedRange[] shifted) { + protected void notifyUpdate(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet totalMods, + ShiftedRange[] shifted) { // TODO Rewrite shifts as adds/removed/modifies? in the past we ignored them... 
TreeViewportData detail = new TreeViewportData(rowsAdded, rowsRemoved, totalMods, shifted); detail.offset = this.serverViewport.getFirstRow(); @@ -673,7 +660,7 @@ private void replaceSubscription(RebuildStep step) { BitSet columnsBitset = makeColumnSubscriptionBitset(); RangeSet range = RangeSet.ofRange((long) (double) firstRow, (long) (double) lastRow); - Column[] queryColumns = this.columns; + // Column[] queryColumns = this.columns; boolean alwaysFireEvent = this.alwaysFireNextEvent; this.alwaysFireNextEvent = false; @@ -684,97 +671,30 @@ private void replaceSubscription(RebuildStep step) { range, alwaysFireEvent); - //TODO pass in the current ticket instead of state - TreeSubscription subscription = new TreeSubscription(null, connection); - - subscription.addEventListener(TreeSubscription.EVENT_UPDATED, (CustomEvent data) -> { - TreeSubscription.TreeViewportData detail = (TreeSubscription.TreeViewportData) data.detail; - - handleUpdate(nextSort, nextFilters, detail, alwaysFireEvent); - }); + // TODO pass in the current ticket instead of state. for now this is a workaround. + ClientTableState state = new ClientTableState(connection, + new TableTicket(viewTicket.ticket().getTicket_asU8()), (callback, newState, metadata) -> { + callback.apply("fail, trees dont reconnect like this", null); + }, ""); + ExportedTableCreationResponse def = new ExportedTableCreationResponse(); + HierarchicalTableDescriptor treeDescriptor = + HierarchicalTableDescriptor.deserializeBinary(widget.getDataAsU8()); + def.setSchemaHeader(treeDescriptor.getSnapshotSchema_asU8()); + def.setResultId(new TableReference()); + def.getResultId().setTicket(viewTicket.ticket()); + state.applyTableCreationResponse(def); + + TreeSubscription subscription = new TreeSubscription(state, connection); + subscription.setViewport(firstRow, lastRow, Js.uncheckedCast(columns), (double) updateInterval); + + subscription.addEventListener(TreeSubscription.EVENT_UPDATED, + (CustomEvent data) -> { + TreeSubscription.TreeViewportData detail = + (TreeSubscription.TreeViewportData) data.detail; + + handleUpdate(nextSort, nextFilters, detail, alwaysFireEvent); + }); return Promise.resolve(subscription); - -// BiDiStream doExchange = -// connection.streamFactory().create( -// headers -> connection.flightServiceClient().doExchange(headers), -// (first, headers) -> connection.browserFlightServiceClient().openDoExchange(first, -// headers), -// (next, headers, c) -> connection.browserFlightServiceClient().nextDoExchange(next, -// headers, -// c::apply), -// new FlightData()); -// -// FlightData subscriptionRequestWrapper = new FlightData(); -// FlatBufferBuilder doGetRequest = new FlatBufferBuilder(1024); -// int columnsOffset = BarrageSubscriptionRequest.createColumnsVector(doGetRequest, -// columnsBitset.toByteArray()); -// int viewportOffset = BarrageSubscriptionRequest.createViewportVector(doGetRequest, -// serializeRanges(Collections.singleton(range))); -// int serializationOptionsOffset = BarrageSubscriptionOptions -// .createBarrageSubscriptionOptions(doGetRequest, ColumnConversionMode.Stringify, true, -// updateInterval, 0, 0, false); -// int tableTicketOffset = -// BarrageSubscriptionRequest.createTicketVector(doGetRequest, -// Js.uncheckedCast(viewTicket.ticket().getTicket_asU8())); -// BarrageSubscriptionRequest.startBarrageSubscriptionRequest(doGetRequest); -// BarrageSubscriptionRequest.addTicket(doGetRequest, tableTicketOffset); -// BarrageSubscriptionRequest.addColumns(doGetRequest, columnsOffset); -// 
BarrageSubscriptionRequest.addSubscriptionOptions(doGetRequest, serializationOptionsOffset); -// BarrageSubscriptionRequest.addViewport(doGetRequest, viewportOffset); -// doGetRequest.finish(BarrageSubscriptionRequest.endBarrageSubscriptionRequest(doGetRequest)); -// -// subscriptionRequestWrapper.setAppMetadata( -// WebBarrageUtils.wrapMessage(doGetRequest, BarrageMessageType.BarrageSubscriptionRequest)); -// doExchange.send(subscriptionRequestWrapper); -// -// String[] columnTypes = Arrays.stream(tableDefinition.getColumns()) -// .map(ColumnDefinition::getType) -// .toArray(String[]::new); -// doExchange.onStatus(status -> { -// if (!status.isOk()) { -// failureHandled(status.getDetails()); -// } -// }); -// doExchange.onEnd(status -> { -// this.stream = null; -// }); -// doExchange.onData(flightData -> { -// Message message = -// Message.getRootAsMessage(TypedArrayHelper.wrap(flightData.getDataHeader_asU8())); -// if (message.headerType() == MessageHeader.Schema) { -// // ignore for now, we'll handle this later -// return; -// } -// assert message.headerType() == MessageHeader.RecordBatch; -// RecordBatch header = (RecordBatch) message.header(new RecordBatch()); -// Uint8Array appMetadataBytes = flightData.getAppMetadata_asU8(); -// BarrageUpdateMetadata update = null; -// if (appMetadataBytes.length != 0) { -// BarrageMessageWrapper barrageMessageWrapper = -// BarrageMessageWrapper -// .getRootAsBarrageMessageWrapper(TypedArrayHelper.wrap(appMetadataBytes)); -// -// update = BarrageUpdateMetadata.getRootAsBarrageUpdateMetadata( -// barrageMessageWrapper.msgPayloadAsByteBuffer()); -// } -// TableSnapshot snapshot = WebBarrageUtils.createSnapshot(header, -// WebBarrageUtils -// .typedArrayToAlignedLittleEndianByteBuffer(flightData.getDataBody_asU8()), -// update, true, columnTypes); -// -// final RangeSet includedRows = snapshot.getIncludedRows(); -// double offset = firstRow; -// assert includedRows.isEmpty() || Js.asInt(offset) == includedRows.getFirstRow(); -// TreeSubscription.TreeViewportData vd = new TreeSubscription.TreeViewportData( -// offset, -// includedRows.size(), -// snapshot.getTableSize(), -// snapshot.getDataColumns(), -// queryColumns); -// -// handleUpdate(nextSort, nextFilters, vd, alwaysFireEvent); -// }); -// return Promise.resolve(doExchange); }); stream.catch_(err -> { // if this is the active attempt at a subscription, report the error @@ -797,7 +717,7 @@ private IThenable.ThenOnFulfilledCallbackFn defer() { } private void handleUpdate(List nextSort, List nextFilters, - TreeSubscription.TreeViewportData viewportData, boolean alwaysFireEvent) { + TreeSubscription.TreeViewportData viewportData, boolean alwaysFireEvent) { JsLog.debug("tree table response arrived", viewportData); if (closed) { // ignore @@ -938,7 +858,7 @@ public void setExpanded(RowReferenceUnion row, boolean isExpanded, @JsOptional B final TreeSubscription.TreeRow r; if (row.isNumber()) { - r = currentViewportData.getRows().getAt((int) (row.asNumber() - currentViewportData.offset)); + r = (TreeSubscription.TreeRow) currentViewportData.getRows().getAt((int) (row.asNumber())); } else if (row.isTreeRow()) { r = row.asTreeRow(); } else { @@ -967,7 +887,7 @@ public void collapseAll() { public boolean isExpanded(RowReferenceUnion row) { final TreeSubscription.TreeRow r; if (row.isNumber()) { - r = currentViewportData.getRows().getAt((int) (row.asNumber() - currentViewportData.offset)); + r = (TreeSubscription.TreeRow) currentViewportData.getRows().getAt((int) (row.asNumber())); } else if 
(row.isTreeRow()) { r = row.asTreeRow(); } else { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java b/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java index 65a113a04b3..f1743168645 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java @@ -75,8 +75,8 @@ public ChunkType[] chunkTypes() { } public Class[] columnTypes() { - return Arrays.stream(getColumns()) - .map(Column::getType) + return Arrays.stream(tableDef.getColumns()) + .map(ColumnDefinition::getType) .map(t -> { switch (t) { case "boolean": @@ -109,7 +109,7 @@ public Class[] columnTypes() { public Class[] componentTypes() { // The only componentType that matters is byte.class - return Arrays.stream(getColumns()).map(Column::getType).map(t -> { + return Arrays.stream(tableDef.getColumns()).map(ColumnDefinition::getType).map(t -> { if (!t.endsWith("[]")) { return null; } From ff87bb2d732e24726f4e0f00fb03d53437b359a1 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Wed, 24 Apr 2024 06:12:52 -0500 Subject: [PATCH 028/219] Move ChunkListInputStreamGenerator to top-level class --- .../barrage/BarrageStreamGeneratorImpl.java | 89 +++++-------------- .../ChunkListInputStreamGenerator.java | 56 ++++++++++++ 2 files changed, 79 insertions(+), 66 deletions(-) create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/ChunkListInputStreamGenerator.java diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java index c62ef1020c3..1fa9684d61b 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java @@ -14,7 +14,6 @@ import io.deephaven.barrage.flatbuf.BarrageMessageWrapper; import io.deephaven.barrage.flatbuf.BarrageModColumnMetadata; import io.deephaven.barrage.flatbuf.BarrageUpdateMetadata; -import io.deephaven.chunk.Chunk; import io.deephaven.chunk.ChunkType; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.WritableLongChunk; @@ -130,57 +129,13 @@ protected void writeHeader( } } - public static class ChunkListInputStreamGenerator implements SafeCloseable { - public ChunkInputStreamGenerator[] generators; - public ChunkInputStreamGenerator emptyGenerator; - - ChunkListInputStreamGenerator(BarrageMessage.AddColumnData acd) { - // create an input stream generator for each chunk - generators = new ChunkInputStreamGenerator[acd.data.size()]; - - long rowOffset = 0; - for (int i = 0; i < acd.data.size(); ++i) { - final Chunk valuesChunk = acd.data.get(i); - generators[i] = ChunkInputStreamGenerator.makeInputStreamGenerator( - valuesChunk.getChunkType(), acd.type, acd.componentType, valuesChunk, rowOffset); - rowOffset += valuesChunk.size(); - } - emptyGenerator = ChunkInputStreamGenerator.makeInputStreamGenerator( - acd.chunkType, acd.type, acd.componentType, acd.chunkType.getEmptyChunk(), 0); - } - - ChunkListInputStreamGenerator(BarrageMessage.ModColumnData mcd) { - // create an input stream generator for each chunk - generators = new ChunkInputStreamGenerator[mcd.data.size()]; - - long rowOffset = 0; - for (int i = 0; i < mcd.data.size(); ++i) { - final Chunk valuesChunk = mcd.data.get(i); - 
generators[i] = ChunkInputStreamGenerator.makeInputStreamGenerator( - mcd.chunkType, mcd.type, mcd.componentType, valuesChunk, rowOffset); - rowOffset += valuesChunk.size(); - } - emptyGenerator = ChunkInputStreamGenerator.makeInputStreamGenerator( - mcd.chunkType, mcd.type, mcd.componentType, mcd.chunkType.getEmptyChunk(), 0); - } - - @Override - public void close() { - for (int i = 0; i < generators.length; i++) { - generators[i].close(); - generators[i] = null; - } - emptyGenerator.close(); - } - } - public static class ModColumnData { public final RowSetGenerator rowsModified; public final ChunkListInputStreamGenerator data; ModColumnData(final BarrageMessage.ModColumnData col) throws IOException { rowsModified = new RowSetGenerator(col.rowsModified); - data = new ChunkListInputStreamGenerator(col); + data = new ChunkListInputStreamGenerator(col.type, col.componentType, col.data, col.chunkType); } } @@ -225,8 +180,10 @@ public BarrageStreamGeneratorImpl(final BarrageMessage message, addColumnData = new ChunkListInputStreamGenerator[message.addColumnData.length]; for (int i = 0; i < message.addColumnData.length; ++i) { - addColumnData[i] = new ChunkListInputStreamGenerator(message.addColumnData[i]); - addGeneratorCount = Math.max(addGeneratorCount, addColumnData[i].generators.length); + BarrageMessage.AddColumnData columnData = message.addColumnData[i]; + addColumnData[i] = new ChunkListInputStreamGenerator(columnData.type, columnData.componentType, + columnData.data, columnData.chunkType); + addGeneratorCount = Math.max(addGeneratorCount, addColumnData[i].generators().size()); } modColumnData = new ModColumnData[message.modColumnData.length]; @@ -827,18 +784,18 @@ private void processBatches(Consumer visitor, final View view, } } - private static int findGeneratorForOffset(final ChunkInputStreamGenerator[] generators, final long offset) { + private static int findGeneratorForOffset(final List generators, final long offset) { // fast path for smaller updates - if (generators.length <= 1) { + if (generators.isEmpty()) { return 0; } int low = 0; - int high = generators.length; + int high = generators.size(); while (low + 1 < high) { int mid = (low + high) / 2; - int cmp = Long.compare(generators[mid].getRowOffset(), offset); + int cmp = Long.compare(generators.get(mid).getRowOffset(), offset); if (cmp < 0) { // the generator's first key is low enough @@ -865,7 +822,7 @@ private int appendAddColumns(final View view, final long startRange, final int t // find the generator for the initial position-space key long startPos = view.addRowOffsets().get(startRange); - int chunkIdx = findGeneratorForOffset(addColumnData[0].generators, startPos); + int chunkIdx = findGeneratorForOffset(addColumnData[0].generators(), startPos); // adjust the batch size if we would cross a chunk boundary long shift = 0; @@ -873,8 +830,8 @@ private int appendAddColumns(final View view, final long startRange, final int t if (endPos == RowSet.NULL_ROW_KEY) { endPos = Long.MAX_VALUE; } - if (addColumnData[0].generators.length > 0) { - final ChunkInputStreamGenerator tmpGenerator = addColumnData[0].generators[chunkIdx]; + if (!addColumnData[0].generators().isEmpty()) { + final ChunkInputStreamGenerator tmpGenerator = addColumnData[0].generators().get(chunkIdx); endPos = Math.min(endPos, tmpGenerator.getLastRowOffset()); shift = -tmpGenerator.getRowOffset(); } @@ -885,7 +842,7 @@ private int appendAddColumns(final View view, final long startRange, final int t final RowSet adjustedOffsets = shift == 0 ? 
null : myAddedOffsets.shift(shift)) { // every column must write to the stream for (final ChunkListInputStreamGenerator data : addColumnData) { - final int numElements = data.generators.length == 0 + final int numElements = data.generators().isEmpty() ? 0 : myAddedOffsets.intSize("BarrageStreamGenerator"); if (view.options().columnsAsList()) { @@ -901,7 +858,7 @@ private int appendAddColumns(final View view, final long startRange, final int t // use an empty generator to publish the column data try (final RowSet empty = RowSetFactory.empty()) { final ChunkInputStreamGenerator.DrainableColumn drainableColumn = - data.emptyGenerator.getInputStream(view.options(), empty); + data.empty(view.options(), empty); drainableColumn.visitFieldNodes(fieldNodeListener); drainableColumn.visitBuffers(bufferListener); @@ -909,7 +866,7 @@ private int appendAddColumns(final View view, final long startRange, final int t addStream.accept(drainableColumn); } } else { - final ChunkInputStreamGenerator generator = data.generators[chunkIdx]; + final ChunkInputStreamGenerator generator = data.generators().get(chunkIdx); final ChunkInputStreamGenerator.DrainableColumn drainableColumn = generator.getInputStream(view.options(), shift == 0 ? myAddedOffsets : adjustedOffsets); drainableColumn.visitFieldNodes(fieldNodeListener); @@ -934,8 +891,8 @@ private int appendModColumns(final View view, final long startRange, final int t // adjust the batch size if we would cross a chunk boundary for (int ii = 0; ii < modColumnData.length; ++ii) { final ModColumnData mcd = modColumnData[ii]; - final ChunkInputStreamGenerator[] generators = mcd.data.generators; - if (generators.length == 0) { + final List generators = mcd.data.generators(); + if (generators.isEmpty()) { continue; } @@ -944,8 +901,8 @@ private int appendModColumns(final View view, final long startRange, final int t final long startPos = modOffsets != null ? modOffsets.get(startRange) : startRange; if (startPos != RowSet.NULL_ROW_KEY) { final int chunkIdx = findGeneratorForOffset(generators, startPos); - if (chunkIdx < generators.length - 1) { - maxLength = Math.min(maxLength, generators[chunkIdx].getLastRowOffset() + 1 - startPos); + if (chunkIdx < generators.size() - 1) { + maxLength = Math.min(maxLength, generators.get(chunkIdx).getLastRowOffset() + 1 - startPos); } columnChunkIdx[ii] = chunkIdx; } @@ -955,9 +912,9 @@ private int appendModColumns(final View view, final long startRange, final int t long numRows = 0; for (int ii = 0; ii < modColumnData.length; ++ii) { final ModColumnData mcd = modColumnData[ii]; - final ChunkInputStreamGenerator generator = mcd.data.generators.length > 0 - ? mcd.data.generators[columnChunkIdx[ii]] - : null; + final ChunkInputStreamGenerator generator = mcd.data.generators().isEmpty() + ? 
null + : mcd.data.generators().get(columnChunkIdx[ii]); final RowSet modOffsets = view.modRowOffsets(ii); long startPos, endPos; @@ -1005,7 +962,7 @@ private int appendModColumns(final View view, final long startRange, final int t // use the empty generator to publish the column data try (final RowSet empty = RowSetFactory.empty()) { final ChunkInputStreamGenerator.DrainableColumn drainableColumn = - mcd.data.emptyGenerator.getInputStream(view.options(), empty); + mcd.data.empty(view.options(), empty); drainableColumn.visitFieldNodes(fieldNodeListener); drainableColumn.visitBuffers(bufferListener); // Add the drainable last as it is allowed to immediately close a row set the visitors need diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/ChunkListInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/ChunkListInputStreamGenerator.java new file mode 100644 index 00000000000..7aeb20c8b9f --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/ChunkListInputStreamGenerator.java @@ -0,0 +1,56 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.extensions.barrage; + +import io.deephaven.chunk.Chunk; +import io.deephaven.chunk.ChunkType; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.rowset.RowSet; +import io.deephaven.engine.table.impl.util.BarrageMessage; +import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.SafeCloseable; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; + +public class ChunkListInputStreamGenerator implements SafeCloseable { + private final List generators; + private final ChunkInputStreamGenerator emptyGenerator; + + public ChunkListInputStreamGenerator(Class type, Class componentType, List> data, + ChunkType chunkType) { + // create an input stream generator for each chunk + ChunkInputStreamGenerator[] generators = new ChunkInputStreamGenerator[data.size()]; + + long rowOffset = 0; + for (int i = 0; i < data.size(); ++i) { + final Chunk valuesChunk = data.get(i); + generators[i] = ChunkInputStreamGenerator.makeInputStreamGenerator(chunkType, type, componentType, + valuesChunk, rowOffset); + rowOffset += valuesChunk.size(); + } + this.generators = Arrays.asList(generators); + emptyGenerator = ChunkInputStreamGenerator.makeInputStreamGenerator( + chunkType, type, componentType, chunkType.getEmptyChunk(), 0); + } + + public List generators() { + return generators; + } + + public ChunkInputStreamGenerator.DrainableColumn empty(StreamReaderOptions options, RowSet rowSet) + throws IOException { + return emptyGenerator.getInputStream(options, rowSet); + } + + @Override + public void close() { + for (ChunkInputStreamGenerator generator : generators) { + generator.close(); + } + emptyGenerator.close(); + } +} From 91fba5fb24129a7518859f6292e539393ae90fc8 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Thu, 25 Apr 2024 10:43:18 -0500 Subject: [PATCH 029/219] Make fields private, remove unused ones --- .../barrage/BarrageStreamGeneratorImpl.java | 88 ++++++++----------- 1 file changed, 39 insertions(+), 49 deletions(-) diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java index 1fa9684d61b..49faede6516 100644 --- 
a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java @@ -130,8 +130,8 @@ protected void writeHeader( } public static class ModColumnData { - public final RowSetGenerator rowsModified; - public final ChunkListInputStreamGenerator data; + private final RowSetGenerator rowsModified; + private final ChunkListInputStreamGenerator data; ModColumnData(final BarrageMessage.ModColumnData col) throws IOException { rowsModified = new RowSetGenerator(col.rowsModified); @@ -139,23 +139,21 @@ public static class ModColumnData { } } - public final BarrageMessage message; - public final BarragePerformanceLog.WriteMetricsConsumer writeConsumer; + private final BarrageMessage message; + private final BarragePerformanceLog.WriteMetricsConsumer writeConsumer; - public final long firstSeq; - public final long lastSeq; - public final long step; + private final long firstSeq; + private final long lastSeq; - public final boolean isSnapshot; + private final boolean isSnapshot; - public final RowSetGenerator rowsAdded; - public final RowSetGenerator rowsIncluded; - public final RowSetGenerator rowsRemoved; - public final RowSetShiftDataGenerator shifted; + private final RowSetGenerator rowsAdded; + private final RowSetGenerator rowsIncluded; + private final RowSetGenerator rowsRemoved; + private final RowSetShiftDataGenerator shifted; - public final ChunkListInputStreamGenerator[] addColumnData; - public int addGeneratorCount = 0; - public final ModColumnData[] modColumnData; + private final ChunkListInputStreamGenerator[] addColumnData; + private final ModColumnData[] modColumnData; /** * Create a barrage stream generator that can slice and dice the barrage message for delivery to clients. 
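This constructor's javadoc sits atop the machinery reworked in the previous patch: each column's data is a list of chunk generators, each owning a contiguous run of row offsets, and appendAddColumns/appendModColumns locate the right generator with findGeneratorForOffset's binary search over first offsets. Here is a standalone sketch of that search under the same assumptions (runs sorted by offset, offset at or past the first run's start); RowRun is an illustrative stand-in for ChunkInputStreamGenerator.

    import java.util.Arrays;
    import java.util.List;

    // Illustrative stand-in for a chunk generator's row-offset range.
    final class RowRun {
        final long firstOffset;

        RowRun(long firstOffset) {
            this.firstOffset = firstOffset;
        }
    }

    final class RunSearch {
        // Index of the run containing offset, assuming a sorted, non-empty
        // list with runs.get(0).firstOffset <= offset; mirrors
        // findGeneratorForOffset from the previous patch.
        static int find(List<RowRun> runs, long offset) {
            int low = 0;
            int high = runs.size();
            while (low + 1 < high) {
                int mid = (low + high) / 2;
                if (runs.get(mid).firstOffset <= offset) {
                    low = mid;  // mid starts at or before offset: keep it
                } else {
                    high = mid; // mid starts past offset: discard it
                }
            }
            return low;
        }

        public static void main(String[] args) {
            // Three chunks starting at offsets 0, 10, and 20.
            List<RowRun> runs = Arrays.asList(new RowRun(0), new RowRun(10), new RowRun(20));
            System.out.println(find(runs, 15)); // 1
            System.out.println(find(runs, 20)); // 2
        }
    }

The narrowing loop preserves the invariant that runs.get(low) starts at or before offset, so low is the covering run once the window closes.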
@@ -170,7 +168,6 @@ public BarrageStreamGeneratorImpl(final BarrageMessage message, try { firstSeq = message.firstSeq; lastSeq = message.lastSeq; - step = message.step; isSnapshot = message.isSnapshot; rowsAdded = new RowSetGenerator(message.rowsAdded); @@ -183,7 +180,6 @@ public BarrageStreamGeneratorImpl(final BarrageMessage message, BarrageMessage.AddColumnData columnData = message.addColumnData[i]; addColumnData[i] = new ChunkListInputStreamGenerator(columnData.type, columnData.componentType, columnData.data, columnData.chunkType); - addGeneratorCount = Math.max(addGeneratorCount, addColumnData[i].generators().size()); } modColumnData = new ModColumnData[message.modColumnData.length]; @@ -257,19 +253,19 @@ public SubView getSubView(BarrageSubscriptionOptions options, boolean isInitialS return getSubView(options, isInitialSnapshot, null, false, null, null); } - public static class SubView implements View { - public final BarrageStreamGeneratorImpl generator; - public final BarrageSubscriptionOptions options; - public final boolean isInitialSnapshot; - public final RowSet viewport; - public final boolean reverseViewport; - public final RowSet keyspaceViewport; - public final BitSet subscribedColumns; - public final long numAddRows; - public final long numModRows; - public final RowSet addRowOffsets; - public final RowSet addRowKeys; - public final RowSet[] modRowOffsets; + public static final class SubView implements View { + private final BarrageStreamGeneratorImpl generator; + private final BarrageSubscriptionOptions options; + private final boolean isInitialSnapshot; + private final RowSet viewport; + private final boolean reverseViewport; + private final RowSet keyspaceViewport; + private final BitSet subscribedColumns; + private final long numAddRows; + private final long numModRows; + private final RowSet addRowOffsets; + private final RowSet addRowKeys; + private final RowSet[] modRowOffsets; public SubView(final BarrageStreamGeneratorImpl generator, final BarrageSubscriptionOptions options, @@ -430,16 +426,15 @@ public SnapshotView getSnapshotView(BarrageSnapshotOptions options) { return getSnapshotView(options, null, false, null, null); } - public static class SnapshotView implements View { - public final BarrageStreamGeneratorImpl generator; - public final BarrageSnapshotOptions options; - public final RowSet viewport; - public final boolean reverseViewport; - public final RowSet keyspaceViewport; - public final BitSet subscribedColumns; - public final long numAddRows; - public final RowSet addRowKeys; - public final RowSet addRowOffsets; + public static final class SnapshotView implements View { + private final BarrageStreamGeneratorImpl generator; + private final BarrageSnapshotOptions options; + private final RowSet viewport; + private final boolean reverseViewport; + private final BitSet subscribedColumns; + private final long numAddRows; + private final RowSet addRowKeys; + private final RowSet addRowOffsets; public SnapshotView(final BarrageStreamGeneratorImpl generator, final BarrageSnapshotOptions options, @@ -452,7 +447,6 @@ public SnapshotView(final BarrageStreamGeneratorImpl generator, this.viewport = viewport; this.reverseViewport = reverseViewport; - this.keyspaceViewport = keyspaceViewport; this.subscribedColumns = subscribedColumns; // precompute add row offsets @@ -509,7 +503,7 @@ public boolean isViewport() { } @Override - public final StreamReaderOptions options() { + public StreamReaderOptions options() { return options; } @@ -524,8 +518,8 @@ public RowSet 
From 6be69f07eb5d126382cc7cc70787fdba3d2d1265 Mon Sep 17 00:00:00 2001
From: Colin Alworth
Date: Thu, 30 May 2024 16:11:10 -0500
Subject: [PATCH 030/219] Make inner classes non-static and simplify

---
 .../barrage/BarrageStreamGeneratorImpl.java   | 334 +++++++++---------
 1 file changed, 164 insertions(+), 170 deletions(-)

diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java
index 49faede6516..8c405696da6 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java
@@ -237,7 +237,7 @@ public SubView getSubView(final BarrageSubscriptionOptions options,
             final boolean isInitialSnapshot,
             @Nullable final RowSet viewport,
             final boolean reverseViewport,
             @Nullable final RowSet keyspaceViewport,
             @Nullable final BitSet subscribedColumns) {
-        return new SubView(this, options, isInitialSnapshot, viewport, reverseViewport, keyspaceViewport,
+        return new SubView(options, isInitialSnapshot, viewport, reverseViewport, keyspaceViewport,
                 subscribedColumns);
     }
 
@@ -253,8 +253,7 @@ public SubView getSubView(BarrageSubscriptionOptions options, boolean isInitialS
         return getSubView(options, isInitialSnapshot, null, false, null, null);
     }
 
-    public static final class SubView implements View {
-        private final BarrageStreamGeneratorImpl generator;
+    public final class SubView implements View {
         private final BarrageSubscriptionOptions options;
         private final boolean isInitialSnapshot;
         private final RowSet viewport;
@@ -267,14 +266,12 @@ public static final class SubView implements View {
         private final RowSet addRowKeys;
         private final RowSet[] modRowOffsets;
 
-        public SubView(final BarrageStreamGeneratorImpl generator,
-                final BarrageSubscriptionOptions options,
+        public SubView(final BarrageSubscriptionOptions options,
                 final boolean isInitialSnapshot,
                 @Nullable final RowSet viewport,
                 final boolean reverseViewport,
                 @Nullable final RowSet keyspaceViewport,
                 @Nullable final BitSet subscribedColumns) {
-            this.generator = generator;
            this.options = options;
             this.isInitialSnapshot = isInitialSnapshot;
             this.viewport = viewport;
@@ -283,15 +280,15 @@ public SubView(final BarrageStreamGeneratorImpl generator,
             this.subscribedColumns = subscribedColumns;
 
             if (keyspaceViewport != null) {
-                this.modRowOffsets = new WritableRowSet[generator.modColumnData.length];
+                this.modRowOffsets = new WritableRowSet[modColumnData.length];
             } else {
                 this.modRowOffsets = null;
             }
 
             // precompute the modified column indexes, and calculate total rows needed
             long numModRows = 0;
-            for (int ii = 0; ii < generator.modColumnData.length; ++ii) {
-                final ModColumnData mcd = generator.modColumnData[ii];
+            for (int ii = 0; ii < modColumnData.length; ++ii) {
+                final ModColumnData mcd = modColumnData[ii];
 
                 if (keyspaceViewport != null) {
                     try (WritableRowSet intersect = keyspaceViewport.intersect(mcd.rowsModified.original)) {
@@ -305,15 +302,15 @@ public SubView(final BarrageStreamGeneratorImpl generator,
             this.numModRows = numModRows;
 
             if (keyspaceViewport != null) {
-                addRowKeys = keyspaceViewport.intersect(generator.rowsIncluded.original);
-                addRowOffsets = generator.rowsIncluded.original.invert(addRowKeys);
-            } else if (!generator.rowsAdded.original.equals(generator.rowsIncluded.original)) {
+                addRowKeys = keyspaceViewport.intersect(rowsIncluded.original);
+                addRowOffsets = rowsIncluded.original.invert(addRowKeys);
+            } else if (!rowsAdded.original.equals(rowsIncluded.original)) {
                 // there are scoped rows included in the chunks that need to be removed
-                addRowKeys = generator.rowsAdded.original.copy();
-                addRowOffsets = generator.rowsIncluded.original.invert(addRowKeys);
+                addRowKeys = rowsAdded.original.copy();
+                addRowOffsets = rowsIncluded.original.invert(addRowKeys);
             } else {
-                addRowKeys = generator.rowsAdded.original.copy();
-                addRowOffsets = RowSetFactory.flat(generator.rowsAdded.original.size());
+                addRowKeys = rowsAdded.original.copy();
+                addRowOffsets = RowSetFactory.flat(rowsAdded.original.size());
             }
 
             this.numAddRows = addRowOffsets.size();
@@ -322,7 +319,7 @@ public SubView(final BarrageStreamGeneratorImpl generator,
         @Override
         public void forEachStream(Consumer<InputStream> visitor) throws IOException {
             final long startTm = System.nanoTime();
-            ByteBuffer metadata = generator.getSubscriptionMetadata(this);
+            ByteBuffer metadata = getSubscriptionMetadata();
             MutableLong bytesWritten = new MutableLong(0L);
 
             // batch size is maximum, will write fewer rows when needed
@@ -332,21 +329,21 @@ public void forEachStream(Consumer visitor) throws IOException {
 
             if (numAddRows == 0 && numModRows == 0) {
                 // we still need to send a message containing metadata when there are no rows
-                final InputStream is = generator.getInputStream(
-                        this, 0, 0, actualBatchSize, metadata, generator::appendAddColumns);
+                final InputStream is = getInputStream(this, 0, 0, actualBatchSize, metadata,
+                        BarrageStreamGeneratorImpl.this::appendAddColumns);
                 bytesWritten.add(is.available());
                 visitor.accept(is);
-                generator.writeConsumer.onWrite(bytesWritten.get(), System.nanoTime() - startTm);
+                writeConsumer.onWrite(bytesWritten.get(), System.nanoTime() - startTm);
                 return;
             }
 
             // send the add batches (if any)
-            generator.processBatches(visitor, this, numAddRows, maxBatchSize, metadata, generator::appendAddColumns,
-                    bytesWritten);
+            processBatches(visitor, this, numAddRows, maxBatchSize, metadata,
+                    BarrageStreamGeneratorImpl.this::appendAddColumns, bytesWritten);
 
             // send the mod batches (if any) but don't send metadata twice
-            generator.processBatches(visitor, this, numModRows, maxBatchSize, numAddRows > 0 ? null : metadata,
-                    generator::appendModColumns, bytesWritten);
+            processBatches(visitor, this, numModRows, maxBatchSize, numAddRows > 0 ? null : metadata,
+                    BarrageStreamGeneratorImpl.this::appendModColumns, bytesWritten);
 
             // clean up the helper indexes
             addRowOffsets.close();
@@ -356,7 +353,7 @@ public void forEachStream(Consumer visitor) throws IOException {
                     modViewport.close();
                 }
             }
-            generator.writeConsumer.onWrite(bytesWritten.get(), System.nanoTime() - startTm);
+            writeConsumer.onWrite(bytesWritten.get(), System.nanoTime() - startTm);
         }
 
         private int batchSize() {
@@ -394,6 +391,84 @@ public RowSet modRowOffsets(int col) {
             }
             return modRowOffsets[col];
         }
+
+        private ByteBuffer getSubscriptionMetadata() throws IOException {
+            final FlatBufferBuilder metadata = new FlatBufferBuilder();
+
+            int effectiveViewportOffset = 0;
+            if (isSnapshot && isViewport()) {
+                try (final RowSetGenerator viewportGen = new RowSetGenerator(viewport)) {
+                    effectiveViewportOffset = viewportGen.addToFlatBuffer(metadata);
+                }
+            }
+
+            int effectiveColumnSetOffset = 0;
+            if (isSnapshot && subscribedColumns != null) {
+                effectiveColumnSetOffset = new BitSetGenerator(subscribedColumns).addToFlatBuffer(metadata);
+            }
+
+            final int rowsAddedOffset;
+            if (isSnapshot && !isInitialSnapshot) {
+                // clients don't need/want to receive the full RowSet on every snapshot
+                rowsAddedOffset = EmptyRowSetGenerator.INSTANCE.addToFlatBuffer(metadata);
+            } else {
+                rowsAddedOffset = rowsAdded.addToFlatBuffer(metadata);
+            }
+
+            final int rowsRemovedOffset = rowsRemoved.addToFlatBuffer(metadata);
+            final int shiftDataOffset = shifted.addToFlatBuffer(metadata);
+
+            // Added Chunk Data:
+            int addedRowsIncludedOffset = 0;
+
+            // don't send `rowsIncluded` when identical to `rowsAdded`, client will infer they are the same
+            if (isSnapshot || !addRowKeys.equals(rowsAdded.original)) {
+                addedRowsIncludedOffset = rowsIncluded.addToFlatBuffer(addRowKeys, metadata);
+            }
+
+            // now add mod-column streams, and write the mod column indexes
+            TIntArrayList modOffsets = new TIntArrayList(modColumnData.length);
+            for (final ModColumnData mcd : modColumnData) {
+                final int myModRowOffset;
+                if (keyspaceViewport != null) {
+                    myModRowOffset = mcd.rowsModified.addToFlatBuffer(keyspaceViewport, metadata);
+                } else {
+                    myModRowOffset = mcd.rowsModified.addToFlatBuffer(metadata);
+                }
+                modOffsets.add(BarrageModColumnMetadata.createBarrageModColumnMetadata(metadata, myModRowOffset));
+            }
+
+            BarrageUpdateMetadata.startModColumnNodesVector(metadata, modOffsets.size());
+            modOffsets.forEachDescending(offset -> {
+                metadata.addOffset(offset);
+                return true;
+            });
+            final int nodesOffset = metadata.endVector();
+
+            BarrageUpdateMetadata.startBarrageUpdateMetadata(metadata);
+            BarrageUpdateMetadata.addIsSnapshot(metadata, isSnapshot);
+            BarrageUpdateMetadata.addFirstSeq(metadata, firstSeq);
+            BarrageUpdateMetadata.addLastSeq(metadata, lastSeq);
+            BarrageUpdateMetadata.addEffectiveViewport(metadata, effectiveViewportOffset);
+            BarrageUpdateMetadata.addEffectiveColumnSet(metadata, effectiveColumnSetOffset);
+            BarrageUpdateMetadata.addAddedRows(metadata, rowsAddedOffset);
+            BarrageUpdateMetadata.addRemovedRows(metadata, rowsRemovedOffset);
+            BarrageUpdateMetadata.addShiftData(metadata, shiftDataOffset);
+            BarrageUpdateMetadata.addAddedRowsIncluded(metadata, addedRowsIncludedOffset);
+            BarrageUpdateMetadata.addModColumnNodes(metadata, nodesOffset);
+            BarrageUpdateMetadata.addEffectiveReverseViewport(metadata, reverseViewport);
+            metadata.finish(BarrageUpdateMetadata.endBarrageUpdateMetadata(metadata));
+
+            final FlatBufferBuilder header = new FlatBufferBuilder();
+            final int payloadOffset = BarrageMessageWrapper.createMsgPayloadVector(header, metadata.dataBuffer());
+            BarrageMessageWrapper.startBarrageMessageWrapper(header);
+            BarrageMessageWrapper.addMagic(header, BarrageUtil.FLATBUFFER_MAGIC);
+            BarrageMessageWrapper.addMsgType(header, BarrageMessageType.BarrageUpdateMetadata);
+            BarrageMessageWrapper.addMsgPayload(header, payloadOffset);
+            header.finish(BarrageMessageWrapper.endBarrageMessageWrapper(header));
+
+            return header.dataBuffer().slice();
+        }
     }
 
     /**
@@ -412,7 +487,7 @@ public SnapshotView getSnapshotView(final BarrageSnapshotOptions options,
             final boolean reverseViewport,
             @Nullable final RowSet keyspaceViewport,
             @Nullable final BitSet snapshotColumns) {
-        return new SnapshotView(this, options, viewport, reverseViewport, keyspaceViewport, snapshotColumns);
+        return new SnapshotView(options, viewport, reverseViewport, keyspaceViewport, snapshotColumns);
     }
 
     /**
@@ -426,8 +501,7 @@ public SnapshotView getSnapshotView(BarrageSnapshotOptions options) {
         return getSnapshotView(options, null, false, null, null);
     }
 
-    public static final class SnapshotView implements View {
-        private final BarrageStreamGeneratorImpl generator;
+    public final class SnapshotView implements View {
         private final BarrageSnapshotOptions options;
         private final RowSet viewport;
         private final boolean reverseViewport;
@@ -436,13 +510,11 @@ public static final class SnapshotView implements View {
         private final RowSet addRowKeys;
         private final RowSet addRowOffsets;
 
-        public SnapshotView(final BarrageStreamGeneratorImpl generator,
-                final BarrageSnapshotOptions options,
+        public SnapshotView(final BarrageSnapshotOptions options,
                 @Nullable final RowSet viewport,
                 final boolean reverseViewport,
                 @Nullable final RowSet keyspaceViewport,
                 @Nullable final BitSet subscribedColumns) {
-            this.generator = generator;
             this.options = options;
             this.viewport = viewport;
             this.reverseViewport = reverseViewport;
@@ -451,10 +523,10 @@ public SnapshotView(final BarrageStreamGeneratorImpl generator,
 
             // precompute add row offsets
             if (keyspaceViewport != null) {
-                addRowKeys = keyspaceViewport.intersect(generator.rowsIncluded.original);
-                addRowOffsets = generator.rowsIncluded.original.invert(addRowKeys);
+                addRowKeys = keyspaceViewport.intersect(rowsIncluded.original);
+                addRowOffsets = rowsIncluded.original.invert(addRowKeys);
             } else {
-                addRowKeys = generator.rowsAdded.original.copy();
+                addRowKeys = rowsAdded.original.copy();
                 addRowOffsets = RowSetFactory.flat(addRowKeys.size());
             }
 
@@ -464,7 +536,7 @@ public SnapshotView(final BarrageStreamGeneratorImpl generator,
         @Override
         public void forEachStream(Consumer<InputStream> visitor) throws IOException {
             final long startTm = System.nanoTime();
-            ByteBuffer metadata = generator.getSnapshotMetadata(this);
+            ByteBuffer metadata = getSnapshotMetadata();
             MutableLong bytesWritten = new MutableLong(0L);
 
             // batch size is maximum, will write fewer rows when needed
@@ -472,16 +544,16 @@ public void forEachStream(Consumer visitor) throws IOException {
             final MutableInt actualBatchSize = new MutableInt();
             if (numAddRows == 0) {
                 // we still need to send a message containing metadata when there are no rows
-                visitor.accept(generator.getInputStream(
-                        this, 0, 0, actualBatchSize, metadata, generator::appendAddColumns));
+                visitor.accept(getInputStream(this, 0, 0, actualBatchSize, metadata,
+                        BarrageStreamGeneratorImpl.this::appendAddColumns));
             } else {
                 // send the add batches
-                generator.processBatches(visitor, this, numAddRows, maxBatchSize, metadata, generator::appendAddColumns,
-                        bytesWritten);
+                processBatches(visitor, this, numAddRows, maxBatchSize, metadata,
+                        BarrageStreamGeneratorImpl.this::appendAddColumns, bytesWritten);
             }
             addRowOffsets.close();
             addRowKeys.close();
-            generator.writeConsumer.onWrite(bytesWritten.get(), System.nanoTime() - startTm);
+            writeConsumer.onWrite(bytesWritten.get(), System.nanoTime() - startTm);
         }
 
         private int batchSize() {
@@ -516,6 +588,58 @@ public RowSet addRowOffsets() {
         public RowSet modRowOffsets(int col) {
             throw new UnsupportedOperationException("asked for mod row on SnapshotView");
         }
+
+        private ByteBuffer getSnapshotMetadata() throws IOException {
+            final FlatBufferBuilder metadata = new FlatBufferBuilder();
+
+            int effectiveViewportOffset = 0;
+            if (isViewport()) {
+                try (final RowSetGenerator viewportGen = new RowSetGenerator(viewport)) {
+                    effectiveViewportOffset = viewportGen.addToFlatBuffer(metadata);
+                }
+            }
+
+            int effectiveColumnSetOffset = 0;
+            if (subscribedColumns != null) {
+                effectiveColumnSetOffset = new BitSetGenerator(subscribedColumns).addToFlatBuffer(metadata);
+            }
+
+            final int rowsAddedOffset = rowsAdded.addToFlatBuffer(metadata);
+
+            // no shifts in a snapshot, but need to provide a valid structure
+            final int shiftDataOffset = shifted.addToFlatBuffer(metadata);
+
+            // Added Chunk Data:
+            int addedRowsIncludedOffset = 0;
+            // don't send `rowsIncluded` when identical to `rowsAdded`, client will infer they are the same
+            if (isSnapshot || !addRowKeys.equals(rowsAdded.original)) {
+                addedRowsIncludedOffset = rowsIncluded.addToFlatBuffer(addRowKeys, metadata);
+            }
+
+            BarrageUpdateMetadata.startBarrageUpdateMetadata(metadata);
+            BarrageUpdateMetadata.addIsSnapshot(metadata, isSnapshot);
+            BarrageUpdateMetadata.addFirstSeq(metadata, firstSeq);
+            BarrageUpdateMetadata.addLastSeq(metadata, lastSeq);
+            BarrageUpdateMetadata.addEffectiveViewport(metadata, effectiveViewportOffset);
+            BarrageUpdateMetadata.addEffectiveColumnSet(metadata, effectiveColumnSetOffset);
+            BarrageUpdateMetadata.addAddedRows(metadata, rowsAddedOffset);
+            BarrageUpdateMetadata.addRemovedRows(metadata, 0);
+            BarrageUpdateMetadata.addShiftData(metadata, shiftDataOffset);
+            BarrageUpdateMetadata.addAddedRowsIncluded(metadata, addedRowsIncludedOffset);
+            BarrageUpdateMetadata.addModColumnNodes(metadata, 0);
+            BarrageUpdateMetadata.addEffectiveReverseViewport(metadata, reverseViewport);
+            metadata.finish(BarrageUpdateMetadata.endBarrageUpdateMetadata(metadata));
+
+            final FlatBufferBuilder header = new FlatBufferBuilder();
+            final int payloadOffset = BarrageMessageWrapper.createMsgPayloadVector(header, metadata.dataBuffer());
+            BarrageMessageWrapper.startBarrageMessageWrapper(header);
+            BarrageMessageWrapper.addMagic(header, BarrageUtil.FLATBUFFER_MAGIC);
+            BarrageMessageWrapper.addMsgType(header, BarrageMessageType.BarrageUpdateMetadata);
+            BarrageMessageWrapper.addMsgPayload(header, payloadOffset);
+            header.finish(BarrageMessageWrapper.endBarrageMessageWrapper(header));
+
+            return header.dataBuffer().slice();
+        }
     }
 
     public static final class SchemaView implements View {
@@ -981,136 +1105,6 @@ private int appendModColumns(final View view, final long startRange, final int t
         return Math.toIntExact(numRows);
     }
 
-    private ByteBuffer getSubscriptionMetadata(final SubView view) throws IOException {
-        final FlatBufferBuilder metadata = new FlatBufferBuilder();
-
-        int effectiveViewportOffset = 0;
-        if (isSnapshot && view.isViewport()) {
-            try (final RowSetGenerator viewportGen = new RowSetGenerator(view.viewport)) {
-                effectiveViewportOffset = viewportGen.addToFlatBuffer(metadata);
-            }
-        }
-
-        int effectiveColumnSetOffset = 0;
-        if (isSnapshot && view.subscribedColumns != null) {
-            effectiveColumnSetOffset = new BitSetGenerator(view.subscribedColumns).addToFlatBuffer(metadata);
-        }
-
-        final int rowsAddedOffset;
-        if (isSnapshot && !view.isInitialSnapshot) {
-            // client's don't need/want to receive the full RowSet on every snapshot
-            rowsAddedOffset = EmptyRowSetGenerator.INSTANCE.addToFlatBuffer(metadata);
-        } else {
-            rowsAddedOffset = rowsAdded.addToFlatBuffer(metadata);
-        }
-
-        final int rowsRemovedOffset = rowsRemoved.addToFlatBuffer(metadata);
-        final int shiftDataOffset = shifted.addToFlatBuffer(metadata);
-
-        // Added Chunk Data:
-        int addedRowsIncludedOffset = 0;
-
-        // don't send `rowsIncluded` when identical to `rowsAdded`, client will infer they are the same
-        if (isSnapshot || !view.addRowKeys.equals(rowsAdded.original)) {
-            addedRowsIncludedOffset = rowsIncluded.addToFlatBuffer(view.addRowKeys, metadata);
-        }
-
-        // now add mod-column streams, and write the mod column indexes
-        TIntArrayList modOffsets = new TIntArrayList(modColumnData.length);
-        for (final ModColumnData mcd : modColumnData) {
-            final int myModRowOffset;
-            if (view.keyspaceViewport != null) {
-                myModRowOffset = mcd.rowsModified.addToFlatBuffer(view.keyspaceViewport, metadata);
-            } else {
-                myModRowOffset = mcd.rowsModified.addToFlatBuffer(metadata);
-            }
-            modOffsets.add(BarrageModColumnMetadata.createBarrageModColumnMetadata(metadata, myModRowOffset));
-        }
-
-        BarrageUpdateMetadata.startModColumnNodesVector(metadata, modOffsets.size());
-        modOffsets.forEachDescending(offset -> {
-            metadata.addOffset(offset);
-            return true;
-        });
-        final int nodesOffset = metadata.endVector();
-
-        BarrageUpdateMetadata.startBarrageUpdateMetadata(metadata);
-        BarrageUpdateMetadata.addIsSnapshot(metadata, isSnapshot);
-        BarrageUpdateMetadata.addFirstSeq(metadata, firstSeq);
-        BarrageUpdateMetadata.addLastSeq(metadata, lastSeq);
-        BarrageUpdateMetadata.addEffectiveViewport(metadata, effectiveViewportOffset);
-        BarrageUpdateMetadata.addEffectiveColumnSet(metadata, effectiveColumnSetOffset);
-        BarrageUpdateMetadata.addAddedRows(metadata, rowsAddedOffset);
-        BarrageUpdateMetadata.addRemovedRows(metadata, rowsRemovedOffset);
-        BarrageUpdateMetadata.addShiftData(metadata, shiftDataOffset);
-        BarrageUpdateMetadata.addAddedRowsIncluded(metadata, addedRowsIncludedOffset);
-        BarrageUpdateMetadata.addModColumnNodes(metadata, nodesOffset);
-        BarrageUpdateMetadata.addEffectiveReverseViewport(metadata, view.reverseViewport);
-        metadata.finish(BarrageUpdateMetadata.endBarrageUpdateMetadata(metadata));
-
-        final FlatBufferBuilder header = new FlatBufferBuilder();
-        final int payloadOffset = BarrageMessageWrapper.createMsgPayloadVector(header, metadata.dataBuffer());
-        BarrageMessageWrapper.startBarrageMessageWrapper(header);
-        BarrageMessageWrapper.addMagic(header, BarrageUtil.FLATBUFFER_MAGIC);
-        BarrageMessageWrapper.addMsgType(header, BarrageMessageType.BarrageUpdateMetadata);
-        BarrageMessageWrapper.addMsgPayload(header, payloadOffset);
-        header.finish(BarrageMessageWrapper.endBarrageMessageWrapper(header));
-
-        return header.dataBuffer().slice();
-    }
-
-    private ByteBuffer getSnapshotMetadata(final SnapshotView view) throws IOException {
-        final FlatBufferBuilder metadata = new FlatBufferBuilder();
-
-        int effectiveViewportOffset = 0;
-        if (view.isViewport()) {
-            try (final RowSetGenerator viewportGen = new RowSetGenerator(view.viewport)) {
-                effectiveViewportOffset = viewportGen.addToFlatBuffer(metadata);
-            }
-        }
-
-        int effectiveColumnSetOffset = 0;
-        if (view.subscribedColumns != null) {
-            effectiveColumnSetOffset = new BitSetGenerator(view.subscribedColumns).addToFlatBuffer(metadata);
-        }
-
-        final int rowsAddedOffset = rowsAdded.addToFlatBuffer(metadata);
-
-        // no shifts in a snapshot, but need to provide a valid structure
-        final int shiftDataOffset = shifted.addToFlatBuffer(metadata);
-
-        // Added Chunk Data:
-        int addedRowsIncludedOffset = 0;
-        // don't send `rowsIncluded` when identical to `rowsAdded`, client will infer they are the same
-        if (isSnapshot || !view.addRowKeys.equals(rowsAdded.original)) {
-            addedRowsIncludedOffset = rowsIncluded.addToFlatBuffer(view.addRowKeys, metadata);
-        }
-
-        BarrageUpdateMetadata.startBarrageUpdateMetadata(metadata);
-        BarrageUpdateMetadata.addIsSnapshot(metadata, isSnapshot);
-        BarrageUpdateMetadata.addFirstSeq(metadata, firstSeq);
-        BarrageUpdateMetadata.addLastSeq(metadata, lastSeq);
-        BarrageUpdateMetadata.addEffectiveViewport(metadata, effectiveViewportOffset);
-        BarrageUpdateMetadata.addEffectiveColumnSet(metadata, effectiveColumnSetOffset);
-        BarrageUpdateMetadata.addAddedRows(metadata, rowsAddedOffset);
-        BarrageUpdateMetadata.addRemovedRows(metadata, 0);
-        BarrageUpdateMetadata.addShiftData(metadata, shiftDataOffset);
-        BarrageUpdateMetadata.addAddedRowsIncluded(metadata, addedRowsIncludedOffset);
-        BarrageUpdateMetadata.addModColumnNodes(metadata, 0);
-        BarrageUpdateMetadata.addEffectiveReverseViewport(metadata, view.reverseViewport);
-        metadata.finish(BarrageUpdateMetadata.endBarrageUpdateMetadata(metadata));
-
-        final FlatBufferBuilder header = new FlatBufferBuilder();
-        final int payloadOffset = BarrageMessageWrapper.createMsgPayloadVector(header, metadata.dataBuffer());
-        BarrageMessageWrapper.startBarrageMessageWrapper(header);
-        BarrageMessageWrapper.addMagic(header, BarrageUtil.FLATBUFFER_MAGIC);
-        BarrageMessageWrapper.addMsgType(header, BarrageMessageType.BarrageUpdateMetadata);
-        BarrageMessageWrapper.addMsgPayload(header, payloadOffset);
-        header.finish(BarrageMessageWrapper.endBarrageMessageWrapper(header));
-
-        return header.dataBuffer().slice();
-    }
-
     public static abstract class ByteArrayGenerator {
         protected int len;
         protected byte[] raw;
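
The metadata writers that patch 030 moved into the views lean on a FlatBuffers quirk: builders write vectors back-to-front, which is why the mod-column offsets are pushed with `forEachDescending` yet come out in ascending order. A small sketch of just that iteration, assuming only trove4j on the classpath:

    import gnu.trove.list.array.TIntArrayList;

    final class DescendingSketch {
        public static void main(String[] args) {
            final TIntArrayList offsets = new TIntArrayList(new int[] {11, 22, 33});
            // Visits 33, 22, 11; in the real code each visit prepends an offset
            // to the builder, so the finished vector still reads 11, 22, 33.
            offsets.forEachDescending(offset -> {
                System.out.println(offset);
                return true; // returning true keeps the iteration going
            });
        }
    }
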
From d4b7d568e83ec13a1f3afba390b6ca7dcdd35c10 Mon Sep 17 00:00:00 2001
From: Colin Alworth
Date: Thu, 30 May 2024 16:22:10 -0500
Subject: [PATCH 031/219] Rename mod col generator, let it close its owned
 resources

---
 .../barrage/BarrageStreamGeneratorImpl.java   | 33 ++++++++++---------
 1 file changed, 17 insertions(+), 16 deletions(-)

diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java
index 8c405696da6..5fd243c1a31 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java
@@ -129,14 +129,20 @@ protected void writeHeader(
     }
 
-    public static class ModColumnData {
+    public static class ModColumnGenerator implements SafeCloseable {
         private final RowSetGenerator rowsModified;
         private final ChunkListInputStreamGenerator data;
 
-        ModColumnData(final BarrageMessage.ModColumnData col) throws IOException {
+        ModColumnGenerator(final BarrageMessage.ModColumnData col) throws IOException {
             rowsModified = new RowSetGenerator(col.rowsModified);
             data = new ChunkListInputStreamGenerator(col.type, col.componentType, col.data, col.chunkType);
         }
+
+        @Override
+        public void close() {
+            rowsModified.close();
+            data.close();
+        }
     }
 
@@ -153,7 +159,7 @@ public static class ModColumnData {
     private final RowSetShiftDataGenerator shifted;
 
     private final ChunkListInputStreamGenerator[] addColumnData;
-    private final ModColumnData[] modColumnData;
+    private final ModColumnGenerator[] modColumnData;
 
     /**
      * Create a barrage stream generator that can slice and dice the barrage message for delivery to clients.
@@ -182,9 +188,9 @@ public BarrageStreamGeneratorImpl(final BarrageMessage message,
                         columnData.data, columnData.chunkType);
             }
 
-            modColumnData = new ModColumnData[message.modColumnData.length];
+            modColumnData = new ModColumnGenerator[message.modColumnData.length];
             for (int i = 0; i < modColumnData.length; ++i) {
-                modColumnData[i] = new ModColumnData(message.modColumnData[i]);
+                modColumnData[i] = new ModColumnGenerator(message.modColumnData[i]);
             }
         } catch (final IOException e) {
             throw new UncheckedDeephavenException("unexpected IOException while creating barrage message stream", e);
@@ -207,15 +213,10 @@ public void close() {
         rowsRemoved.close();
 
         if (addColumnData != null) {
-            for (final ChunkListInputStreamGenerator in : addColumnData) {
-                in.close();
-            }
+            SafeCloseable.closeAll(addColumnData);
         }
         if (modColumnData != null) {
-            for (final ModColumnData mcd : modColumnData) {
-                mcd.rowsModified.close();
-                mcd.data.close();
-            }
+            SafeCloseable.closeAll(modColumnData);
         }
     }
 
@@ -288,7 +289,7 @@ public SubView(final BarrageSubscriptionOptions options,
             // precompute the modified column indexes, and calculate total rows needed
             long numModRows = 0;
             for (int ii = 0; ii < modColumnData.length; ++ii) {
-                final ModColumnData mcd = modColumnData[ii];
+                final ModColumnGenerator mcd = modColumnData[ii];
 
                 if (keyspaceViewport != null) {
                     try (WritableRowSet intersect = keyspaceViewport.intersect(mcd.rowsModified.original)) {
@@ -428,7 +429,7 @@ private ByteBuffer getSubscriptionMetadata() throws IOException {
             // now add mod-column streams, and write the mod column indexes
             TIntArrayList modOffsets = new TIntArrayList(modColumnData.length);
-            for (final ModColumnData mcd : modColumnData) {
+            for (final ModColumnGenerator mcd : modColumnData) {
                 final int myModRowOffset;
                 if (keyspaceViewport != null) {
                     myModRowOffset = mcd.rowsModified.addToFlatBuffer(keyspaceViewport, metadata);
@@ -1008,7 +1009,7 @@ private int appendModColumns(final View view, final long startRange, final int t
         // adjust the batch size if we would cross a chunk boundary
         for (int ii = 0; ii < modColumnData.length; ++ii) {
-            final ModColumnData mcd = modColumnData[ii];
+            final ModColumnGenerator mcd = modColumnData[ii];
             final List<ChunkInputStreamGenerator> generators = mcd.data.generators();
             if (generators.isEmpty()) {
                 continue;
@@ -1029,7 +1030,7 @@ private int appendModColumns(final View view, final long startRange, final int t
         // now add mod-column streams, and write the mod column indexes
         long numRows = 0;
         for (int ii = 0; ii < modColumnData.length; ++ii) {
-            final ModColumnData mcd = modColumnData[ii];
+            final ModColumnGenerator mcd = modColumnData[ii];
             final ChunkInputStreamGenerator generator = mcd.data.generators().isEmpty()
                     ? null
                     : mcd.data.generators().get(columnChunkIdx[ii]);
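
Patch 031 above lets each ModColumnGenerator close the resources it owns, so the outer close() can hand whole arrays to SafeCloseable.closeAll instead of unpacking them. A plain-JDK sketch of the same aggregation idea (Deephaven's SafeCloseable layers suppression handling on top of AutoCloseable, much like this):

    final class CompositeResource implements AutoCloseable {
        private final AutoCloseable[] parts;

        CompositeResource(final AutoCloseable... parts) {
            this.parts = parts;
        }

        @Override
        public void close() throws Exception {
            Exception failure = null;
            for (final AutoCloseable part : parts) {
                try {
                    part.close(); // keep closing later parts even if an earlier one threw
                } catch (Exception e) {
                    if (failure == null) {
                        failure = e;
                    } else {
                        failure.addSuppressed(e);
                    }
                }
            }
            if (failure != null) {
                throw failure;
            }
        }
    }
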
From 9e265d7e518bddd48e4c9cd790502a00131c1335 Mon Sep 17 00:00:00 2001
From: Colin Alworth
Date: Thu, 30 May 2024 16:24:49 -0500
Subject: [PATCH 032/219] Use FlatBufferBuilder's own createByteVector

---
 .../barrage/BarrageStreamGeneratorImpl.java   | 21 +++----------------
 1 file changed, 3 insertions(+), 18 deletions(-)

diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java
index 5fd243c1a31..ebc617d28b3 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java
@@ -824,21 +824,6 @@ protected void writeHeader(
         cos.flush();
     }
 
-    private static int createByteVector(final FlatBufferBuilder builder, final byte[] data, final int offset,
-            final int length) {
-        builder.startVector(1, length, 1);
-
-        if (length > 0) {
-            builder.prep(1, length - 1);
-
-            for (int i = length - 1; i >= 0; --i) {
-                builder.putByte(data[offset + i]);
-            }
-        }
-
-        return builder.endVector();
-    }
-
     private void processBatches(Consumer<InputStream> visitor, final View view,
             final long numRows, final int maxBatchSize, ByteBuffer metadata,
             final ColumnVisitor columnVisitor, final MutableLong bytesWritten) throws IOException {
@@ -1111,7 +1096,7 @@ public static abstract class ByteArrayGenerator {
         protected byte[] raw;
 
         protected int addToFlatBuffer(final FlatBufferBuilder builder) {
-            return createByteVector(builder, raw, 0, len);
+            return builder.createByteVector(raw, 0, len);
         }
     }
 
@@ -1163,7 +1148,7 @@ protected int addToFlatBuffer(final RowSet viewport, final FlatBufferBuilder bui
                 nlen = baos.size();
             }
 
-            return createByteVector(builder, nraw, 0, nlen);
+            return builder.createByteVector(nraw, 0, nlen);
         }
     }
 
@@ -1185,7 +1170,7 @@ public int addToFlatBuffer(final BitSet mine, final FlatBufferBuilder builder) t
             final byte[] nraw = mine.toByteArray();
             final int nBits = mine.previousSetBit(Integer.MAX_VALUE - 1) + 1;
             final int nlen = (int) ((long) nBits + 7) / 8;
-            return createByteVector(builder, nraw, 0, nlen);
+            return builder.createByteVector(nraw, 0, nlen);
        }
    }
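
Patch 032 above deletes the hand-rolled byte-vector helper because FlatBufferBuilder already ships the same primitive: createByteVector(byte[] arr, int offset, int size) copies the range into the buffer back-to-front and returns the vector's offset, exactly what the removed loop did. Each call site reduces to a one-liner, sketched here:

    import com.google.flatbuffers.FlatBufferBuilder;

    final class ByteVectorSketch {
        // Equivalent of the deleted helper, delegating to the library method.
        static int addBytes(final FlatBufferBuilder builder, final byte[] raw, final int len) {
            return builder.createByteVector(raw, 0, len);
        }
    }
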
From 1f17556a147c655f13d8c81a0904f28fa649c17b Mon Sep 17 00:00:00 2001
From: Colin Alworth
Date: Fri, 31 May 2024 12:42:33 -0500
Subject: [PATCH 033/219] Draw clearer lines with BSG contract, avoid
 referencing Impl

---
 .../barrage/BarrageStreamGenerator.java       | 15 ++--
 .../barrage/BarrageStreamGeneratorImpl.java   | 87 +++++-------------
 .../extensions/barrage/util/BarrageUtil.java  | 15 ++--
 .../barrage/util/TableToArrowConverter.java   |  5 +-
 .../server/arrow/ArrowFlightUtil.java         | 19 ++--
 .../deephaven/server/arrow/ArrowModule.java   | 10 ++-
 .../server/arrow/FlightServiceGrpcImpl.java   |  5 +-
 .../barrage/BarrageMessageProducer.java       | 52 ++++++-----
 .../HierarchicalTableViewSubscription.java    | 17 ++--
 .../server/barrage/BarrageBlinkTableTest.java |  7 +-
 .../barrage/BarrageMessageRoundTripTest.java  | 17 ++--
 11 files changed, 102 insertions(+), 147 deletions(-)

diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGenerator.java
index 08f91e3665d..730f8700781 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGenerator.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGenerator.java
@@ -10,25 +10,30 @@
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
 
+import java.io.IOException;
+import java.io.InputStream;
 import java.util.BitSet;
+import java.util.function.Consumer;
 import java.util.function.ToIntFunction;
 
 /**
  * A StreamGenerator takes a BarrageMessage and re-uses portions of the serialized payload across different subscribers
  * that may subscribe to different viewports and columns.
- *
- * @param <MessageView> The sub-view type that the listener expects to receive.
 */
-public interface BarrageStreamGenerator<MessageView> extends SafeCloseable {
+public interface BarrageStreamGenerator extends SafeCloseable {
 
-    interface Factory<MessageView> {
+    interface MessageView {
+        void forEachStream(Consumer<InputStream> visitor) throws IOException;
+    }
+
+    interface Factory {
        /**
         * Create a StreamGenerator that now owns the BarrageMessage.
        *
        * @param message the message that contains the update that we would like to propagate
        * @param metricsConsumer a method that can be used to record write metrics
        */
-        BarrageStreamGenerator<MessageView> newGenerator(
+        BarrageStreamGenerator newGenerator(
                BarrageMessage message, BarragePerformanceLog.WriteMetricsConsumer metricsConsumer);
 
        /**
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java
index ebc617d28b3..34162391125 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java
@@ -57,8 +57,7 @@
 import static io.deephaven.extensions.barrage.chunk.BaseChunkInputStreamGenerator.PADDING_BUFFER;
 import static io.deephaven.proto.flight.util.MessageHelper.toIpcBytes;
 
-public class BarrageStreamGeneratorImpl implements
-        BarrageStreamGenerator<BarrageStreamGeneratorImpl.View> {
+public class BarrageStreamGeneratorImpl implements BarrageStreamGenerator {
 
     private static final Logger log = LoggerFactory.getLogger(BarrageStreamGeneratorImpl.class);
 
     // NB: This should likely be something smaller, such as 1<<16, but since the js api is not yet able
@@ -75,37 +74,30 @@ public class BarrageStreamGeneratorImpl implements
             .getIntegerForClassWithDefault(BarrageStreamGeneratorImpl.class, "maxOutboundMessageSize",
                     100 * 1024 * 1024);
 
-    public interface View {
-        void forEachStream(Consumer<InputStream> visitor) throws IOException;
-
+    public interface RecordBatchMessageView extends MessageView {
         boolean isViewport();
 
         StreamReaderOptions options();
 
-        int clientMaxMessageSize();
-
         RowSet addRowOffsets();
 
         RowSet modRowOffsets(int col);
     }
 
-    public static class Factory
-            implements BarrageStreamGenerator.Factory<BarrageStreamGeneratorImpl.View> {
-        public Factory() {}
-
+    public static class Factory implements BarrageStreamGenerator.Factory {
         @Override
-        public BarrageStreamGenerator<View> newGenerator(
+        public BarrageStreamGenerator newGenerator(
                final BarrageMessage message, final BarragePerformanceLog.WriteMetricsConsumer metricsConsumer) {
             return new BarrageStreamGeneratorImpl(message, metricsConsumer);
         }
 
         @Override
-        public View getSchemaView(@NotNull final ToIntFunction<FlatBufferBuilder> schemaPayloadWriter) {
+        public MessageView getSchemaView(@NotNull final ToIntFunction<FlatBufferBuilder> schemaPayloadWriter) {
             final FlatBufferBuilder builder = new FlatBufferBuilder();
             final int schemaOffset = schemaPayloadWriter.applyAsInt(builder);
             builder.finish(MessageHelper.wrapInMessage(builder, schemaOffset,
                     org.apache.arrow.flatbuf.MessageHeader.Schema));
-            return new SchemaView(builder.dataBuffer());
+            return new SchemaMessageView(builder.dataBuffer());
         }
     }
 
@@ -114,7 +106,7 @@ public View getSchemaView(@NotNull final ToIntFunction schema
     */
     public static class ArrowFactory extends Factory {
         @Override
-        public BarrageStreamGenerator<View> newGenerator(
+        public BarrageStreamGenerator newGenerator(
                BarrageMessage message, BarragePerformanceLog.WriteMetricsConsumer metricsConsumer) {
             return new BarrageStreamGeneratorImpl(message, metricsConsumer) {
                 @Override
@@ -232,7 +224,7 @@ public void close() {
     * @return a MessageView filtered by the subscription properties that can be sent to that subscriber
     */
     @Override
-    public SubView getSubView(final BarrageSubscriptionOptions options,
+    public MessageView getSubView(final BarrageSubscriptionOptions options,
             final boolean isInitialSnapshot,
             @Nullable final RowSet viewport,
             final boolean reverseViewport,
@@ -250,11 +242,11 @@ public SubView getSubView(final BarrageSubscriptionOptions options,
     * @return a MessageView filtered by the subscription properties that can be sent to that subscriber
     */
     @Override
-    public SubView getSubView(BarrageSubscriptionOptions options, boolean isInitialSnapshot) {
+    public MessageView getSubView(BarrageSubscriptionOptions options, boolean isInitialSnapshot) {
         return getSubView(options, isInitialSnapshot, null, false, null, null);
     }
 
-    public final class SubView implements View {
+    private final class SubView implements RecordBatchMessageView {
         private final BarrageSubscriptionOptions options;
         private final boolean isInitialSnapshot;
         private final RowSet viewport;
@@ -365,11 +357,6 @@ private int batchSize() {
             return batchSize;
         }
 
-        @Override
-        public int clientMaxMessageSize() {
-            return options.maxMessageSize();
-        }
-
         @Override
         public boolean isViewport() {
             return viewport != null;
@@ -483,7 +470,7 @@ private ByteBuffer getSubscriptionMetadata() throws IOException {
     * @return a MessageView filtered by the snapshot properties that can be sent to that subscriber
     */
     @Override
-    public SnapshotView getSnapshotView(final BarrageSnapshotOptions options,
+    public MessageView getSnapshotView(final BarrageSnapshotOptions options,
             @Nullable final RowSet viewport,
             final boolean reverseViewport,
             @Nullable final RowSet keyspaceViewport,
@@ -498,11 +485,11 @@ public SnapshotView getSnapshotView(final BarrageSnapshotOptions options,
     * @return a MessageView filtered by the snapshot properties that can be sent to that subscriber
     */
     @Override
-    public SnapshotView getSnapshotView(BarrageSnapshotOptions options) {
+    public MessageView getSnapshotView(BarrageSnapshotOptions options) {
         return getSnapshotView(options, null, false, null, null);
     }
 
-    public final class SnapshotView implements View {
+    private final class SnapshotView implements RecordBatchMessageView {
         private final BarrageSnapshotOptions options;
         private final RowSet viewport;
         private final boolean reverseViewport;
@@ -565,11 +552,6 @@ private int batchSize() {
             return batchSize;
         }
 
-        @Override
-        public int clientMaxMessageSize() {
-            return options.maxMessageSize();
-        }
-
         @Override
         public boolean isViewport() {
             return viewport != null;
@@ -643,10 +625,10 @@ private ByteBuffer getSnapshotMetadata() throws IOException {
         }
     }
 
-    public static final class SchemaView implements View {
+    private static final class SchemaMessageView implements MessageView {
         private final byte[] msgBytes;
 
-        public SchemaView(final ByteBuffer buffer) {
+        public SchemaMessageView(final ByteBuffer buffer) {
             this.msgBytes = Flight.FlightData.newBuilder()
                     .setDataHeader(ByteStringAccess.wrap(buffer))
                     .build()
@@ -657,36 +639,11 @@ public SchemaView(final ByteBuffer buffer) {
         public void forEachStream(Consumer<InputStream> visitor) {
             visitor.accept(new DrainableByteArrayInputStream(msgBytes, 0, msgBytes.length));
         }
-
-        @Override
-        public boolean isViewport() {
-            return false;
-        }
-
-        @Override
-        public StreamReaderOptions options() {
-            return null;
-        }
-
-        @Override
-        public int clientMaxMessageSize() {
-            return 0;
-        }
-
-        @Override
-        public RowSet addRowOffsets() {
-            return null;
-        }
-
-        @Override
-        public RowSet modRowOffsets(int col) {
-            return null;
-        }
     }
 
     @FunctionalInterface
     private interface ColumnVisitor {
-        int visit(final View view, final long startRange, final int targetBatchSize,
+        int visit(final RecordBatchMessageView view, final long startRange, final int targetBatchSize,
                final Consumer<InputStream> addStream,
                final ChunkInputStreamGenerator.FieldNodeListener fieldNodeListener,
                final ChunkInputStreamGenerator.BufferListener bufferListener) throws IOException;
@@ -704,7 +661,7 @@ int visit(final View view, final long startRange, final int targetBatchSize,
     * @param columnVisitor the helper method responsible for appending the payload columns to the RecordBatch
     * @return an InputStream ready to be drained by GRPC
     */
-    private InputStream getInputStream(final View view, final long offset, final int targetBatchSize,
+    private InputStream getInputStream(final RecordBatchMessageView view, final long offset, final int targetBatchSize,
             final MutableInt actualBatchSize, final ByteBuffer metadata, final ColumnVisitor columnVisitor)
             throws IOException {
         final ArrayDeque<InputStream> streams = new ArrayDeque<>();
@@ -824,7 +781,7 @@ protected void writeHeader(
         cos.flush();
     }
 
-    private void processBatches(Consumer<InputStream> visitor, final View view,
+    private void processBatches(Consumer<InputStream> visitor, final RecordBatchMessageView view,
             final long numRows, final int maxBatchSize, ByteBuffer metadata,
             final ColumnVisitor columnVisitor, final MutableLong bytesWritten) throws IOException {
         long offset = 0;
@@ -833,8 +790,8 @@ private void processBatches(Consumer visitor, final View view,
         int batchSize = Math.min(DEFAULT_INITIAL_BATCH_SIZE, maxBatchSize);
 
         // allow the client to override the default message size
-        final int maxMessageSize =
-                view.clientMaxMessageSize() > 0 ? view.clientMaxMessageSize() : DEFAULT_MESSAGE_SIZE_LIMIT;
+        int clientMaxMessageSize = view.options().maxMessageSize();
+        final int maxMessageSize = clientMaxMessageSize > 0 ? clientMaxMessageSize : DEFAULT_MESSAGE_SIZE_LIMIT;
 
         // TODO (deephaven-core#188): remove this when JS API can accept multiple batches
         boolean sendAllowed = numRows <= batchSize;
@@ -917,7 +874,7 @@ private static int findGeneratorForOffset(final List
         return low;
     }
 
-    private int appendAddColumns(final View view, final long startRange, final int targetBatchSize,
+    private int appendAddColumns(final RecordBatchMessageView view, final long startRange, final int targetBatchSize,
             final Consumer<InputStream> addStream,
             final ChunkInputStreamGenerator.FieldNodeListener fieldNodeListener,
             final ChunkInputStreamGenerator.BufferListener bufferListener) throws IOException {
         if (addColumnData.length == 0) {
@@ -983,7 +940,7 @@ private int appendAddColumns(final View view, final long startRange, final int t
         }
     }
 
-    private int appendModColumns(final View view, final long startRange, final int targetBatchSize,
+    private int appendModColumns(final RecordBatchMessageView view, final long startRange, final int targetBatchSize,
             final Consumer<InputStream> addStream,
             final ChunkInputStreamGenerator.FieldNodeListener fieldNodeListener,
             final ChunkInputStreamGenerator.BufferListener bufferListener) throws IOException {
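
The interface this patch carves out is small on purpose: a MessageView only knows how to stream itself to a visitor, so transport code can fan one generator out to many subscribers without ever seeing Impl types. A self-contained sketch of that contract with simplified names (only the `forEachStream` shape is taken from the patch itself):

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.util.function.Consumer;

    interface MessageView {
        void forEachStream(Consumer<InputStream> visitor) throws IOException;
    }

    final class SingleBufferView implements MessageView {
        private final byte[] payload;

        SingleBufferView(final byte[] payload) {
            this.payload = payload;
        }

        @Override
        public void forEachStream(final Consumer<InputStream> visitor) {
            // a fresh stream per call, so several subscribers can drain the same bytes
            visitor.accept(new ByteArrayInputStream(payload));
        }
    }
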
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java
index 2bb0709898a..8175b32bcbb 100755
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java
@@ -28,7 +28,6 @@
 import io.deephaven.extensions.barrage.BarragePerformanceLog;
 import io.deephaven.extensions.barrage.BarrageSnapshotOptions;
 import io.deephaven.extensions.barrage.BarrageStreamGenerator;
-import io.deephaven.extensions.barrage.BarrageStreamGeneratorImpl;
 import io.deephaven.extensions.barrage.chunk.vector.VectorExpansionKernel;
 import io.deephaven.internal.log.LoggerFactory;
 import io.deephaven.io.logger.Logger;
@@ -702,13 +701,13 @@ private static Field arrowFieldForVectorType(
     }
 
     public static void createAndSendStaticSnapshot(
-            BarrageStreamGenerator.Factory<BarrageStreamGeneratorImpl.View> streamGeneratorFactory,
+            BarrageStreamGenerator.Factory streamGeneratorFactory,
             BaseTable<?> table, BitSet columns, RowSet viewport, boolean reverseViewport,
             BarrageSnapshotOptions snapshotRequestOptions,
-            StreamObserver<BarrageStreamGeneratorImpl.View> listener,
+            StreamObserver<BarrageStreamGenerator.MessageView> listener,
             BarragePerformanceLog.SnapshotMetricsHelper metrics) {
         // start with small value and grow
         long snapshotTargetCellCount = MIN_SNAPSHOT_CELL_COUNT;
@@ -755,8 +754,7 @@ public static void createAndSendStaticSnapshot(
                     // send out the data. Note that although a `BarrageUpdateMetaData` object will
                     // be provided with each unique snapshot, vanilla Flight clients will ignore
                     // these and see only an incoming stream of batches
-                    try (final BarrageStreamGenerator<BarrageStreamGeneratorImpl.View> bsg =
-                            streamGeneratorFactory.newGenerator(msg, metrics)) {
+                    try (final BarrageStreamGenerator bsg = streamGeneratorFactory.newGenerator(msg, metrics)) {
                         if (rsIt.hasMore()) {
                             listener.onNext(bsg.getSnapshotView(snapshotRequestOptions,
                                     snapshotViewport, false,
@@ -797,11 +795,11 @@ public static void createAndSendStaticSnapshot(
     }
 
     public static void createAndSendSnapshot(
-            BarrageStreamGenerator.Factory<BarrageStreamGeneratorImpl.View> streamGeneratorFactory,
+            BarrageStreamGenerator.Factory streamGeneratorFactory,
             BaseTable<?> table, BitSet columns, RowSet viewport, boolean reverseViewport,
             BarrageSnapshotOptions snapshotRequestOptions,
-            StreamObserver<BarrageStreamGeneratorImpl.View> listener,
+            StreamObserver<BarrageStreamGenerator.MessageView> listener,
             BarragePerformanceLog.SnapshotMetricsHelper metrics) {
 
         // if the table is static and a full snapshot is requested, we can make and send multiple
@@ -828,8 +826,7 @@ public static void createAndSendSnapshot(
         msg.modColumnData = BarrageMessage.ZERO_MOD_COLUMNS; // no mod column data
 
         // translate the viewport to keyspace and make the call
-        try (final BarrageStreamGenerator<BarrageStreamGeneratorImpl.View> bsg =
-                streamGeneratorFactory.newGenerator(msg, metrics);
+        try (final BarrageStreamGenerator bsg = streamGeneratorFactory.newGenerator(msg, metrics);
                 final RowSet keySpaceViewport = viewport != null
                         ? msg.rowsAdded.subSetForPositions(viewport, reverseViewport)
                         : null) {
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/TableToArrowConverter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/TableToArrowConverter.java
index c3ccb4df4c3..88cb365980c 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/TableToArrowConverter.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/TableToArrowConverter.java
@@ -5,6 +5,7 @@
 
 import io.deephaven.engine.table.impl.BaseTable;
 import io.deephaven.extensions.barrage.BarragePerformanceLog;
+import io.deephaven.extensions.barrage.BarrageStreamGenerator;
 import io.deephaven.extensions.barrage.BarrageStreamGeneratorImpl;
 import io.grpc.Drainable;
 import io.grpc.stub.StreamObserver;
@@ -58,11 +59,11 @@ public byte[] next() {
         return listener.batchMessages.pop();
     }
 
-    private static class ArrowBuilderObserver implements StreamObserver<BarrageStreamGeneratorImpl.View> {
+    private static class ArrowBuilderObserver implements StreamObserver<BarrageStreamGenerator.MessageView> {
         final Deque<byte[]> batchMessages = new ArrayDeque<>();
 
         @Override
-        public void onNext(final BarrageStreamGeneratorImpl.View messageView) {
+        public void onNext(final BarrageStreamGenerator.MessageView messageView) {
             try {
                 messageView.forEachStream(inputStream -> {
                     try (final BarrageProtoUtil.ExposedByteArrayOutputStream baos =
diff --git a/server/src/main/java/io/deephaven/server/arrow/ArrowFlightUtil.java b/server/src/main/java/io/deephaven/server/arrow/ArrowFlightUtil.java
index 27466f2729f..12162bceda9 100644
--- a/server/src/main/java/io/deephaven/server/arrow/ArrowFlightUtil.java
+++ b/server/src/main/java/io/deephaven/server/arrow/ArrowFlightUtil.java
@@ -38,7 +38,6 @@
 import io.deephaven.proto.util.Exceptions;
 import io.deephaven.proto.util.ExportTicketHelper;
 import io.deephaven.server.barrage.BarrageMessageProducer;
-import io.deephaven.extensions.barrage.BarrageStreamGeneratorImpl;
 import io.deephaven.server.hierarchicaltable.HierarchicalTableView;
 import io.deephaven.server.hierarchicaltable.HierarchicalTableViewSubscription;
 import io.deephaven.server.session.SessionService;
@@ -67,7 +66,7 @@ public class ArrowFlightUtil {
             Configuration.getInstance().getIntegerWithDefault("barrage.minUpdateInterval", 1000);
 
     public static void DoGetCustom(
-            final BarrageStreamGenerator.Factory<BarrageStreamGeneratorImpl.View> streamGeneratorFactory,
+            final BarrageStreamGenerator.Factory streamGeneratorFactory,
             final SessionState session,
             final TicketRouter ticketRouter,
             final Flight.Ticket request,
@@ -105,7 +104,7 @@ public static void DoGetCustom(
             metrics.tableKey = BarragePerformanceLog.getKeyFor(table);
 
             // create an adapter for the response observer
-            final StreamObserver<BarrageStreamGeneratorImpl.View> listener =
+            final StreamObserver<BarrageStreamGenerator.MessageView> listener =
                    ArrowModule.provideListenerAdapter().adapt(observer);
 
             // push the schema to the listener
@@ -327,15 +326,15 @@ public interface Factory {
 
         private final String myPrefix;
         private final SessionState session;
 
-        private final StreamObserver<BarrageStreamGeneratorImpl.View> listener;
+        private final StreamObserver<BarrageStreamGenerator.MessageView> listener;
 
         private boolean isClosed = false;
         private boolean isFirstMsg = true;
 
         private final TicketRouter ticketRouter;
-        private final BarrageStreamGenerator.Factory<BarrageStreamGeneratorImpl.View> streamGeneratorFactory;
-        private final BarrageMessageProducer.Operation.Factory<BarrageStreamGeneratorImpl.View> bmpOperationFactory;
+        private final BarrageStreamGenerator.Factory streamGeneratorFactory;
+        private final BarrageMessageProducer.Operation.Factory bmpOperationFactory;
         private final HierarchicalTableViewSubscription.Factory htvsFactory;
         private final BarrageMessageProducer.Adapter subscriptionOptAdapter;
         private final BarrageMessageProducer.Adapter snapshotOptAdapter;
@@ -353,10 +352,10 @@ interface Handler extends Closeable {
         @AssistedInject
         public DoExchangeMarshaller(
                final TicketRouter ticketRouter,
-                final BarrageStreamGenerator.Factory<BarrageStreamGeneratorImpl.View> streamGeneratorFactory,
-                final BarrageMessageProducer.Operation.Factory<BarrageStreamGeneratorImpl.View> bmpOperationFactory,
+                final BarrageStreamGenerator.Factory streamGeneratorFactory,
+                final BarrageMessageProducer.Operation.Factory bmpOperationFactory,
                final HierarchicalTableViewSubscription.Factory htvsFactory,
-                final BarrageMessageProducer.Adapter<StreamObserver<InputStream>, StreamObserver<BarrageStreamGeneratorImpl.View>> listenerAdapter,
+                final BarrageMessageProducer.Adapter<StreamObserver<InputStream>, StreamObserver<BarrageStreamGenerator.MessageView>> listenerAdapter,
                final BarrageMessageProducer.Adapter subscriptionOptAdapter,
                final BarrageMessageProducer.Adapter snapshotOptAdapter,
                final SessionService.ErrorTransformer errorTransformer,
@@ -612,7 +611,7 @@ public void close() {
 
         private class SubscriptionRequestHandler implements Handler {
 
-            private BarrageMessageProducer<BarrageStreamGeneratorImpl.View> bmp;
+            private BarrageMessageProducer bmp;
             private HierarchicalTableViewSubscription htvs;
 
             private Queue preExportSubscriptions;
diff --git a/server/src/main/java/io/deephaven/server/arrow/ArrowModule.java b/server/src/main/java/io/deephaven/server/arrow/ArrowModule.java
index 727bcf51368..5bbd59dfd67 100644
--- a/server/src/main/java/io/deephaven/server/arrow/ArrowModule.java
+++ b/server/src/main/java/io/deephaven/server/arrow/ArrowModule.java
@@ -32,17 +32,19 @@ public abstract class ArrowModule {
     @IntoSet
     abstract BindableService bindBrowserFlightServiceBinding(BrowserFlightServiceGrpcBinding service);
 
-    @Provides
+    @Binds
     @Singleton
-    static BarrageStreamGenerator.Factory<BarrageStreamGeneratorImpl.View> bindStreamGenerator() {
+    static BarrageStreamGenerator.Factory bindStreamGenerator() {
         return new BarrageStreamGeneratorImpl.Factory();
     }
+
+    // TODO before commit, try getting rid of this
 
     @Provides
-    static BarrageMessageProducer.Adapter<StreamObserver<InputStream>, StreamObserver<BarrageStreamGeneratorImpl.View>> provideListenerAdapter() {
+    static BarrageMessageProducer.Adapter<StreamObserver<InputStream>, StreamObserver<BarrageStreamGenerator.MessageView>> provideListenerAdapter() {
         return delegate -> new StreamObserver<>() {
             @Override
-            public void onNext(final BarrageStreamGeneratorImpl.View view) {
+            public void onNext(final BarrageStreamGenerator.MessageView view) {
                try {
                    synchronized (delegate) {
                        view.forEachStream(delegate::onNext);
diff --git a/server/src/main/java/io/deephaven/server/arrow/FlightServiceGrpcImpl.java b/server/src/main/java/io/deephaven/server/arrow/FlightServiceGrpcImpl.java
index ca2df8a7827..f290dc75860 100644
--- a/server/src/main/java/io/deephaven/server/arrow/FlightServiceGrpcImpl.java
+++ b/server/src/main/java/io/deephaven/server/arrow/FlightServiceGrpcImpl.java
@@ -19,7 +19,6 @@
 import io.deephaven.io.logger.Logger;
 import io.deephaven.proto.backplane.grpc.ExportNotification;
 import io.deephaven.proto.backplane.grpc.WrappedAuthenticationRequest;
-import io.deephaven.extensions.barrage.BarrageStreamGeneratorImpl;
 import io.deephaven.proto.util.Exceptions;
 import io.deephaven.server.session.SessionService;
 import io.deephaven.server.session.SessionState;
@@ -45,7 +44,7 @@ public class FlightServiceGrpcImpl extends FlightServiceGrpc.FlightServiceImplBa
     private static final Logger log = LoggerFactory.getLogger(FlightServiceGrpcImpl.class);
 
     private final ScheduledExecutorService executorService;
-    private final BarrageStreamGenerator.Factory<BarrageStreamGeneratorImpl.View> streamGeneratorFactory;
+    private final BarrageStreamGenerator.Factory streamGeneratorFactory;
     private final SessionService sessionService;
     private final SessionService.ErrorTransformer errorTransformer;
     private final TicketRouter ticketRouter;
@@ -56,7 +55,7 @@ public class FlightServiceGrpcImpl extends FlightServiceGrpc.FlightServiceImplBa
     @Inject
     public FlightServiceGrpcImpl(
             @Nullable final ScheduledExecutorService executorService,
-            final BarrageStreamGenerator.Factory<BarrageStreamGeneratorImpl.View> streamGeneratorFactory,
+            final BarrageStreamGenerator.Factory streamGeneratorFactory,
             final SessionService sessionService,
             final SessionService.ErrorTransformer errorTransformer,
             final TicketRouter ticketRouter,
diff --git a/server/src/main/java/io/deephaven/server/barrage/BarrageMessageProducer.java b/server/src/main/java/io/deephaven/server/barrage/BarrageMessageProducer.java
index d62d56d0445..518ed1ab699 100644
--- a/server/src/main/java/io/deephaven/server/barrage/BarrageMessageProducer.java
+++ b/server/src/main/java/io/deephaven/server/barrage/BarrageMessageProducer.java
@@ -81,10 +81,8 @@
 * inside the same JVM.
 *
 * The client-side counterpart of this is the {@link StreamReader}.
- *
- * @param <MessageView> The sub-view type that the listener expects to receive.
 */
-public class BarrageMessageProducer<MessageView> extends LivenessArtifact
+public class BarrageMessageProducer extends LivenessArtifact
        implements DynamicNode, NotificationStepReceiver {
     private static final int DELTA_CHUNK_SIZE = Configuration.getInstance().getIntegerForClassWithDefault(
             BarrageMessageProducer.class, "deltaChunkSize", ChunkPoolConstants.LARGEST_POOLED_CHUNK_CAPACITY);
@@ -108,17 +106,17 @@ public interface Adapter {
         V adapt(T t);
     }
 
-    public static class Operation<MessageView>
-            implements QueryTable.MemoizableOperation<BarrageMessageProducer<MessageView>> {
+    public static class Operation
+            implements QueryTable.MemoizableOperation<BarrageMessageProducer> {
 
         @AssistedFactory
-        public interface Factory<MessageView> {
-            Operation<MessageView> create(BaseTable<?> parent, long updateIntervalMs);
+        public interface Factory {
+            Operation create(BaseTable<?> parent, long updateIntervalMs);
         }
 
         private final Scheduler scheduler;
         private final SessionService.ErrorTransformer errorTransformer;
-        private final BarrageStreamGenerator.Factory<MessageView> streamGeneratorFactory;
+        private final BarrageStreamGenerator.Factory streamGeneratorFactory;
         private final BaseTable<?> parent;
         private final long updateIntervalMs;
         private final Runnable onGetSnapshot;
@@ -127,7 +125,7 @@ public interface Factory {
         public Operation(
                final Scheduler scheduler,
                final SessionService.ErrorTransformer errorTransformer,
-                final BarrageStreamGenerator.Factory<MessageView> streamGeneratorFactory,
+                final BarrageStreamGenerator.Factory streamGeneratorFactory,
                @Assisted final BaseTable<?> parent,
                @Assisted final long updateIntervalMs) {
             this(scheduler, errorTransformer, streamGeneratorFactory, parent, updateIntervalMs, null);
@@ -137,7 +135,7 @@ public Operation(
         public Operation(
                final Scheduler scheduler,
                final SessionService.ErrorTransformer errorTransformer,
-                final BarrageStreamGenerator.Factory<MessageView> streamGeneratorFactory,
+                final BarrageStreamGenerator.Factory streamGeneratorFactory,
                final BaseTable<?> parent,
                final long updateIntervalMs,
                @Nullable final Runnable onGetSnapshot) {
@@ -165,10 +163,10 @@ public MemoizedOperationKey getMemoizedOperationKey() {
         }
 
         @Override
-        public Result<BarrageMessageProducer<MessageView>> initialize(final boolean usePrev,
+        public Result<BarrageMessageProducer> initialize(final boolean usePrev,
                final long beforeClock) {
-            final BarrageMessageProducer<MessageView> result = new BarrageMessageProducer<MessageView>(
-                    scheduler, errorTransformer, streamGeneratorFactory, parent, updateIntervalMs, onGetSnapshot);
+            final BarrageMessageProducer result = new BarrageMessageProducer(scheduler, errorTransformer,
+                    streamGeneratorFactory, parent, updateIntervalMs, onGetSnapshot);
             return new Result<>(result, result.constructListener());
         }
     }
@@ -199,7 +197,7 @@ public int hashCode() {
     private final String logPrefix;
     private final Scheduler scheduler;
     private final SessionService.ErrorTransformer errorTransformer;
-    private final BarrageStreamGenerator.Factory<MessageView> streamGeneratorFactory;
+    private final BarrageStreamGenerator.Factory streamGeneratorFactory;
     private final BaseTable<?> parent;
     private final long updateIntervalMs;
@@ -308,7 +306,7 @@ public void close() {
     public BarrageMessageProducer(
             final Scheduler scheduler,
             final SessionService.ErrorTransformer errorTransformer,
-            final BarrageStreamGenerator.Factory<MessageView> streamGeneratorFactory,
+            final BarrageStreamGenerator.Factory streamGeneratorFactory,
             final BaseTable<?> parent,
             final long updateIntervalMs,
             final Runnable onGetSnapshot) {
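
The BarrageMessageProducer hunks continuing below finish the same theme: dropping the class-level type parameter in favor of the nested MessageView interface. A before/after sketch with hypothetical minimal types, not the Deephaven classes themselves:

    // Before: every declaration must repeat the payload type parameter.
    class ProducerBefore<V> {
        void subscribe(final java.util.function.Consumer<V> listener) {}
    }

    // After: call sites write plain ProducerAfter, and transport code depends
    // only on the small MessageView interface rather than a concrete Impl type.
    class ProducerAfter {
        interface MessageView {}

        void subscribe(final java.util.function.Consumer<MessageView> listener) {}
    }
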
class Subscription { final BarrageSubscriptionOptions options; - final StreamObserver listener; + final StreamObserver listener; final String logPrefix; RowSet viewport; // active viewport @@ -445,7 +443,7 @@ private class Subscription { WritableRowSet growingIncrementalViewport = null; // rows to be sent to the client from the current snapshot boolean isFirstSnapshot; // is this the first snapshot after a change to a subscriptions - private Subscription(final StreamObserver listener, + private Subscription(final StreamObserver listener, final BarrageSubscriptionOptions options, final BitSet subscribedColumns, @Nullable final RowSet initialViewport, @@ -473,7 +471,7 @@ public boolean isViewport() { * @param columnsToSubscribe The initial columns to subscribe to * @param initialViewport Initial viewport, to be owned by the subscription */ - public void addSubscription(final StreamObserver listener, + public void addSubscription(final StreamObserver listener, final BarrageSubscriptionOptions options, @Nullable final BitSet columnsToSubscribe, @Nullable final RowSet initialViewport, @@ -518,7 +516,7 @@ public void addSubscription(final StreamObserver listener, } } - private boolean findAndUpdateSubscription(final StreamObserver listener, + private boolean findAndUpdateSubscription(final StreamObserver listener, final Consumer updateSubscription) { final Function, Boolean> findAndUpdate = (List subscriptions) -> { for (final Subscription sub : subscriptions) { @@ -546,13 +544,14 @@ private boolean findAndUpdateSubscription(final StreamObserver list } } - public boolean updateSubscription(final StreamObserver listener, + public boolean updateSubscription(final StreamObserver listener, @Nullable final RowSet newViewport, @Nullable final BitSet columnsToSubscribe) { // assume forward viewport when not specified return updateSubscription(listener, newViewport, columnsToSubscribe, false); } - public boolean updateSubscription(final StreamObserver listener, @Nullable final RowSet newViewport, + public boolean updateSubscription(final StreamObserver listener, + @Nullable final RowSet newViewport, @Nullable final BitSet columnsToSubscribe, final boolean newReverseViewport) { return findAndUpdateSubscription(listener, sub -> { if (sub.pendingViewport != null) { @@ -582,7 +581,7 @@ public boolean updateSubscription(final StreamObserver listener, @N }); } - public void removeSubscription(final StreamObserver listener) { + public void removeSubscription(final StreamObserver listener) { findAndUpdateSubscription(listener, sub -> { sub.pendingDelete = true; if (log.isDebugEnabled()) { @@ -1457,7 +1456,7 @@ private void updateSubscriptionsSnapshotAndPropagate() { } if (snapshot != null) { - try (final BarrageStreamGenerator snapshotGenerator = + try (final BarrageStreamGenerator snapshotGenerator = streamGeneratorFactory.newGenerator(snapshot, this::recordWriteMetrics)) { if (log.isDebugEnabled()) { log.debug().append(logPrefix).append("Sending snapshot to ").append(activeSubscriptions.size()) @@ -1515,7 +1514,7 @@ private void updateSubscriptionsSnapshotAndPropagate() { private void propagateToSubscribers(final BarrageMessage message, final RowSet propRowSetForMessage) { // message is released via transfer to stream generator (as it must live until all view's are closed) - try (final BarrageStreamGenerator generator = streamGeneratorFactory.newGenerator( + try (final BarrageStreamGenerator generator = streamGeneratorFactory.newGenerator( message, this::recordWriteMetrics)) { for (final Subscription 
subscription : activeSubscriptions) { if (subscription.pendingInitialSnapshot || subscription.pendingDelete) { @@ -1567,9 +1566,8 @@ private void clearObjectDeltaColumns(@NotNull final BitSet objectColumnsToClear) } } - private void propagateSnapshotForSubscription( - final Subscription subscription, - final BarrageStreamGenerator snapshotGenerator) { + private void propagateSnapshotForSubscription(final Subscription subscription, + final BarrageStreamGenerator snapshotGenerator) { boolean needsSnapshot = subscription.pendingInitialSnapshot; // This is a little confusing, but by the time we propagate, the `snapshotViewport`/`snapshotColumns` objects diff --git a/server/src/main/java/io/deephaven/server/hierarchicaltable/HierarchicalTableViewSubscription.java b/server/src/main/java/io/deephaven/server/hierarchicaltable/HierarchicalTableViewSubscription.java index 695c2b73ac5..4bf7f7e52b4 100644 --- a/server/src/main/java/io/deephaven/server/hierarchicaltable/HierarchicalTableViewSubscription.java +++ b/server/src/main/java/io/deephaven/server/hierarchicaltable/HierarchicalTableViewSubscription.java @@ -34,7 +34,6 @@ import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; -import java.io.IOException; import java.time.Instant; import java.util.ArrayList; import java.util.BitSet; @@ -55,7 +54,7 @@ public class HierarchicalTableViewSubscription extends LivenessArtifact { public interface Factory { HierarchicalTableViewSubscription create( HierarchicalTableView view, - StreamObserver listener, + StreamObserver listener, BarrageSubscriptionOptions subscriptionOptions, long intervalMillis); } @@ -64,10 +63,10 @@ HierarchicalTableViewSubscription create( private final Scheduler scheduler; private final SessionService.ErrorTransformer errorTransformer; - private final BarrageStreamGenerator.Factory streamGeneratorFactory; + private final BarrageStreamGenerator.Factory streamGeneratorFactory; private final HierarchicalTableView view; - private final StreamObserver listener; + private final StreamObserver listener; private final BarrageSubscriptionOptions subscriptionOptions; private final long intervalDurationNanos; @@ -106,9 +105,9 @@ private enum State { public HierarchicalTableViewSubscription( @NotNull final Scheduler scheduler, @NotNull final SessionService.ErrorTransformer errorTransformer, - @NotNull final BarrageStreamGenerator.Factory streamGeneratorFactory, + @NotNull final BarrageStreamGenerator.Factory streamGeneratorFactory, @Assisted @NotNull final HierarchicalTableView view, - @Assisted @NotNull final StreamObserver listener, + @Assisted @NotNull final StreamObserver listener, @Assisted @NotNull final BarrageSubscriptionOptions subscriptionOptions, @Assisted final long intervalDurationMillis) { this.scheduler = scheduler; @@ -293,8 +292,8 @@ private void process() { } private static long buildAndSendSnapshot( - @NotNull final BarrageStreamGenerator.Factory streamGeneratorFactory, - @NotNull final StreamObserver listener, + @NotNull final BarrageStreamGenerator.Factory streamGeneratorFactory, + @NotNull final StreamObserver listener, @NotNull final BarrageSubscriptionOptions subscriptionOptions, @NotNull final HierarchicalTableView view, @NotNull final LongConsumer snapshotNanosConsumer, @@ -356,7 +355,7 @@ private static long buildAndSendSnapshot( barrageMessage.modColumnData = BarrageMessage.ZERO_MOD_COLUMNS; // 5. 
Send the BarrageMessage - final BarrageStreamGenerator streamGenerator = + final BarrageStreamGenerator streamGenerator = streamGeneratorFactory.newGenerator(barrageMessage, writeMetricsConsumer); // Note that we're always specifying "isInitialSnapshot=true". This is to provoke the subscription view to // send the added rows on every snapshot, since (1) our added rows are flat, and thus cheap to send, and diff --git a/server/src/test/java/io/deephaven/server/barrage/BarrageBlinkTableTest.java b/server/src/test/java/io/deephaven/server/barrage/BarrageBlinkTableTest.java index bce51e45681..dcb7445077e 100644 --- a/server/src/test/java/io/deephaven/server/barrage/BarrageBlinkTableTest.java +++ b/server/src/test/java/io/deephaven/server/barrage/BarrageBlinkTableTest.java @@ -28,7 +28,6 @@ import io.deephaven.engine.util.TableDiff; import io.deephaven.engine.util.TableTools; import io.deephaven.extensions.barrage.BarrageStreamGenerator; -import io.deephaven.extensions.barrage.BarrageStreamGeneratorImpl; import io.deephaven.extensions.barrage.BarrageSubscriptionOptions; import io.deephaven.extensions.barrage.table.BarrageTable; import io.deephaven.extensions.barrage.util.BarrageStreamReader; @@ -63,7 +62,7 @@ public class BarrageBlinkTableTest extends RefreshingTableTestCase { private QueryTable sourceTable; private TrackingWritableRowSet blinkRowSet; private QueryTable blinkTable; - private BarrageMessageProducer barrageMessageProducer; + private BarrageMessageProducer barrageMessageProducer; private TableUpdateValidator originalTUV; private FailureListener originalTUVListener; @@ -72,7 +71,7 @@ public class BarrageBlinkTableTest extends RefreshingTableTestCase { ArrowModule.class }) public interface TestComponent { - BarrageStreamGenerator.Factory getStreamGeneratorFactory(); + BarrageStreamGenerator.Factory getStreamGeneratorFactory(); @Component.Builder interface Builder { @@ -101,7 +100,7 @@ public void setUp() throws Exception { blinkTable.setRefreshing(true); blinkTable.setAttribute(Table.BLINK_TABLE_ATTRIBUTE, true); - barrageMessageProducer = blinkTable.getResult(new BarrageMessageProducer.Operation<>( + barrageMessageProducer = blinkTable.getResult(new BarrageMessageProducer.Operation( scheduler, new SessionService.ObfuscatingErrorTransformer(), daggerRoot.getStreamGeneratorFactory(), blinkTable, UPDATE_INTERVAL, () -> { })); diff --git a/server/src/test/java/io/deephaven/server/barrage/BarrageMessageRoundTripTest.java b/server/src/test/java/io/deephaven/server/barrage/BarrageMessageRoundTripTest.java index 5f3351f8f04..314cd1db623 100644 --- a/server/src/test/java/io/deephaven/server/barrage/BarrageMessageRoundTripTest.java +++ b/server/src/test/java/io/deephaven/server/barrage/BarrageMessageRoundTripTest.java @@ -27,7 +27,6 @@ import io.deephaven.engine.util.TableDiff; import io.deephaven.engine.util.TableTools; import io.deephaven.extensions.barrage.BarrageStreamGenerator; -import io.deephaven.extensions.barrage.BarrageStreamGeneratorImpl; import io.deephaven.extensions.barrage.BarrageSubscriptionOptions; import io.deephaven.extensions.barrage.table.BarrageTable; import io.deephaven.extensions.barrage.util.BarrageProtoUtil; @@ -76,7 +75,7 @@ public class BarrageMessageRoundTripTest extends RefreshingTableTestCase { ArrowModule.class }) public interface TestComponent { - BarrageStreamGenerator.Factory getStreamGeneratorFactory(); + BarrageStreamGenerator.Factory getStreamGeneratorFactory(); @Component.Builder interface Builder { @@ -149,7 +148,7 @@ private class RemoteClient { 
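The hunks above and below repeat one mechanical change at every call site: with the message-format type parameter removed, BarrageMessageProducer and its Operation are constructed without a diamond, and observers are typed uniformly against BarrageStreamGenerator.MessageView. A minimal sketch of a post-change call site, assuming the Operation constructor signature shown in these hunks; the class and its parameters are illustrative placeholders, and the import paths follow those used elsewhere in this series:

    import io.deephaven.engine.table.impl.QueryTable;
    import io.deephaven.extensions.barrage.BarrageStreamGenerator;
    import io.deephaven.server.barrage.BarrageMessageProducer;
    import io.deephaven.server.session.SessionService;
    import io.deephaven.server.util.Scheduler;

    // Hypothetical call-site sketch, not part of the patch: construct the raw
    // (non-generic) Operation and run it through QueryTable.getResult, as the
    // tests in this series do.
    final class ExampleProducerCallSite {
        static BarrageMessageProducer create(
                final QueryTable parent,
                final Scheduler scheduler,
                final SessionService.ErrorTransformer errorTransformer,
                final BarrageStreamGenerator.Factory streamGeneratorFactory,
                final long updateIntervalMs) {
            // No view-type parameter survives the change; every producer now
            // emits BarrageStreamGenerator.MessageView to its listeners.
            return parent.getResult(new BarrageMessageProducer.Operation(
                    scheduler, errorTransformer, streamGeneratorFactory,
                    parent, updateIntervalMs));
        }
    }

Because Operation implements QueryTable.MemoizableOperation, repeated subscriptions against the same parent table and update interval can share a single producer rather than building one per listener.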
private final BarrageTable barrageTable; @ReferentialIntegrity - private final BarrageMessageProducer barrageMessageProducer; + private final BarrageMessageProducer barrageMessageProducer; @ReferentialIntegrity private final TableUpdateValidator replicatedTUV; @@ -163,14 +162,14 @@ private class RemoteClient { // The replicated table's TableUpdateValidator will be confused if the table is a viewport. Instead we rely on // comparing the producer table to the consumer table to validate contents are correct. RemoteClient(final RowSet viewport, final BitSet subscribedColumns, - final BarrageMessageProducer barrageMessageProducer, + final BarrageMessageProducer barrageMessageProducer, final Table sourceTable, final String name) { // assume a forward viewport when not specified this(viewport, subscribedColumns, barrageMessageProducer, sourceTable, name, false, false); } RemoteClient(final RowSet viewport, final BitSet subscribedColumns, - final BarrageMessageProducer barrageMessageProducer, + final BarrageMessageProducer barrageMessageProducer, final Table sourceTable, final String name, final boolean reverseViewport, final boolean deferSubscription) { this.viewport = viewport; @@ -342,7 +341,7 @@ private class RemoteNugget implements EvalNuggetInterface { private final QueryTable originalTable; @ReferentialIntegrity - private final BarrageMessageProducer barrageMessageProducer; + private final BarrageMessageProducer barrageMessageProducer; @ReferentialIntegrity private final TableUpdateValidator originalTUV; @@ -354,7 +353,7 @@ private class RemoteNugget implements EvalNuggetInterface { RemoteNugget(final Supplier makeTable) { this.makeTable = makeTable; this.originalTable = (QueryTable) makeTable.get(); - this.barrageMessageProducer = originalTable.getResult(new BarrageMessageProducer.Operation<>(scheduler, + this.barrageMessageProducer = originalTable.getResult(new BarrageMessageProducer.Operation(scheduler, new SessionService.ObfuscatingErrorTransformer(), daggerRoot.getStreamGeneratorFactory(), originalTable, UPDATE_INTERVAL, this::onGetSnapshot)); @@ -1410,7 +1409,7 @@ public void createTable() { } } - public static class DummyObserver implements StreamObserver { + public static class DummyObserver implements StreamObserver { volatile boolean completed = false; private final BarrageDataMarshaller marshaller; @@ -1422,7 +1421,7 @@ public static class DummyObserver implements StreamObserver { try (final BarrageProtoUtil.ExposedByteArrayOutputStream baos = From cae1551b12be838beeb426bcd41c99df8dc2d625 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 31 May 2024 20:02:47 -0500 Subject: [PATCH 034/219] Revised draft at minimizing use of stream adapters --- .../server/arrow/ArrowFlightUtil.java | 37 ++++++++++++++++-- .../deephaven/server/arrow/ArrowModule.java | 38 +------------------ .../barrage/BarrageMessageProducer.java | 4 +- 3 files changed, 37 insertions(+), 42 deletions(-) diff --git a/server/src/main/java/io/deephaven/server/arrow/ArrowFlightUtil.java b/server/src/main/java/io/deephaven/server/arrow/ArrowFlightUtil.java index 12162bceda9..f25d5f3abb9 100644 --- a/server/src/main/java/io/deephaven/server/arrow/ArrowFlightUtil.java +++ b/server/src/main/java/io/deephaven/server/arrow/ArrowFlightUtil.java @@ -62,6 +62,38 @@ public class ArrowFlightUtil { private static final Logger log = LoggerFactory.getLogger(ArrowFlightUtil.class); + private static class MessageViewAdapter implements StreamObserver { + private final StreamObserver delegate; + + private 
MessageViewAdapter(StreamObserver delegate) { + this.delegate = delegate; + } + + public void onNext(BarrageStreamGenerator.MessageView value) { + synchronized (delegate) { + try { + value.forEachStream(delegate::onNext); + } catch (IOException e) { + throw new UncheckedDeephavenException(e); + } + } + } + + @Override + public void onError(Throwable t) { + synchronized (delegate) { + delegate.onError(t); + } + } + + @Override + public void onCompleted() { + synchronized (delegate) { + delegate.onCompleted(); + } + } + } + public static final int DEFAULT_MIN_UPDATE_INTERVAL_MS = Configuration.getInstance().getIntegerWithDefault("barrage.minUpdateInterval", 1000); @@ -105,7 +137,7 @@ public static void DoGetCustom( // create an adapter for the response observer final StreamObserver listener = - ArrowModule.provideListenerAdapter().adapt(observer); + new MessageViewAdapter(observer); // push the schema to the listener listener.onNext(streamGeneratorFactory.getSchemaView( @@ -355,7 +387,6 @@ public DoExchangeMarshaller( final BarrageStreamGenerator.Factory streamGeneratorFactory, final BarrageMessageProducer.Operation.Factory bmpOperationFactory, final HierarchicalTableViewSubscription.Factory htvsFactory, - final BarrageMessageProducer.Adapter, StreamObserver> listenerAdapter, final BarrageMessageProducer.Adapter subscriptionOptAdapter, final BarrageMessageProducer.Adapter snapshotOptAdapter, final SessionService.ErrorTransformer errorTransformer, @@ -370,7 +401,7 @@ public DoExchangeMarshaller( this.subscriptionOptAdapter = subscriptionOptAdapter; this.snapshotOptAdapter = snapshotOptAdapter; this.session = session; - this.listener = listenerAdapter.adapt(responseObserver); + this.listener = new MessageViewAdapter(responseObserver); this.errorTransformer = errorTransformer; this.session.addOnCloseCallback(this); diff --git a/server/src/main/java/io/deephaven/server/arrow/ArrowModule.java b/server/src/main/java/io/deephaven/server/arrow/ArrowModule.java index 5bbd59dfd67..7f2b22aa464 100644 --- a/server/src/main/java/io/deephaven/server/arrow/ArrowModule.java +++ b/server/src/main/java/io/deephaven/server/arrow/ArrowModule.java @@ -7,7 +7,6 @@ import dagger.Module; import dagger.Provides; import dagger.multibindings.IntoSet; -import io.deephaven.UncheckedDeephavenException; import io.deephaven.barrage.flatbuf.BarrageSnapshotRequest; import io.deephaven.barrage.flatbuf.BarrageSubscriptionRequest; import io.deephaven.extensions.barrage.BarrageSnapshotOptions; @@ -16,11 +15,8 @@ import io.deephaven.server.barrage.BarrageMessageProducer; import io.deephaven.extensions.barrage.BarrageStreamGeneratorImpl; import io.grpc.BindableService; -import io.grpc.stub.StreamObserver; import javax.inject.Singleton; -import java.io.IOException; -import java.io.InputStream; @Module public abstract class ArrowModule { @@ -32,44 +28,12 @@ public abstract class ArrowModule { @IntoSet abstract BindableService bindBrowserFlightServiceBinding(BrowserFlightServiceGrpcBinding service); - @Binds + @Provides @Singleton static BarrageStreamGenerator.Factory bindStreamGenerator() { return new BarrageStreamGeneratorImpl.Factory(); } - - // TODO before commit, try getting rid of this - @Provides - static BarrageMessageProducer.Adapter, StreamObserver> provideListenerAdapter() { - return delegate -> new StreamObserver<>() { - @Override - public void onNext(final BarrageStreamGenerator.MessageView view) { - try { - synchronized (delegate) { - view.forEachStream(delegate::onNext); - } - } catch (final IOException ioe) { - throw 
new UncheckedDeephavenException(ioe); - } - } - - @Override - public void onError(Throwable t) { - synchronized (delegate) { - delegate.onError(t); - } - } - - @Override - public void onCompleted() { - synchronized (delegate) { - delegate.onCompleted(); - } - } - }; - } - @Provides static BarrageMessageProducer.Adapter subscriptionOptAdapter() { return BarrageSubscriptionOptions::of; diff --git a/server/src/main/java/io/deephaven/server/barrage/BarrageMessageProducer.java b/server/src/main/java/io/deephaven/server/barrage/BarrageMessageProducer.java index 518ed1ab699..68a138a78b1 100644 --- a/server/src/main/java/io/deephaven/server/barrage/BarrageMessageProducer.java +++ b/server/src/main/java/io/deephaven/server/barrage/BarrageMessageProducer.java @@ -516,7 +516,7 @@ public void addSubscription(final StreamObserver listener, + private boolean findAndUpdateSubscription(final StreamObserver listener, final Consumer updateSubscription) { final Function, Boolean> findAndUpdate = (List subscriptions) -> { for (final Subscription sub : subscriptions) { @@ -581,7 +581,7 @@ public boolean updateSubscription(final StreamObserver listener) { + public void removeSubscription(final StreamObserver listener) { findAndUpdateSubscription(listener, sub -> { sub.pendingDelete = true; if (log.isDebugEnabled()) { From 46f5eba19ef9af04e1e6b78a9e57b55ca5d6e8de Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 31 May 2024 16:14:30 -0500 Subject: [PATCH 035/219] Tighten BSG/I's usage of InputStream to DefensiveDrainable --- .../barrage/BarrageStreamGenerator.java | 4 +- .../barrage/BarrageStreamGeneratorImpl.java | 44 +++++++++---------- 2 files changed, 22 insertions(+), 26 deletions(-) diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGenerator.java index 730f8700781..2d3b55fe7fd 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGenerator.java @@ -6,12 +6,12 @@ import com.google.flatbuffers.FlatBufferBuilder; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.table.impl.util.BarrageMessage; +import io.deephaven.extensions.barrage.util.DefensiveDrainable; import io.deephaven.util.SafeCloseable; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.IOException; -import java.io.InputStream; import java.util.BitSet; import java.util.function.Consumer; import java.util.function.ToIntFunction; @@ -23,7 +23,7 @@ public interface BarrageStreamGenerator extends SafeCloseable { interface MessageView { - void forEachStream(Consumer visitor) throws IOException; + void forEachStream(Consumer visitor) throws IOException; } interface Factory { diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java index 34162391125..7a8518c0b80 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java @@ -47,7 +47,6 @@ import org.jetbrains.annotations.Nullable; import java.io.IOException; -import java.io.InputStream; import java.io.OutputStream; import java.nio.ByteBuffer; import java.util.*; @@ 
-310,7 +309,7 @@ public SubView(final BarrageSubscriptionOptions options, } @Override - public void forEachStream(Consumer visitor) throws IOException { + public void forEachStream(Consumer visitor) throws IOException { final long startTm = System.nanoTime(); ByteBuffer metadata = getSubscriptionMetadata(); MutableLong bytesWritten = new MutableLong(0L); @@ -322,7 +321,7 @@ public void forEachStream(Consumer visitor) throws IOException { if (numAddRows == 0 && numModRows == 0) { // we still need to send a message containing metadata when there are no rows - final InputStream is = getInputStream(this, 0, 0, actualBatchSize, metadata, + final DefensiveDrainable is = getInputStream(this, 0, 0, actualBatchSize, metadata, BarrageStreamGeneratorImpl.this::appendAddColumns); bytesWritten.add(is.available()); visitor.accept(is); @@ -522,7 +521,7 @@ public SnapshotView(final BarrageSnapshotOptions options, } @Override - public void forEachStream(Consumer visitor) throws IOException { + public void forEachStream(Consumer visitor) throws IOException { final long startTm = System.nanoTime(); ByteBuffer metadata = getSnapshotMetadata(); MutableLong bytesWritten = new MutableLong(0L); @@ -636,7 +635,7 @@ public SchemaMessageView(final ByteBuffer buffer) { } @Override - public void forEachStream(Consumer visitor) { + public void forEachStream(Consumer visitor) { visitor.accept(new DrainableByteArrayInputStream(msgBytes, 0, msgBytes.length)); } } @@ -644,7 +643,7 @@ public void forEachStream(Consumer visitor) { @FunctionalInterface private interface ColumnVisitor { int visit(final RecordBatchMessageView view, final long startRange, final int targetBatchSize, - final Consumer addStream, + final Consumer addStream, final ChunkInputStreamGenerator.FieldNodeListener fieldNodeListener, final ChunkInputStreamGenerator.BufferListener bufferListener) throws IOException; } @@ -661,13 +660,14 @@ int visit(final RecordBatchMessageView view, final long startRange, final int ta * @param columnVisitor the helper method responsible for appending the payload columns to the RecordBatch * @return an InputStream ready to be drained by GRPC */ - private InputStream getInputStream(final RecordBatchMessageView view, final long offset, final int targetBatchSize, + private DefensiveDrainable getInputStream(final RecordBatchMessageView view, final long offset, + final int targetBatchSize, final MutableInt actualBatchSize, final ByteBuffer metadata, final ColumnVisitor columnVisitor) throws IOException { - final ArrayDeque streams = new ArrayDeque<>(); + final ArrayDeque streams = new ArrayDeque<>(); final MutableInt size = new MutableInt(); - final Consumer addStream = (final InputStream is) -> { + final Consumer addStream = (final DefensiveDrainable is) -> { try { final int sz = is.available(); if (sz == 0) { @@ -755,7 +755,7 @@ private InputStream getInputStream(final RecordBatchMessageView view, final long writeHeader(metadata, size, header, baos); streams.addFirst(new DrainableByteArrayInputStream(baos.peekBuffer(), 0, baos.size())); - return new ConsecutiveDrainableStreams(streams.toArray(new InputStream[0])); + return new ConsecutiveDrainableStreams(streams.toArray(new DefensiveDrainable[0])); } catch (final IOException ex) { throw new UncheckedDeephavenException("Unexpected IOException", ex); } @@ -781,7 +781,7 @@ protected void writeHeader( cos.flush(); } - private void processBatches(Consumer visitor, final RecordBatchMessageView view, + private void processBatches(Consumer visitor, final RecordBatchMessageView view, 
final long numRows, final int maxBatchSize, ByteBuffer metadata, final ColumnVisitor columnVisitor, final MutableLong bytesWritten) throws IOException { long offset = 0; @@ -798,7 +798,7 @@ private void processBatches(Consumer visitor, final RecordBatchMess while (offset < numRows) { try { - final InputStream is = + final DefensiveDrainable is = getInputStream(view, offset, batchSize, actualBatchSize, metadata, columnVisitor); int bytesToWrite = is.available(); @@ -875,7 +875,8 @@ private static int findGeneratorForOffset(final List } private int appendAddColumns(final RecordBatchMessageView view, final long startRange, final int targetBatchSize, - final Consumer addStream, final ChunkInputStreamGenerator.FieldNodeListener fieldNodeListener, + final Consumer addStream, + final ChunkInputStreamGenerator.FieldNodeListener fieldNodeListener, final ChunkInputStreamGenerator.BufferListener bufferListener) throws IOException { if (addColumnData.length == 0) { return view.addRowOffsets().intSize(); @@ -941,7 +942,7 @@ private int appendAddColumns(final RecordBatchMessageView view, final long start } private int appendModColumns(final RecordBatchMessageView view, final long startRange, final int targetBatchSize, - final Consumer addStream, + final Consumer addStream, final ChunkInputStreamGenerator.FieldNodeListener fieldNodeListener, final ChunkInputStreamGenerator.BufferListener bufferListener) throws IOException { int[] columnChunkIdx = new int[modColumnData.length]; @@ -1203,21 +1204,16 @@ public int drainTo(final OutputStream outputStream) throws IOException { } public static class ConsecutiveDrainableStreams extends DefensiveDrainable { - final InputStream[] streams; + final DefensiveDrainable[] streams; - public ConsecutiveDrainableStreams(final InputStream... streams) { + public ConsecutiveDrainableStreams(final @NotNull DefensiveDrainable... 
streams) { this.streams = streams; - for (final InputStream stream : streams) { - if (!(stream instanceof Drainable)) { - throw new IllegalArgumentException("expecting sub-class of Drainable; found: " + stream.getClass()); - } - } } @Override public int drainTo(final OutputStream outputStream) throws IOException { int total = 0; - for (final InputStream stream : streams) { + for (final DefensiveDrainable stream : streams) { final int expected = total + stream.available(); total += ((Drainable) stream).drainTo(outputStream); if (expected != total) { @@ -1233,7 +1229,7 @@ public int drainTo(final OutputStream outputStream) throws IOException { @Override public int available() throws SizeException, IOException { int total = 0; - for (final InputStream stream : streams) { + for (final DefensiveDrainable stream : streams) { total += stream.available(); if (total < 0) { throw new SizeException("drained message is too large; exceeds Integer.MAX_VALUE", total); @@ -1244,7 +1240,7 @@ public int available() throws SizeException, IOException { @Override public void close() throws IOException { - for (final InputStream stream : streams) { + for (final DefensiveDrainable stream : streams) { try { stream.close(); } catch (final IOException e) { From bc28009bf7f8f2267aa0bdcc09365c48aa580337 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 31 May 2024 16:15:34 -0500 Subject: [PATCH 036/219] Move general util classes to their own types --- .../barrage/BarrageStreamGeneratorImpl.java | 82 ------------------- .../barrage/ConsecutiveDrainableStreams.java | 56 +++++++++++++ .../DrainableByteArrayInputStream.java | 44 ++++++++++ .../barrage/BarrageStreamGeneratorTest.java | 15 ++-- 4 files changed, 107 insertions(+), 90 deletions(-) create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/ConsecutiveDrainableStreams.java create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/DrainableByteArrayInputStream.java diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java index 7a8518c0b80..b5deafc6c67 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java @@ -1169,88 +1169,6 @@ public RowSetShiftDataGenerator(final RowSetShiftData shifted) throws IOExceptio } } - public static class DrainableByteArrayInputStream extends DefensiveDrainable { - - private byte[] buf; - private final int offset; - private final int length; - - public DrainableByteArrayInputStream(final byte[] buf, final int offset, final int length) { - this.buf = Objects.requireNonNull(buf); - this.offset = offset; - this.length = length; - } - - @Override - public int available() { - if (buf == null) { - return 0; - } - return length; - } - - @Override - public int drainTo(final OutputStream outputStream) throws IOException { - if (buf != null) { - try { - outputStream.write(buf, offset, length); - } finally { - buf = null; - } - return length; - } - return 0; - } - } - - public static class ConsecutiveDrainableStreams extends DefensiveDrainable { - final DefensiveDrainable[] streams; - - public ConsecutiveDrainableStreams(final @NotNull DefensiveDrainable... 
streams) { - this.streams = streams; - } - - @Override - public int drainTo(final OutputStream outputStream) throws IOException { - int total = 0; - for (final DefensiveDrainable stream : streams) { - final int expected = total + stream.available(); - total += ((Drainable) stream).drainTo(outputStream); - if (expected != total) { - throw new IllegalStateException("drained message drained wrong number of bytes"); - } - if (total < 0) { - throw new IllegalStateException("drained message is too large; exceeds Integer.MAX_VALUE"); - } - } - return total; - } - - @Override - public int available() throws SizeException, IOException { - int total = 0; - for (final DefensiveDrainable stream : streams) { - total += stream.available(); - if (total < 0) { - throw new SizeException("drained message is too large; exceeds Integer.MAX_VALUE", total); - } - } - return total; - } - - @Override - public void close() throws IOException { - for (final DefensiveDrainable stream : streams) { - try { - stream.close(); - } catch (final IOException e) { - throw new UncheckedDeephavenException("unexpected IOException", e); - } - } - super.close(); - } - } - private static final class EmptyRowSetGenerator extends RowSetGenerator { public static final EmptyRowSetGenerator INSTANCE; static { diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/ConsecutiveDrainableStreams.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/ConsecutiveDrainableStreams.java new file mode 100644 index 00000000000..441356255d7 --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/ConsecutiveDrainableStreams.java @@ -0,0 +1,56 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.extensions.barrage; + +import io.deephaven.UncheckedDeephavenException; +import io.deephaven.extensions.barrage.util.DefensiveDrainable; +import io.deephaven.util.SafeCloseable; +import io.deephaven.util.datastructures.SizeException; +import io.grpc.Drainable; +import org.jetbrains.annotations.NotNull; + +import java.io.IOException; +import java.io.OutputStream; + +public class ConsecutiveDrainableStreams extends DefensiveDrainable { + final DefensiveDrainable[] streams; + + public ConsecutiveDrainableStreams(final @NotNull DefensiveDrainable... 
streams) { + this.streams = streams; + } + + @Override + public int drainTo(final OutputStream outputStream) throws IOException { + int total = 0; + for (final DefensiveDrainable stream : streams) { + final int expected = total + stream.available(); + total += stream.drainTo(outputStream); + if (expected != total) { + throw new IllegalStateException("drained message drained wrong number of bytes"); + } + if (total < 0) { + throw new IllegalStateException("drained message is too large; exceeds Integer.MAX_VALUE"); + } + } + return total; + } + + @Override + public int available() throws SizeException, IOException { + int total = 0; + for (final DefensiveDrainable stream : streams) { + total += stream.available(); + if (total < 0) { + throw new SizeException("drained message is too large; exceeds Integer.MAX_VALUE", total); + } + } + return total; + } + + @Override + public void close() throws IOException { + SafeCloseable.closeAll(streams); + super.close(); + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/DrainableByteArrayInputStream.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/DrainableByteArrayInputStream.java new file mode 100644 index 00000000000..f2b14a7dc44 --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/DrainableByteArrayInputStream.java @@ -0,0 +1,44 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.extensions.barrage; + +import io.deephaven.extensions.barrage.util.DefensiveDrainable; + +import java.io.IOException; +import java.io.OutputStream; +import java.util.Objects; + +public class DrainableByteArrayInputStream extends DefensiveDrainable { + + private byte[] buf; + private final int offset; + private final int length; + + public DrainableByteArrayInputStream(final byte[] buf, final int offset, final int length) { + this.buf = Objects.requireNonNull(buf); + this.offset = offset; + this.length = length; + } + + @Override + public int available() { + if (buf == null) { + return 0; + } + return length; + } + + @Override + public int drainTo(final OutputStream outputStream) throws IOException { + if (buf != null) { + try { + outputStream.write(buf, offset, length); + } finally { + buf = null; + } + return length; + } + return 0; + } +} diff --git a/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorTest.java b/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorTest.java index e6c59d7efb8..73be2b851af 100644 --- a/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorTest.java +++ b/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorTest.java @@ -14,8 +14,8 @@ public class BarrageStreamGeneratorTest { @Test public void testDrainableStreamIsEmptied() throws IOException { final int length = 512; - final BarrageStreamGeneratorImpl.DrainableByteArrayInputStream inputStream = - new BarrageStreamGeneratorImpl.DrainableByteArrayInputStream(new byte[length * 2], length / 2, length); + final DrainableByteArrayInputStream inputStream = + new DrainableByteArrayInputStream(new byte[length * 2], length / 2, length); int bytesRead = inputStream.drainTo(new NullOutputStream()); @@ -26,12 +26,11 @@ public void testDrainableStreamIsEmptied() throws IOException { @Test public void testConsecutiveDrainableStreamIsEmptied() throws IOException { final int length = 512; - final 
BarrageStreamGeneratorImpl.DrainableByteArrayInputStream in1 = - new BarrageStreamGeneratorImpl.DrainableByteArrayInputStream(new byte[length * 2], length / 2, length); - final BarrageStreamGeneratorImpl.DrainableByteArrayInputStream in2 = - new BarrageStreamGeneratorImpl.DrainableByteArrayInputStream(new byte[length * 2], length / 2, length); - final BarrageStreamGeneratorImpl.ConsecutiveDrainableStreams inputStream = - new BarrageStreamGeneratorImpl.ConsecutiveDrainableStreams(in1, in2); + final DrainableByteArrayInputStream in1 = + new DrainableByteArrayInputStream(new byte[length * 2], length / 2, length); + final DrainableByteArrayInputStream in2 = + new DrainableByteArrayInputStream(new byte[length * 2], length / 2, length); + final ConsecutiveDrainableStreams inputStream = new ConsecutiveDrainableStreams(in1, in2); int bytesRead = inputStream.drainTo(new NullOutputStream()); From 6a23dcfc8b050a610b926180ee5c96630ff548c7 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 31 May 2024 16:18:08 -0500 Subject: [PATCH 037/219] Remove dead method --- .../barrage/BarrageStreamGeneratorImpl.java | 19 ++----------------- 1 file changed, 2 insertions(+), 17 deletions(-) diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java index b5deafc6c67..a295a65f86b 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java @@ -38,7 +38,6 @@ import io.deephaven.util.datastructures.SizeException; import io.deephaven.util.mutable.MutableInt; import io.deephaven.util.mutable.MutableLong; -import io.grpc.Drainable; import org.apache.arrow.flatbuf.Buffer; import org.apache.arrow.flatbuf.FieldNode; import org.apache.arrow.flatbuf.RecordBatch; @@ -47,7 +46,6 @@ import org.jetbrains.annotations.Nullable; import java.io.IOException; -import java.io.OutputStream; import java.nio.ByteBuffer; import java.util.*; import java.util.function.Consumer; @@ -1111,25 +1109,12 @@ protected int addToFlatBuffer(final RowSet viewport, final FlatBufferBuilder bui } public static class BitSetGenerator extends ByteArrayGenerator { - public final BitSet original; - - public BitSetGenerator(final BitSet bitset) throws IOException { - this.original = bitset == null ? new BitSet() : bitset; + public BitSetGenerator(final BitSet bitset) { + BitSet original = bitset == null ? 
new BitSet() : bitset; this.raw = original.toByteArray(); final int nBits = original.previousSetBit(Integer.MAX_VALUE - 1) + 1; this.len = (int) ((long) nBits + 7) / 8; } - - public int addToFlatBuffer(final BitSet mine, final FlatBufferBuilder builder) throws IOException { - if (mine.equals(original)) { - return addToFlatBuffer(builder); - } - - final byte[] nraw = mine.toByteArray(); - final int nBits = mine.previousSetBit(Integer.MAX_VALUE - 1) + 1; - final int nlen = (int) ((long) nBits + 7) / 8; - return builder.createByteVector(nraw, 0, nlen); - } } public static class RowSetShiftDataGenerator extends ByteArrayGenerator { From a321d830069786d4c41ce03c27f38a771fae77ae Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 4 Jun 2024 10:31:15 -0500 Subject: [PATCH 038/219] Javadoc suggestions --- .../deephaven/extensions/barrage/BarrageStreamGenerator.java | 4 ++++ .../extensions/barrage/BarrageStreamGeneratorImpl.java | 5 +++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGenerator.java index 2d3b55fe7fd..2c0375235ae 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGenerator.java @@ -22,6 +22,10 @@ */ public interface BarrageStreamGenerator extends SafeCloseable { + /** + * Represents a single update, which might be sent as multiple distinct payloads as necessary based on the + * implementation. + */ interface MessageView { void forEachStream(Consumer visitor) throws IOException; } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java index a295a65f86b..badc257aa78 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java @@ -647,8 +647,9 @@ int visit(final RecordBatchMessageView view, final long startRange, final int ta } /** - * Returns an InputStream of the message filtered to the viewport. This function accepts `targetBatchSize` but may - * actually write fewer rows than the target (when crossing an internal chunk boundary, e.g.) + * Returns an InputStream of a single FlightData message filtered to the viewport (if provided). This function + * accepts {@code targetBatchSize}, but may actually write fewer rows than the target (e.g. when crossing an + * internal chunk boundary). * * @param view the view of the overall chunk to generate a RecordBatch for * @param offset the start of the batch in position space w.r.t.
the view (inclusive) From d24c2af934fd8fff964e692eae89d01c3c04b6af Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 23 Apr 2024 20:34:05 -0500 Subject: [PATCH 039/219] Hack and un-hack chunk input stream generator impls for gwt compat --- .../main/resources/io/deephaven/Util.gwt.xml | 2 +- .../chunk/BaseChunkInputStreamGenerator.java | 3 +-- .../BooleanChunkInputStreamGenerator.java | 3 --- .../chunk/ByteChunkInputStreamGenerator.java | 3 --- .../chunk/CharChunkInputStreamGenerator.java | 3 --- .../chunk/ChunkInputStreamGenerator.java | 3 --- .../DoubleChunkInputStreamGenerator.java | 3 --- .../chunk/FloatChunkInputStreamGenerator.java | 3 --- .../chunk/IntChunkInputStreamGenerator.java | 3 --- .../chunk/LongChunkInputStreamGenerator.java | 3 --- .../chunk/ShortChunkInputStreamGenerator.java | 3 --- .../VarBinaryChunkInputStreamGenerator.java | 25 ++++++++----------- .../VarListChunkInputStreamGenerator.java | 2 -- .../VectorChunkInputStreamGenerator.java | 3 --- 14 files changed, 13 insertions(+), 49 deletions(-) diff --git a/Util/src/main/resources/io/deephaven/Util.gwt.xml b/Util/src/main/resources/io/deephaven/Util.gwt.xml index d8e3aa692ad..a711bf4e95d 100644 --- a/Util/src/main/resources/io/deephaven/Util.gwt.xml +++ b/Util/src/main/resources/io/deephaven/Util.gwt.xml @@ -4,7 +4,7 @@ - + \ No newline at end of file diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BaseChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BaseChunkInputStreamGenerator.java index f220d7acff7..e68bec57110 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BaseChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BaseChunkInputStreamGenerator.java @@ -80,7 +80,6 @@ protected static int getNumLongsForBitPackOfSize(final int numElements) { return ((numElements + 63) / 64); } - @GwtIncompatible abstract class BaseChunkInputStream extends DrainableColumn { protected final StreamReaderOptions options; protected final RowSequence subset; @@ -90,7 +89,7 @@ abstract class BaseChunkInputStream extends DrainableColumn { this.options = options; this.subset = chunk.size() == 0 ? RowSequenceFactory.EMPTY : subset != null ? 
subset.copy() : RowSequenceFactory.forRange(0, chunk.size() - 1); - REFERENCE_COUNT_UPDATER.incrementAndGet(BaseChunkInputStreamGenerator.this); +// REFERENCE_COUNT_UPDATER.incrementAndGet(BaseChunkInputStreamGenerator.this); // ignore the empty chunk as these are intentionally empty generators that should work for any subset if (chunk.size() > 0 && this.subset.lastRowKey() >= chunk.size()) { throw new IllegalStateException( diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkInputStreamGenerator.java index 88cc47b4135..8c1503b67f8 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkInputStreamGenerator.java @@ -3,7 +3,6 @@ // package io.deephaven.extensions.barrage.chunk; -import com.google.common.annotations.GwtIncompatible; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.attributes.Values; @@ -50,12 +49,10 @@ public static BooleanChunkInputStreamGenerator convertBoxed( } @Override - @GwtIncompatible public DrainableColumn getInputStream(final StreamReaderOptions options, @Nullable final RowSet subset) { return new BooleanChunkInputStream(options, subset); } - @GwtIncompatible private class BooleanChunkInputStream extends BaseChunkInputStream { private BooleanChunkInputStream(final StreamReaderOptions options, final RowSet subset) { super(chunk, options, subset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java index b529e6abc67..e74ea427b94 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java @@ -7,7 +7,6 @@ // @formatter:off package io.deephaven.extensions.barrage.chunk; -import com.google.common.annotations.GwtIncompatible; import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.WritableChunk; @@ -65,12 +64,10 @@ public static ByteChunkInputStreamGenerator convertWithTransform( } @Override - @GwtIncompatible public DrainableColumn getInputStream(final StreamReaderOptions options, @Nullable final RowSet subset) { return new ByteChunkInputStream(options, subset); } - @GwtIncompatible private class ByteChunkInputStream extends BaseChunkInputStream { private ByteChunkInputStream(final StreamReaderOptions options, final RowSet subset) { super(chunk, options, subset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java index 248abebbda0..1ad33382d63 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java @@ -3,7 +3,6 @@ // package io.deephaven.extensions.barrage.chunk; -import com.google.common.annotations.GwtIncompatible; import io.deephaven.base.verify.Assert; import 
io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.WritableChunk; @@ -61,12 +60,10 @@ public static CharChunkInputStreamGenerator convertWithTransform( } @Override - @GwtIncompatible public DrainableColumn getInputStream(final StreamReaderOptions options, @Nullable final RowSet subset) { return new CharChunkInputStream(options, subset); } - @GwtIncompatible private class CharChunkInputStream extends BaseChunkInputStream { private CharChunkInputStream(final StreamReaderOptions options, final RowSet subset) { super(chunk, options, subset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java index 61d862632a7..351944b3587 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java @@ -3,8 +3,6 @@ // package io.deephaven.extensions.barrage.chunk; -import com.google.common.base.Charsets; -import com.google.common.annotations.GwtIncompatible; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.WritableLongChunk; @@ -392,7 +390,6 @@ static WritableChunk extractChunkFromInputStream( * @param subset if provided, is a position-space filter of source data * @return a single-use DrainableColumn ready to be drained via grpc */ - @GwtIncompatible DrainableColumn getInputStream(final StreamReaderOptions options, @Nullable final RowSet subset) throws IOException; final class FieldNodeInfo { diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java index f85bde1d505..619afe68ce3 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java @@ -7,7 +7,6 @@ // @formatter:off package io.deephaven.extensions.barrage.chunk; -import com.google.common.annotations.GwtIncompatible; import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.WritableChunk; @@ -65,12 +64,10 @@ public static DoubleChunkInputStreamGenerator convertWithTransform( } @Override - @GwtIncompatible public DrainableColumn getInputStream(final StreamReaderOptions options, @Nullable final RowSet subset) { return new DoubleChunkInputStream(options, subset); } - @GwtIncompatible private class DoubleChunkInputStream extends BaseChunkInputStream { private DoubleChunkInputStream(final StreamReaderOptions options, final RowSet subset) { super(chunk, options, subset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java index b14ec0af294..dfb50d77660 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java @@ -7,7 +7,6 @@ // @formatter:off package io.deephaven.extensions.barrage.chunk; -import com.google.common.annotations.GwtIncompatible; 
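With @GwtIncompatible removed, getInputStream becomes visible to the GWT-compiled web client as well as the JVM server; it is the write-path entry point that turns a single chunk, optionally filtered by a RowSet subset in position space, into a single-use DrainableColumn. A rough usage sketch under the signatures shown in these hunks; the helper class and its names are illustrative, and DrainableColumn is assumed to be the nested type referenced throughout these files:

    import io.deephaven.engine.rowset.RowSet;
    import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator;
    import io.deephaven.extensions.barrage.util.StreamReaderOptions;

    import java.io.IOException;
    import java.io.OutputStream;

    // Hypothetical helper, not part of the patch: serialize one column's
    // payload for the wire.
    final class ExampleColumnDrain {
        static int drain(
                final ChunkInputStreamGenerator generator,
                final StreamReaderOptions options,
                final RowSet subsetOrNull, // null means "the whole chunk"
                final OutputStream wire) throws IOException {
            // The returned stream is single-use: available() sizes the payload
            // and drainTo(...) writes it out once.
            try (final ChunkInputStreamGenerator.DrainableColumn column =
                    generator.getInputStream(options, subsetOrNull)) {
                return column.drainTo(wire);
            }
        }
    }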
import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.WritableChunk; @@ -65,12 +64,10 @@ public static FloatChunkInputStreamGenerator convertWithTransform( } @Override - @GwtIncompatible public DrainableColumn getInputStream(final StreamReaderOptions options, @Nullable final RowSet subset) { return new FloatChunkInputStream(options, subset); } - @GwtIncompatible private class FloatChunkInputStream extends BaseChunkInputStream { private FloatChunkInputStream(final StreamReaderOptions options, final RowSet subset) { super(chunk, options, subset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java index d5fe40a2e9a..c6201928c17 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java @@ -7,7 +7,6 @@ // @formatter:off package io.deephaven.extensions.barrage.chunk; -import com.google.common.annotations.GwtIncompatible; import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.WritableChunk; @@ -65,12 +64,10 @@ public static IntChunkInputStreamGenerator convertWithTransform( } @Override - @GwtIncompatible public DrainableColumn getInputStream(final StreamReaderOptions options, @Nullable final RowSet subset) { return new IntChunkInputStream(options, subset); } - @GwtIncompatible private class IntChunkInputStream extends BaseChunkInputStream { private IntChunkInputStream(final StreamReaderOptions options, final RowSet subset) { super(chunk, options, subset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java index c5f313f58fc..c86ca81166d 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java @@ -7,7 +7,6 @@ // @formatter:off package io.deephaven.extensions.barrage.chunk; -import com.google.common.annotations.GwtIncompatible; import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.WritableChunk; @@ -65,12 +64,10 @@ public static LongChunkInputStreamGenerator convertWithTransform( } @Override - @GwtIncompatible public DrainableColumn getInputStream(final StreamReaderOptions options, @Nullable final RowSet subset) { return new LongChunkInputStream(options, subset); } - @GwtIncompatible private class LongChunkInputStream extends BaseChunkInputStream { private LongChunkInputStream(final StreamReaderOptions options, final RowSet subset) { super(chunk, options, subset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java index df21280fec1..4f440df9a7f 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java @@ -7,7 +7,6 @@ // 
@formatter:off package io.deephaven.extensions.barrage.chunk; -import com.google.common.annotations.GwtIncompatible; import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.WritableChunk; @@ -65,12 +64,10 @@ public static ShortChunkInputStreamGenerator convertWithTransform( } @Override - @GwtIncompatible public DrainableColumn getInputStream(final StreamReaderOptions options, @Nullable final RowSet subset) { return new ShortChunkInputStream(options, subset); } - @GwtIncompatible private class ShortChunkInputStream extends BaseChunkInputStream { private ShortChunkInputStream(final StreamReaderOptions options, final RowSet subset) { super(chunk, options, subset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarBinaryChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarBinaryChunkInputStreamGenerator.java index 18c0d0de715..54981ece538 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarBinaryChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarBinaryChunkInputStreamGenerator.java @@ -228,14 +228,12 @@ public void close() { } @Override - @GwtIncompatible public DrainableColumn getInputStream(final StreamReaderOptions options, @Nullable final RowSet subset) throws IOException { computePayload(); return new ObjectChunkInputStream(options, subset); } - @GwtIncompatible private class ObjectChunkInputStream extends BaseChunkInputStream { private int cachedSize = -1; @@ -282,11 +280,11 @@ public void visitBuffers(final BufferListener listener) { // payload final MutableLong numPayloadBytes = new MutableLong(); subset.forAllRowKeyRanges((s, e) -> { - numPayloadBytes.add(byteStorage.getPayloadSize((int) s, (int) e)); + numPayloadBytes.addAndGet(byteStorage.getPayloadSize((int) s, (int) e)); }); final long payloadExtended = numPayloadBytes.get() & REMAINDER_MOD_8_MASK; if (payloadExtended > 0) { - numPayloadBytes.add(8 - payloadExtended); + numPayloadBytes.addAndGet(8 - payloadExtended); } listener.noteLogicalBuffer(numPayloadBytes.get()); } @@ -296,27 +294,27 @@ protected int getRawSize() { if (cachedSize == -1) { MutableLong totalCachedSize = new MutableLong(0L); if (sendValidityBuffer()) { - totalCachedSize.add(getValidityMapSerializationSizeFor(subset.intSize(DEBUG_NAME))); + totalCachedSize.addAndGet(getValidityMapSerializationSizeFor(subset.intSize(DEBUG_NAME))); } // there are n+1 offsets; it is not assumed first offset is zero if (!subset.isEmpty() && subset.size() == byteStorage.offsets.size() - 1) { - totalCachedSize.add(byteStorage.offsets.size() * (long) Integer.BYTES); - totalCachedSize.add(byteStorage.size()); + totalCachedSize.addAndGet(byteStorage.offsets.size() * (long) Integer.BYTES); + totalCachedSize.addAndGet(byteStorage.size()); } else { - totalCachedSize.add(subset.isEmpty() ? 0 : Integer.BYTES); // account for the n+1 offset + totalCachedSize.addAndGet(subset.isEmpty() ? 
0 : Integer.BYTES); // account for the n+1 offset subset.forAllRowKeyRanges((s, e) -> { // account for offsets - totalCachedSize.add((e - s + 1) * Integer.BYTES); + totalCachedSize.addAndGet((e - s + 1) * Integer.BYTES); // account for payload - totalCachedSize.add(byteStorage.getPayloadSize((int) s, (int) e)); + totalCachedSize.addAndGet(byteStorage.getPayloadSize((int) s, (int) e)); }); } if (!subset.isEmpty() && (subset.size() & 0x1) == 0) { // then we must also align offset array - totalCachedSize.add(Integer.BYTES); + totalCachedSize.addAndGet(Integer.BYTES); } cachedSize = LongSizedDataStructure.intSize(DEBUG_NAME, totalCachedSize.get()); } @@ -365,8 +363,7 @@ public int drainTo(final OutputStream outputStream) throws IOException { final MutableInt logicalSize = new MutableInt(); subset.forAllRowKeys((idx) -> { try { - logicalSize.add(LongSizedDataStructure.intSize("int cast", - byteStorage.getPayloadSize((int) idx, (int) idx))); + logicalSize.addAndGet((int) byteStorage.getPayloadSize((int) idx, (int) idx)); dos.writeInt(logicalSize.get()); } catch (final IOException e) { throw new UncheckedDeephavenException("couldn't drain data to OutputStream", e); @@ -383,7 +380,7 @@ public int drainTo(final OutputStream outputStream) throws IOException { final MutableLong payloadLen = new MutableLong(); subset.forAllRowKeyRanges((s, e) -> { try { - payloadLen.add(byteStorage.writePayload(dos, (int) s, (int) e)); + payloadLen.addAndGet(byteStorage.writePayload(dos, (int) s, (int) e)); } catch (final IOException err) { throw new UncheckedDeephavenException("couldn't drain data to OutputStream", err); } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java index ca92eabf63f..0c63e45fb6e 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java @@ -80,14 +80,12 @@ public void close() { } @Override - @GwtIncompatible public DrainableColumn getInputStream(final StreamReaderOptions options, @Nullable final RowSet subset) throws IOException { computePayload(); return new VarListInputStream(options, subset); } - @GwtIncompatible private class VarListInputStream extends BaseChunkInputStream { private int cachedSize = -1; private final WritableIntChunk myOffsets; diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java index 539166af7e2..35245b11631 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java @@ -3,7 +3,6 @@ // package io.deephaven.extensions.barrage.chunk; -import com.google.common.annotations.GwtIncompatible; import com.google.common.io.LittleEndianDataOutputStream; import io.deephaven.UncheckedDeephavenException; import io.deephaven.chunk.ChunkType; @@ -79,14 +78,12 @@ public void close() { } @Override - @GwtIncompatible public DrainableColumn getInputStream(final StreamReaderOptions options, @Nullable final RowSet subset) throws IOException { computePayload(); return new 
VarListInputStream(options, subset); } - @GwtIncompatible private class VarListInputStream extends BaseChunkInputStream { private int cachedSize = -1; private final WritableIntChunk myOffsets; From 46777dea236bf28f3e426fc86a1113ec67b02055 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 23 Apr 2024 20:34:57 -0500 Subject: [PATCH 040/219] Non-compiling draft of web bsgi, and hacked up rowset super source --- .../client/api/barrage/WebBarrageMessage.java | 12 +- .../barrage/WebBarrageStreamGenerator.java | 639 ++++++++++++++++++ .../util/pools/ChunkPoolReleaseTracking.java | 6 +- .../deephaven/engine/rowset/RowSequence.java | 24 + .../engine/rowset/RowSequenceFactory.java | 11 + .../io/deephaven/engine/rowset/RowSet.java | 11 + .../rowset/RowSetBuilderSequential.java | 11 + .../engine/rowset/RowSetFactory.java | 12 + 8 files changed, 717 insertions(+), 9 deletions(-) create mode 100644 web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamGenerator.java create mode 100644 web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSequence.java create mode 100644 web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSequenceFactory.java create mode 100644 web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSet.java create mode 100644 web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSetBuilderSequential.java create mode 100644 web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSetFactory.java diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageMessage.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageMessage.java index 877c7e09330..1b26f2ccadb 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageMessage.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageMessage.java @@ -15,16 +15,16 @@ public class WebBarrageMessage { public static class ModColumnData { public RangeSet rowsModified; - // public Class type; - // public Class componentType; + public Class type; + public Class componentType; public ArrayList> data; - // public ChunkType chunkType; + public ChunkType chunkType; } public static class AddColumnData { - // public Class type; - // public Class componentType; + public Class type; + public Class componentType; public ArrayList> data; - // public ChunkType chunkType; + public ChunkType chunkType; } public long firstSeq = -1; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamGenerator.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamGenerator.java new file mode 100644 index 00000000000..8e0451367c4 --- /dev/null +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamGenerator.java @@ -0,0 +1,639 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.web.client.api.barrage; + +import com.google.flatbuffers.FlatBufferBuilder; +import elemental2.core.ArrayBufferView; +import elemental2.core.Uint8Array; +import io.deephaven.UncheckedDeephavenException; +import io.deephaven.barrage.flatbuf.BarrageMessageType; +import io.deephaven.barrage.flatbuf.BarrageMessageWrapper; +import io.deephaven.barrage.flatbuf.BarrageUpdateMetadata; +import io.deephaven.chunk.ChunkType; +import 
io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.chunk.sized.SizedChunk; +import io.deephaven.chunk.sized.SizedLongChunk; +import io.deephaven.engine.rowset.RowSet; +import io.deephaven.engine.rowset.RowSetFactory; +import io.deephaven.extensions.barrage.BarrageSnapshotOptions; +import io.deephaven.extensions.barrage.BarrageStreamGeneratorImpl; +import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator; +import io.deephaven.extensions.barrage.chunk.SingleElementListHeaderInputStreamGenerator; +import io.deephaven.extensions.barrage.util.BarrageUtil; +import io.deephaven.extensions.barrage.util.DefensiveDrainable; +import io.deephaven.extensions.barrage.util.ExposedByteArrayOutputStream; +import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb.FlightData; +import io.deephaven.util.datastructures.LongSizedDataStructure; +import io.deephaven.util.datastructures.SizeException; +import io.deephaven.web.client.api.parse.JsDataHandler; +import io.deephaven.web.shared.data.RangeSet; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.web.shared.data.ShiftedRange; +import org.apache.arrow.flatbuf.Buffer; +import org.apache.arrow.flatbuf.Field; +import org.apache.arrow.flatbuf.FieldNode; +import org.apache.arrow.flatbuf.KeyValue; +import org.apache.arrow.flatbuf.Message; +import org.apache.arrow.flatbuf.MetadataVersion; +import org.apache.arrow.flatbuf.RecordBatch; +import org.apache.arrow.flatbuf.Schema; +import org.gwtproject.nio.TypedArrayHelper; +import org.jetbrains.annotations.Nullable; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.BitSet; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; +import java.util.function.Consumer; +import java.util.function.ToIntFunction; + +/** + * Roughly mirrors BarrageStreamGeneratorImpl, except that viewports are not supported, and the grpc-web TS library is + * used for FlightData rather than InputStream to send data to the server. 
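+ * <p>
+ * A snapshot is sent as a metadata-only message when it has no rows; otherwise the metadata is attached to the
+ * first record batch, and later batches are re-sized from the observed bytes-per-row to stay under the client's
+ * maximum message size.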
+ */
+public class WebBarrageStreamGenerator {
+    private static final int DEFAULT_INITIAL_BATCH_SIZE = 4096;
+    private static final int DEFAULT_BATCH_SIZE = 1 << 16;
+
+    private static final int DEFAULT_MESSAGE_SIZE_LIMIT = 100 * 1024 * 1024;
+
+    public interface View {
+        void forEachStream(Consumer visitor) throws IOException;
+
+        default boolean isViewport() {
+            return false;
+        }
+
+        StreamReaderOptions options();
+
+        int clientMaxMessageSize();
+
+        RangeSet addRowOffsets();
+
+        RangeSet modRowOffsets(int col);
+    }
+    public class SnapshotView implements View {
+        private final BarrageSnapshotOptions options;
+        // private final RangeSet viewport;
+        // private final boolean reverseViewport;
+        private final BitSet subscribedColumns;
+        private final long numAddRows;
+        private final RangeSet addRowKeys;
+        private final RangeSet addRowOffsets;
+
+        public SnapshotView(BarrageSnapshotOptions options,
+                @Nullable final BitSet subscribedColumns) {
+            this.options = options;
+
+            this.subscribedColumns = subscribedColumns;
+
+            this.addRowKeys = WebBarrageStreamGenerator.this.rowsAdded;
+            this.addRowOffsets = RangeSet.ofRange(0, addRowKeys.size() - 1);
+
+            this.numAddRows = addRowOffsets.size();
+        }
+
+        @Override
+        public void forEachStream(Consumer visitor) {
+            ByteBuffer metadata = WebBarrageStreamGenerator.this.getSnapshotMetadata(this);
+            AtomicLong bytesWritten = new AtomicLong(0);
+
+            int maxBatchSize = options().batchSize();
+            if (maxBatchSize <= 0) {
+                maxBatchSize = DEFAULT_BATCH_SIZE;
+            }
+            AtomicInteger actualBatchSize = new AtomicInteger();
+            if (numAddRows == 0) {
+                // we still need to send a message containing metadata when there are no rows
+                FlightData inputStream = WebBarrageStreamGenerator.this
+                        .getInputStream(this, 0, 0, actualBatchSize, metadata,
+                                WebBarrageStreamGenerator.this::appendAddColumns);
+                visitor.accept(inputStream);
+            } else {
+                // send the add batches
+                WebBarrageStreamGenerator.this.processBatches(visitor, this, numAddRows, maxBatchSize, metadata,
+                        WebBarrageStreamGenerator.this::appendAddColumns, bytesWritten);
+            }
+        }
+
+        @Override
+        public StreamReaderOptions options() {
+            return options;
+        }
+
+        @Override
+        public int clientMaxMessageSize() {
+            return options.maxMessageSize();
+        }
+
+        @Override
+        public RangeSet addRowOffsets() {
+            return addRowOffsets;
+        }
+
+        @Override
+        public RangeSet modRowOffsets(int col) {
+            throw new UnsupportedOperationException("asked for mod row on SnapshotView");
+        }
+    }
+    public static class SchemaView implements View {
+        private final ByteBuffer msgBytes;
+
+        public SchemaView(ByteBuffer msgBytes) {
+            this.msgBytes = msgBytes;
+        }
+
+        @Override
+        public void forEachStream(Consumer visitor) {
+            visitor.accept(new BarrageStreamGeneratorImpl.DrainableByteArrayInputStream(msgBytes, 0, msgBytes.remaining()));
+            FlightData data = new FlightData();
+            ArrayBufferView view = TypedArrayHelper.unwrap(msgBytes);
+            data.setDataHeader(new Uint8Array(view.buffer, msgBytes.position(), msgBytes.remaining()));
+            visitor.accept(data);
+        }
+
+        @Override
+        public StreamReaderOptions options() {
+            return null;
+        }
+
+        @Override
+        public int clientMaxMessageSize() {
+            return 0;
+        }
+
+        @Override
+        public RangeSet addRowOffsets() {
+            return null;
+        }
+
+        @Override
+        public RangeSet modRowOffsets(int col) {
+            return null;
+        }
+    }
+
+    private interface ColumnVisitor {
+        int visit(View view, long startRange, int targetBatchSize, Consumer addInputStream,
+                
ChunkInputStreamGenerator.FieldNodeListener fieldNodeListener, + ChunkInputStreamGenerator.BufferListener bufferListener) throws IOException; + } + + public static class ModColumnGenerator { + final RangeSet rowsModified; + final BarrageStreamGeneratorImpl.ChunkListInputStreamGenerator data; + + public ModColumnGenerator(RangeSet rowsModified, + BarrageStreamGeneratorImpl.ChunkListInputStreamGenerator data) { + this.rowsModified = rowsModified; + this.data = data; + } + } + + public static class Factory { + public WebBarrageStreamGenerator newGenerator(WebBarrageMessage message) { + return new WebBarrageStreamGenerator(message); + } + + public SchemaView getSchemaView(ToIntFunction schemaWriter) { + final FlatBufferBuilder builder = new FlatBufferBuilder(); + final int schemaOffset = schemaWriter.applyAsInt(builder); + Message.startMessage(builder); + Message.addHeaderType(builder, org.apache.arrow.flatbuf.MessageHeader.Schema); + Message.addHeader(builder, schemaOffset); + Message.addVersion(builder, MetadataVersion.V5); + Message.addBodyLength(builder, 0); + builder.finish(Message.endMessage(builder)); + return new SchemaView(builder.dataBuffer()); + } + } + + private final WebBarrageMessage message; + + private final RangeSet rowsAdded; + private final RangeSet rowsRemoved; + private final ShiftedRange[] shifted; + + private final BarrageStreamGeneratorImpl.ChunkListInputStreamGenerator[] addColumnData; + private final ModColumnGenerator[] modColumnData; + + public WebBarrageStreamGenerator(WebBarrageMessage message) { + this.message = message; + + this.rowsAdded = message.rowsAdded; + this.rowsRemoved = message.rowsRemoved; + this.shifted = message.shifted; + + addColumnData = new BarrageStreamGeneratorImpl.ChunkListInputStreamGenerator[message.addColumnData.length]; + for (int i = 0; i < message.addColumnData.length; i++) { + WebBarrageMessage.AddColumnData columnData = message.addColumnData[i]; + addColumnData[i] = new BarrageStreamGeneratorImpl.ChunkListInputStreamGenerator(columnData.chunkType, + columnData.type, columnData.componentType, columnData.data); + } + + modColumnData = new ModColumnGenerator[message.modColumnData.length]; + for (int i = 0; i < modColumnData.length; i++) { + WebBarrageMessage.ModColumnData columnData = message.modColumnData[i]; + modColumnData[i] = new ModColumnGenerator(columnData.rowsModified, + new BarrageStreamGeneratorImpl.ChunkListInputStreamGenerator(columnData.chunkType, columnData.type, + columnData.componentType, columnData.data)); + } + } + + public SnapshotView getSnapshotView(BarrageSnapshotOptions options, BitSet snapshotColumns) { + return new SnapshotView(options, snapshotColumns); + } + + private FlightData getInputStream(View view, long offset, int targetBatchSize, AtomicInteger actualBatchSize, + ByteBuffer metadata, ColumnVisitor columnVisitor) throws IOException { + List streams = new ArrayList<>(); + AtomicInteger size = new AtomicInteger(); + + final Consumer addStream = (final DefensiveDrainable data) -> { + try { + final int sz = data.available(); + if (sz == 0) { + data.close(); + return; + } + + streams.add(data); + size.addAndGet(sz); + } catch (final IOException e) { + throw new UncheckedDeephavenException("Unexpected IOException", e); + } + }; + + + FlatBufferBuilder header = new FlatBufferBuilder(1024); + + final int numRows; + final int nodesOffset; + final int buffersOffset; + try (SizedChunk nodeOffsets = new SizedChunk<>(ChunkType.Object); + SizedLongChunk bufferInfos = new SizedLongChunk<>()) { + 
nodeOffsets.ensureCapacity(addColumnData.length); + nodeOffsets.get().setSize(0); + bufferInfos.ensureCapacity(addColumnData.length * 3); + bufferInfos.get().setSize(0); + + final AtomicLong totalBufferLength = new AtomicLong(); + final ChunkInputStreamGenerator.FieldNodeListener fieldNodeListener = + (numElements, nullCount) -> { + nodeOffsets.ensureCapacityPreserve(nodeOffsets.get().size() + 1); + nodeOffsets.get().asWritableObjectChunk() + .add(new ChunkInputStreamGenerator.FieldNodeInfo(numElements, nullCount)); + }; + + final ChunkInputStreamGenerator.BufferListener bufferListener = (length) -> { + totalBufferLength.addAndGet(length); + bufferInfos.ensureCapacityPreserve(bufferInfos.get().size() + 1); + bufferInfos.get().add(length); + }; + + + numRows = columnVisitor.visit(view, offset, targetBatchSize, addStream, fieldNodeListener, bufferListener); + actualBatchSize.set(numRows); + + final WritableChunk noChunk = nodeOffsets.get(); + RecordBatch.startNodesVector(header, noChunk.size()); + for (int i = noChunk.size() - 1; i >= 0; --i) { + final ChunkInputStreamGenerator.FieldNodeInfo node = + (ChunkInputStreamGenerator.FieldNodeInfo) noChunk.asObjectChunk().get(i); + FieldNode.createFieldNode(header, node.numElements, node.nullCount); + } + nodesOffset = header.endVector(); + + final WritableLongChunk biChunk = bufferInfos.get(); + RecordBatch.startBuffersVector(header, biChunk.size()); + for (int i = biChunk.size() - 1; i >= 0; --i) { + totalBufferLength.addAndGet(-biChunk.get(i)); + Buffer.createBuffer(header, totalBufferLength.longValue(), biChunk.get(i)); + } + buffersOffset = header.endVector(); + } + + + RecordBatch.startRecordBatch(header); + RecordBatch.addNodes(header, nodesOffset); + RecordBatch.addBuffers(header, buffersOffset); + if (view.options().columnsAsList()) { + RecordBatch.addLength(header, 1); + } else { + RecordBatch.addLength(header, numRows); + } + final int headerOffset = RecordBatch.endRecordBatch(header); + + header.finish(wrapInMessage(header, headerOffset, + org.apache.arrow.flatbuf.MessageHeader.RecordBatch, size.intValue())); + +// // now create the proto header +// try (final ExposedByteArrayOutputStream baos = new ExposedByteArrayOutputStream()) { +// writeHeader(metadata, size, header, baos); +// streams.add(0, +// new BarrageStreamGeneratorImpl.DrainableByteArrayInputStream(baos.peekBuffer(), 0, baos.size())); +// +// return new BarrageStreamGeneratorImpl.ConsecutiveDrainableStreams(streams.toArray(new InputStream[0])); +// } catch (final IOException ex) { +// throw new UncheckedDeephavenException("Unexpected IOException", ex); +// } + FlightData result = new FlightData(); + result.setDataHeader(WebBarrageUtils.bbToUint8ArrayView(header.dataBuffer())); + return result; + } + + public static int wrapInMessage(final FlatBufferBuilder builder, final int headerOffset, final byte headerType, + final int bodyLength) { + Message.startMessage(builder); + Message.addHeaderType(builder, headerType); + Message.addHeader(builder, headerOffset); + Message.addVersion(builder, MetadataVersion.V5); + Message.addBodyLength(builder, bodyLength); + return Message.endMessage(builder); + } + + private ByteBuffer getSnapshotMetadata(SnapshotView view) { + final FlatBufferBuilder metadata = new FlatBufferBuilder(); + + int effectiveViewportOffset = 0; + // if (view.isViewport()) { + // try (final BarrageStreamGeneratorImpl.RowSetGenerator viewportGen = new + // BarrageStreamGeneratorImpl.RowSetGenerator(view.viewport)) { + // effectiveViewportOffset = 
viewportGen.addToFlatBuffer(metadata); + // } + // } + + int effectiveColumnSetOffset = 0; + if (view.subscribedColumns != null) { + int nBits = view.subscribedColumns.previousSetBit(Integer.MAX_VALUE - 1) + 1; + effectiveColumnSetOffset = + metadata.createByteVector(view.subscribedColumns.toByteArray(), 0, (int) ((long) nBits + 7) / 8); + } + + + final int rowsAddedOffset = addRangeSetToFlatBuffer(metadata, rowsAdded); + + // no shifts in a snapshot, but need to provide a valid structure + final int shiftDataOffset = addShiftedToFlatBuffer(metadata, shifted); + + // Added Chunk Data: + int addedRowsIncludedOffset = 0; + // don't send `rowsIncluded` when identical to `rowsAdded`, client will infer they are the same + // addedRowsIncludedOffset = rowsIncluded.addToFlatBuffer(view.addRowKeys, metadata); + + BarrageUpdateMetadata.startBarrageUpdateMetadata(metadata); + BarrageUpdateMetadata.addIsSnapshot(metadata, true); + BarrageUpdateMetadata.addFirstSeq(metadata, -1); + BarrageUpdateMetadata.addLastSeq(metadata, -1); + BarrageUpdateMetadata.addEffectiveViewport(metadata, effectiveViewportOffset); + BarrageUpdateMetadata.addEffectiveColumnSet(metadata, effectiveColumnSetOffset); + BarrageUpdateMetadata.addAddedRows(metadata, rowsAddedOffset); + BarrageUpdateMetadata.addRemovedRows(metadata, 0); + BarrageUpdateMetadata.addShiftData(metadata, shiftDataOffset); + BarrageUpdateMetadata.addAddedRowsIncluded(metadata, addedRowsIncludedOffset); + BarrageUpdateMetadata.addModColumnNodes(metadata, 0); + BarrageUpdateMetadata.addEffectiveReverseViewport(metadata, false); + metadata.finish(BarrageUpdateMetadata.endBarrageUpdateMetadata(metadata)); + + final FlatBufferBuilder header = new FlatBufferBuilder(); + final int payloadOffset = BarrageMessageWrapper.createMsgPayloadVector(header, metadata.dataBuffer()); + BarrageMessageWrapper.startBarrageMessageWrapper(header); + BarrageMessageWrapper.addMagic(header, BarrageUtil.FLATBUFFER_MAGIC); + BarrageMessageWrapper.addMsgType(header, BarrageMessageType.BarrageUpdateMetadata); + BarrageMessageWrapper.addMsgPayload(header, payloadOffset); + header.finish(BarrageMessageWrapper.endBarrageMessageWrapper(header)); + + return header.dataBuffer().slice(); + } + + private int addRangeSetToFlatBuffer(FlatBufferBuilder builder, RangeSet rangeSet) { + return builder.createByteVector(CompressedRangeSetReader.writeRange(rangeSet)); + } + + private int addShiftedToFlatBuffer(FlatBufferBuilder builder, ShiftedRange[] ranges) { + if (ranges.length > 0) { + throw new UnsupportedOperationException("addShiftedToFlatBuffer"); + } + return 0; + } + + private void processBatches(Consumer visitor, View view, long numRows, int maxBatchSize, + ByteBuffer metadata, ColumnVisitor columnVisitor, AtomicLong bytesWritten) throws IOException { + long offset = 0; + AtomicInteger actualBatchSize = new AtomicInteger(); + + int batchSize = Math.min(DEFAULT_INITIAL_BATCH_SIZE, maxBatchSize); + + int maxMessageSize = view.clientMaxMessageSize() > 0 ? 
view.clientMaxMessageSize() : DEFAULT_MESSAGE_SIZE_LIMIT; + + while (offset < numRows) { + try { + FlightData is = + getInputStream(view, offset, batchSize, actualBatchSize, metadata, columnVisitor); + int bytesToWrite = is.getAppMetadata_asU8().length + is.getDataHeader_asU8().length + is.getDataBody_asU8().length; + + if (actualBatchSize.get() == 0) { + throw new IllegalStateException("No data was written for a batch"); + } + + // treat this as a hard limit, exceeding fails a client or w2w (unless we are sending a single + // row then we must send and let it potentially fail) + if (bytesToWrite < maxMessageSize || batchSize == 1) { + // let's write the data + visitor.accept(is); + + bytesWritten.addAndGet(bytesToWrite); + offset += actualBatchSize.intValue(); + metadata = null; + } // else, can't write this, we'll retry at the same offset as before + + // recompute the batch limit for the next message + int bytesPerRow = bytesToWrite / actualBatchSize.intValue(); + if (bytesPerRow > 0) { + int rowLimit = maxMessageSize / bytesPerRow; + + // add some margin for abnormal cell contents + batchSize = Math.min(maxBatchSize, Math.max(1, (int) ((double) rowLimit * 0.9))); + } + } catch (SizeException ex) { + // was an overflow in the ChunkInputStream generator (probably VarBinary). We can't compute the + // correct number of rows from this failure, so cut batch size in half and try again. This may + // occur multiple times until the size is restricted properly + if (batchSize == 1) { + // this row exceeds internal limits and can never be sent + throw (new UncheckedDeephavenException( + "BarrageStreamGenerator - single row (" + offset + ") exceeds transmissible size", ex)); + } + final int maximumSize = LongSizedDataStructure.intSize( + "BarrageStreamGenerator", ex.getMaximumSize()); + batchSize = maximumSize >= batchSize ? 
batchSize / 2 : maximumSize; + } + } + } + + private static int findGeneratorForOffset(final ChunkInputStreamGenerator[] generators, final long offset) { + // fast path for smaller updates + if (generators.length <= 1) { + return 0; + } + + int low = 0; + int high = generators.length; + + while (low + 1 < high) { + int mid = (low + high) / 2; + int cmp = Long.compare(generators[mid].getRowOffset(), offset); + + if (cmp < 0) { + // the generator's first key is low enough + low = mid; + } else if (cmp > 0) { + // the generator's first key is too high + high = mid; + } else { + // first key matches + return mid; + } + } + + // desired generator is at low as the high is exclusive + return low; + } + + + private int appendAddColumns(View view, long startRange, int targetBatchSize, Consumer addStream, + ChunkInputStreamGenerator.FieldNodeListener fieldNodeListener, + ChunkInputStreamGenerator.BufferListener bufferListener) throws IOException { + if (addColumnData.length == 0) { + return LongSizedDataStructure.intSize("view.addRowOffsets().size()", view.addRowOffsets().size()); + } + + // find the generator for the initial position-space key + long startPos = view.addRowOffsets().get(startRange); + int chunkIdx = findGeneratorForOffset(addColumnData[0].generators, startPos); + + // adjust the batch size if we would cross a chunk boundary + long shift = 0; + long endPos = view.addRowOffsets().get(startRange + targetBatchSize - 1); + if (endPos == RowSet.NULL_ROW_KEY) { + endPos = Long.MAX_VALUE; + } + if (addColumnData[0].generators.length > 0) { + final ChunkInputStreamGenerator tmpGenerator = addColumnData[0].generators[chunkIdx]; + endPos = Math.min(endPos, tmpGenerator.getLastRowOffset()); + shift = -tmpGenerator.getRowOffset(); + } + + // all column generators have the same boundaries, so we can re-use the offsets internal to this chunkIdx + final RangeSet allowedRange = RangeSet.ofRange(startPos, endPos); + final RangeSet myAddedOffsets = view.addRowOffsets().intersect(allowedRange); + final RangeSet adjustedOffsets = shift == 0 ? null : myAddedOffsets.shift(shift); + // every column must write to the stream + for (final BarrageStreamGeneratorImpl.ChunkListInputStreamGenerator data : addColumnData) { + final int numElements = data.generators.length == 0 + ? 0 + : LongSizedDataStructure.intSize("myAddedOffsets.size()", myAddedOffsets.size()); + if (view.options().columnsAsList()) { + // if we are sending columns as a list, we need to add the list buffers before each column + final SingleElementListHeaderInputStreamGenerator listHeader = + new SingleElementListHeaderInputStreamGenerator(numElements); + listHeader.visitFieldNodes(fieldNodeListener); + listHeader.visitBuffers(bufferListener); + addStream.accept(listHeader); + } + + if (numElements == 0) { + // use an empty generator to publish the column data + try (final RowSet empty = RowSetFactory.empty()) { + final ChunkInputStreamGenerator.DrainableColumn drainableColumn = + data.emptyGenerator.getInputStream(view.options(), empty); + drainableColumn.visitFieldNodes(fieldNodeListener); + drainableColumn.visitBuffers(bufferListener); + + // Add the drainable last as it is allowed to immediately close a row set the visitors need + addStream.accept(drainableColumn); + } + } else { + final ChunkInputStreamGenerator generator = data.generators[chunkIdx]; + final ChunkInputStreamGenerator.DrainableColumn drainableColumn = + generator.getInputStream(view.options(), shift == 0 ? 
myAddedOffsets : adjustedOffsets); + drainableColumn.visitFieldNodes(fieldNodeListener); + drainableColumn.visitBuffers(bufferListener); + // Add the drainable last as it is allowed to immediately close a row set the visitors need + addStream.accept(drainableColumn); + } + } + return LongSizedDataStructure.intSize("myAddedOffsets.size()", myAddedOffsets.size()); + } + + ////// WebBarrageUtil + public static final BarrageSnapshotOptions DEFAULT_SNAPSHOT_DESER_OPTIONS = + BarrageSnapshotOptions.builder().build(); + + public static void sendSchema(Consumer stream, Map columnsAndTypes) { + Factory streamGeneratorFactory = new Factory(); + streamGeneratorFactory + .getSchemaView(fbb -> makeTableSchemaPayload(fbb, DEFAULT_SNAPSHOT_DESER_OPTIONS, columnsAndTypes)) + .forEachStream(stream); + } + + private static int makeTableSchemaPayload(FlatBufferBuilder fbb, BarrageSnapshotOptions options, + Map columnsAndTypes) { + int[] fields = new int[columnsAndTypes.size()]; + int i = 0; + for (Map.Entry entry : columnsAndTypes.entrySet()) { + // Unlike BarrageUtil.java, we need to implement this ourselves rather than delegate to Arrow's own types + String name = entry.getKey(); + String type = entry.getValue(); + + // TODO this is wrong for array/vector types + JsDataHandler writer = JsDataHandler.getHandler(type); + if (options.columnsAsList()) { + throw new UnsupportedOperationException("columnsAsList not supported"); + } + + int nameOffset = fbb.createString(name); + int typeOffset = writer.writeType(fbb); + int metadataOffset = Field.createCustomMetadataVector(fbb, new int[] { + KeyValue.createKeyValue(fbb, fbb.createString("deephaven:type"), + fbb.createString(writer.deephavenType())) + }); + + Field.startField(fbb); + Field.addName(fbb, nameOffset); + Field.addNullable(fbb, true); + + Field.addTypeType(fbb, writer.typeType()); + Field.addType(fbb, typeOffset); + Field.addCustomMetadata(fbb, metadataOffset); + + fields[i++] = Field.endField(fbb); + } + + int fieldsOffset = Schema.createFieldsVector(fbb, fields); + + Schema.startSchema(fbb); + Schema.addFields(fbb, fieldsOffset); + return Schema.endSchema(fbb); + } + + public static void sendSnapshot(Consumer stream, BarrageSnapshotOptions options) { + WebBarrageMessage msg = constructMessage(); + WebBarrageStreamGenerator bsg = new WebBarrageStreamGenerator(msg); + bsg.getSnapshotView(options, null).forEachStream(stream); + + } + + private static WebBarrageMessage constructMessage() { + return new WebBarrageMessage();// TODO need args to create this + } +} diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/chunk/util/pools/ChunkPoolReleaseTracking.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/chunk/util/pools/ChunkPoolReleaseTracking.java index 9acff6511d7..c6e26966d4e 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/chunk/util/pools/ChunkPoolReleaseTracking.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/chunk/util/pools/ChunkPoolReleaseTracking.java @@ -1,6 +1,6 @@ -/** - * Copyright (c) 2016-2022 Deephaven Data Labs and Patent Pending - */ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// package io.deephaven.chunk.util.pools; /** diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSequence.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSequence.java new file mode 100644 index 00000000000..11ac09b4231 
--- /dev/null +++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSequence.java @@ -0,0 +1,24 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.engine.rowset; + +import io.deephaven.util.SafeCloseable; +import io.deephaven.util.datastructures.LongAbortableConsumer; +import io.deephaven.util.datastructures.LongRangeConsumer; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +public interface RowSequence extends SafeCloseable, LongSizedDataStructure { + boolean isEmpty(); + long lastRowKey(); + boolean forEachRowKey(LongAbortableConsumer lac); + default void forAllRowKeys(java.util.function.LongConsumer lc) { + forEachRowKey((final long v) -> { + lc.accept(v); + return true; + }); + } +// void forAllRowKeys(java.util.function.LongConsumer lc); + + void forAllRowKeyRanges(LongRangeConsumer lrc); +} \ No newline at end of file diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSequenceFactory.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSequenceFactory.java new file mode 100644 index 00000000000..d3fd8100907 --- /dev/null +++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSequenceFactory.java @@ -0,0 +1,11 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.engine.rowset; + +public class RowSequenceFactory { + public static /*final*/ RowSequence EMPTY; + public static RowSequence forRange(final long firstRowKey, final long lastRowKey) { + return null; + } +} \ No newline at end of file diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSet.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSet.java new file mode 100644 index 00000000000..ebe0d6166b4 --- /dev/null +++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSet.java @@ -0,0 +1,11 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.engine.rowset; + +import io.deephaven.util.SafeCloseable; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +public interface RowSet extends RowSequence, LongSizedDataStructure, SafeCloseable { + RowSet copy(); +} \ No newline at end of file diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSetBuilderSequential.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSetBuilderSequential.java new file mode 100644 index 00000000000..c7580427bc4 --- /dev/null +++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSetBuilderSequential.java @@ -0,0 +1,11 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.engine.rowset; + +import io.deephaven.util.datastructures.LongRangeConsumer; + +public interface RowSetBuilderSequential extends LongRangeConsumer { + void appendRange(long rangeFirstRowKey, long rangeLastRowKey); + RowSet build(); +} \ No newline at end of file diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSetFactory.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSetFactory.java new file mode 100644 index 00000000000..6ed9e89cd04 --- /dev/null +++ 
b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSetFactory.java @@ -0,0 +1,12 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.engine.rowset; + +import io.deephaven.engine.rowset.RowSetBuilderSequential; + +public class RowSetFactory { + public static RowSetBuilderSequential builderSequential() { + return null; + } +} \ No newline at end of file From d22544ee5a86eec5e55c7a86de8e6e40cbf3439d Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Mon, 3 Jun 2024 13:33:53 -0500 Subject: [PATCH 041/219] Set aside old impl, it probably doesn't translate nicely any more --- .../WebBarrageStreamGenerator.java.bak | 637 ++++++++++++++++++ 1 file changed, 637 insertions(+) create mode 100644 web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamGenerator.java.bak diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamGenerator.java.bak b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamGenerator.java.bak new file mode 100644 index 00000000000..f4a886f37f0 --- /dev/null +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamGenerator.java.bak @@ -0,0 +1,637 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.web.client.api.barrage; + +import com.google.flatbuffers.FlatBufferBuilder; +import elemental2.core.ArrayBufferView; +import elemental2.core.Uint8Array; +import io.deephaven.UncheckedDeephavenException; +import io.deephaven.barrage.flatbuf.BarrageMessageType; +import io.deephaven.barrage.flatbuf.BarrageMessageWrapper; +import io.deephaven.barrage.flatbuf.BarrageUpdateMetadata; +import io.deephaven.chunk.ChunkType; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.chunk.sized.SizedChunk; +import io.deephaven.chunk.sized.SizedLongChunk; +import io.deephaven.engine.rowset.RowSet; +import io.deephaven.engine.rowset.RowSetFactory; +import io.deephaven.extensions.barrage.BarrageSnapshotOptions; +import io.deephaven.extensions.barrage.ChunkListInputStreamGenerator; +import io.deephaven.extensions.barrage.DrainableByteArrayInputStream; +import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator; +import io.deephaven.extensions.barrage.chunk.SingleElementListHeaderInputStreamGenerator; +import io.deephaven.extensions.barrage.util.BarrageUtil; +import io.deephaven.extensions.barrage.util.DefensiveDrainable; +import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb.FlightData; +import io.deephaven.util.datastructures.LongSizedDataStructure; +import io.deephaven.util.datastructures.SizeException; +import io.deephaven.util.mutable.MutableInt; +import io.deephaven.util.mutable.MutableLong; +import io.deephaven.web.client.api.parse.JsDataHandler; +import io.deephaven.web.shared.data.RangeSet; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.web.shared.data.ShiftedRange; +import org.apache.arrow.flatbuf.Buffer; +import org.apache.arrow.flatbuf.Field; +import org.apache.arrow.flatbuf.FieldNode; +import org.apache.arrow.flatbuf.KeyValue; +import org.apache.arrow.flatbuf.Message; +import org.apache.arrow.flatbuf.MetadataVersion; +import org.apache.arrow.flatbuf.RecordBatch; +import org.apache.arrow.flatbuf.Schema; +import org.gwtproject.nio.TypedArrayHelper; +import 
org.jetbrains.annotations.Nullable; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.BitSet; +import java.util.List; +import java.util.Map; +import java.util.function.Consumer; +import java.util.function.ToIntFunction; + +/** + * Roughly mirrors BarrageStreamGeneratorImpl, except that viewports are not supported, and the grpc-web TS library is + * used for FlightData rather than InputStream to send data to the server. + */ +public class WebBarrageStreamGenerator { + private static final int DEFAULT_INITIAL_BATCH_SIZE = 4096; + private static final int DEFAULT_BATCH_SIZE = 1 << 16; + + private static final int DEFAULT_MESSAGE_SIZE_LIMIT = 100 * 1024 * 1024; + + public interface View { + void forEachStream(Consumer visitor) throws IOException; + + default boolean isViewport() { + return false; + } + + StreamReaderOptions options(); + + int clientMaxMessageSize(); + + RangeSet addRowOffsets(); + + RangeSet modRowOffsets(int col); + } + public class SnapshotView implements View { + private final BarrageSnapshotOptions options; + // private final RangeSet viewport; + // private final boolean reverseViewport; + private final BitSet subscribedColumns; + private final long numAddRows; + private final RangeSet addRowKeys; + private final RangeSet addRowOffsets; + + public SnapshotView(BarrageSnapshotOptions options, + @Nullable final BitSet subscribedColumns) { + this.options = options; + + this.subscribedColumns = subscribedColumns; + + this.addRowKeys = WebBarrageStreamGenerator.this.rowsAdded; + this.addRowOffsets = RangeSet.ofRange(0, addRowKeys.size() - 1); + + this.numAddRows = addRowOffsets.size(); + } + + @Override + public void forEachStream(Consumer visitor) throws IOException { + ByteBuffer metadata = WebBarrageStreamGenerator.this.getSnapshotMetadata(this); + MutableLong bytesWritten = new MutableLong(0L); + + int maxBatchSize = options().batchSize(); + if (maxBatchSize <= 0) { + maxBatchSize = DEFAULT_BATCH_SIZE; + } + final MutableInt actualBatchSize = new MutableInt(); + if (numAddRows == 0) { + // we still need to send a message containing metadata when there are no rows + FlightData inputStream = WebBarrageStreamGenerator.this + .getInputStream(this, 0, 0, actualBatchSize, metadata, + WebBarrageStreamGenerator.this::appendAddColumns); + visitor.accept(inputStream); + } else { + // send the add batches + WebBarrageStreamGenerator.this.processBatches(visitor, this, numAddRows, maxBatchSize, metadata, + WebBarrageStreamGenerator.this::appendAddColumns, bytesWritten); + } + } + + @Override + public StreamReaderOptions options() { + return options; + } + + @Override + public int clientMaxMessageSize() { + return options.maxMessageSize(); + } + + @Override + public RangeSet addRowOffsets() { + return addRowOffsets; + } + + @Override + public RangeSet modRowOffsets(int col) { + throw new UnsupportedOperationException("asked for mod row on SnapshotView"); + } + } + public static class SchemaView implements View { + private final ByteBuffer msgBytes; + + public SchemaView(ByteBuffer msgBytes) { + this.msgBytes = msgBytes; + } + + @Override + public void forEachStream(Consumer visitor) { + visitor.accept(new DrainableByteArrayInputStream(msgBytes, 0, msgBytes.remaining())); + FlightData data = new FlightData(); + ArrayBufferView view = TypedArrayHelper.unwrap(msgBytes); + new Uint8Array(view.buffer, msgBytes.position(), msgBytes.remaining()); + data.setDataHeader(new Uint8Array(view.buffer, 
msgBytes.position(), msgBytes.remaining())); + visitor.accept(data); + } + + @Override + public StreamReaderOptions options() { + return null; + } + + @Override + public int clientMaxMessageSize() { + return 0; + } + + @Override + public RangeSet addRowOffsets() { + return null; + } + + @Override + public RangeSet modRowOffsets(int col) { + return null; + } + } + + private interface ColumnVisitor { + int visit(View view, long startRange, int targetBatchSize, Consumer addInputStream, + ChunkInputStreamGenerator.FieldNodeListener fieldNodeListener, + ChunkInputStreamGenerator.BufferListener bufferListener) throws IOException; + } + + public static class ModColumnGenerator { + final RangeSet rowsModified; + final ChunkListInputStreamGenerator data; + + public ModColumnGenerator(RangeSet rowsModified, + ChunkListInputStreamGenerator data) { + this.rowsModified = rowsModified; + this.data = data; + } + } + + public static class Factory { + public WebBarrageStreamGenerator newGenerator(WebBarrageMessage message) { + return new WebBarrageStreamGenerator(message); + } + + public SchemaView getSchemaView(ToIntFunction schemaWriter) { + final FlatBufferBuilder builder = new FlatBufferBuilder(); + final int schemaOffset = schemaWriter.applyAsInt(builder); + Message.startMessage(builder); + Message.addHeaderType(builder, org.apache.arrow.flatbuf.MessageHeader.Schema); + Message.addHeader(builder, schemaOffset); + Message.addVersion(builder, MetadataVersion.V5); + Message.addBodyLength(builder, 0); + builder.finish(Message.endMessage(builder)); + return new SchemaView(builder.dataBuffer()); + } + } + + private final WebBarrageMessage message; + + private final RangeSet rowsAdded; + private final RangeSet rowsRemoved; + private final ShiftedRange[] shifted; + + private final ChunkListInputStreamGenerator[] addColumnData; + private final ModColumnGenerator[] modColumnData; + + public WebBarrageStreamGenerator(WebBarrageMessage message) { + this.message = message; + + this.rowsAdded = message.rowsAdded; + this.rowsRemoved = message.rowsRemoved; + this.shifted = message.shifted; + + addColumnData = new ChunkListInputStreamGenerator[message.addColumnData.length]; + for (int i = 0; i < message.addColumnData.length; i++) { + WebBarrageMessage.AddColumnData columnData = message.addColumnData[i]; + addColumnData[i] = new ChunkListInputStreamGenerator(columnData.type, columnData.componentType, columnData.data, columnData.chunkType); + } + + modColumnData = new ModColumnGenerator[message.modColumnData.length]; + for (int i = 0; i < modColumnData.length; i++) { + WebBarrageMessage.ModColumnData columnData = message.modColumnData[i]; + modColumnData[i] = new ModColumnGenerator(columnData.rowsModified, + new ChunkListInputStreamGenerator(columnData.type, columnData.componentType, columnData.data, columnData.chunkType)); + } + } + + public SnapshotView getSnapshotView(BarrageSnapshotOptions options, BitSet snapshotColumns) { + return new SnapshotView(options, snapshotColumns); + } + + private FlightData getInputStream(View view, long offset, int targetBatchSize, MutableInt actualBatchSize, + ByteBuffer metadata, ColumnVisitor columnVisitor) throws IOException { + List streams = new ArrayList<>(); + MutableInt size = new MutableInt(); + + final Consumer addStream = (final DefensiveDrainable data) -> { + try { + final int sz = data.available(); + if (sz == 0) { + data.close(); + return; + } + + streams.add(data); + size.addAndGet(sz); + } catch (final IOException e) { + throw new UncheckedDeephavenException("Unexpected 
IOException", e); + } + }; + + + FlatBufferBuilder header = new FlatBufferBuilder(1024); + + final int numRows; + final int nodesOffset; + final int buffersOffset; + try (SizedChunk nodeOffsets = new SizedChunk<>(ChunkType.Object); + SizedLongChunk bufferInfos = new SizedLongChunk<>()) { + nodeOffsets.ensureCapacity(addColumnData.length); + nodeOffsets.get().setSize(0); + bufferInfos.ensureCapacity(addColumnData.length * 3); + bufferInfos.get().setSize(0); + + final MutableLong totalBufferLength = new MutableLong(); + final ChunkInputStreamGenerator.FieldNodeListener fieldNodeListener = + (numElements, nullCount) -> { + nodeOffsets.ensureCapacityPreserve(nodeOffsets.get().size() + 1); + nodeOffsets.get().asWritableObjectChunk() + .add(new ChunkInputStreamGenerator.FieldNodeInfo(numElements, nullCount)); + }; + + final ChunkInputStreamGenerator.BufferListener bufferListener = (length) -> { + totalBufferLength.addAndGet(length); + bufferInfos.ensureCapacityPreserve(bufferInfos.get().size() + 1); + bufferInfos.get().add(length); + }; + + + numRows = columnVisitor.visit(view, offset, targetBatchSize, addStream, fieldNodeListener, bufferListener); + actualBatchSize.set(numRows); + + final WritableChunk noChunk = nodeOffsets.get(); + RecordBatch.startNodesVector(header, noChunk.size()); + for (int i = noChunk.size() - 1; i >= 0; --i) { + final ChunkInputStreamGenerator.FieldNodeInfo node = + (ChunkInputStreamGenerator.FieldNodeInfo) noChunk.asObjectChunk().get(i); + FieldNode.createFieldNode(header, node.numElements, node.nullCount); + } + nodesOffset = header.endVector(); + + final WritableLongChunk biChunk = bufferInfos.get(); + RecordBatch.startBuffersVector(header, biChunk.size()); + for (int i = biChunk.size() - 1; i >= 0; --i) { + totalBufferLength.addAndGet(-biChunk.get(i)); + Buffer.createBuffer(header, totalBufferLength.get(), biChunk.get(i)); + } + buffersOffset = header.endVector(); + } + + + RecordBatch.startRecordBatch(header); + RecordBatch.addNodes(header, nodesOffset); + RecordBatch.addBuffers(header, buffersOffset); + if (view.options().columnsAsList()) { + RecordBatch.addLength(header, 1); + } else { + RecordBatch.addLength(header, numRows); + } + final int headerOffset = RecordBatch.endRecordBatch(header); + + header.finish(wrapInMessage(header, headerOffset, + org.apache.arrow.flatbuf.MessageHeader.RecordBatch, size.get())); + +// // now create the proto header +// try (final ExposedByteArrayOutputStream baos = new ExposedByteArrayOutputStream()) { +// writeHeader(metadata, size, header, baos); +// streams.add(0, +// new BarrageStreamGeneratorImpl.DrainableByteArrayInputStream(baos.peekBuffer(), 0, baos.size())); +// +// return new BarrageStreamGeneratorImpl.ConsecutiveDrainableStreams(streams.toArray(new InputStream[0])); +// } catch (final IOException ex) { +// throw new UncheckedDeephavenException("Unexpected IOException", ex); +// } + FlightData result = new FlightData(); + result.setDataHeader(WebBarrageUtils.bbToUint8ArrayView(header.dataBuffer())); + return result; + } + + public static int wrapInMessage(final FlatBufferBuilder builder, final int headerOffset, final byte headerType, + final int bodyLength) { + Message.startMessage(builder); + Message.addHeaderType(builder, headerType); + Message.addHeader(builder, headerOffset); + Message.addVersion(builder, MetadataVersion.V5); + Message.addBodyLength(builder, bodyLength); + return Message.endMessage(builder); + } + + private ByteBuffer getSnapshotMetadata(SnapshotView view) { + final FlatBufferBuilder metadata = new 
FlatBufferBuilder(); + + int effectiveViewportOffset = 0; + // if (view.isViewport()) { + // try (final BarrageStreamGeneratorImpl.RowSetGenerator viewportGen = new + // BarrageStreamGeneratorImpl.RowSetGenerator(view.viewport)) { + // effectiveViewportOffset = viewportGen.addToFlatBuffer(metadata); + // } + // } + + int effectiveColumnSetOffset = 0; + if (view.subscribedColumns != null) { + int nBits = view.subscribedColumns.previousSetBit(Integer.MAX_VALUE - 1) + 1; + effectiveColumnSetOffset = + metadata.createByteVector(view.subscribedColumns.toByteArray(), 0, (int) ((long) nBits + 7) / 8); + } + + + final int rowsAddedOffset = addRangeSetToFlatBuffer(metadata, rowsAdded); + + // no shifts in a snapshot, but need to provide a valid structure + final int shiftDataOffset = addShiftedToFlatBuffer(metadata, shifted); + + // Added Chunk Data: + int addedRowsIncludedOffset = 0; + // don't send `rowsIncluded` when identical to `rowsAdded`, client will infer they are the same + // addedRowsIncludedOffset = rowsIncluded.addToFlatBuffer(view.addRowKeys, metadata); + + BarrageUpdateMetadata.startBarrageUpdateMetadata(metadata); + BarrageUpdateMetadata.addIsSnapshot(metadata, true); + BarrageUpdateMetadata.addFirstSeq(metadata, -1); + BarrageUpdateMetadata.addLastSeq(metadata, -1); + BarrageUpdateMetadata.addEffectiveViewport(metadata, effectiveViewportOffset); + BarrageUpdateMetadata.addEffectiveColumnSet(metadata, effectiveColumnSetOffset); + BarrageUpdateMetadata.addAddedRows(metadata, rowsAddedOffset); + BarrageUpdateMetadata.addRemovedRows(metadata, 0); + BarrageUpdateMetadata.addShiftData(metadata, shiftDataOffset); + BarrageUpdateMetadata.addAddedRowsIncluded(metadata, addedRowsIncludedOffset); + BarrageUpdateMetadata.addModColumnNodes(metadata, 0); + BarrageUpdateMetadata.addEffectiveReverseViewport(metadata, false); + metadata.finish(BarrageUpdateMetadata.endBarrageUpdateMetadata(metadata)); + + final FlatBufferBuilder header = new FlatBufferBuilder(); + final int payloadOffset = BarrageMessageWrapper.createMsgPayloadVector(header, metadata.dataBuffer()); + BarrageMessageWrapper.startBarrageMessageWrapper(header); + BarrageMessageWrapper.addMagic(header, BarrageUtil.FLATBUFFER_MAGIC); + BarrageMessageWrapper.addMsgType(header, BarrageMessageType.BarrageUpdateMetadata); + BarrageMessageWrapper.addMsgPayload(header, payloadOffset); + header.finish(BarrageMessageWrapper.endBarrageMessageWrapper(header)); + + return header.dataBuffer().slice(); + } + + private int addRangeSetToFlatBuffer(FlatBufferBuilder builder, RangeSet rangeSet) { + return builder.createByteVector(CompressedRangeSetReader.writeRange(rangeSet)); + } + + private int addShiftedToFlatBuffer(FlatBufferBuilder builder, ShiftedRange[] ranges) { + if (ranges.length > 0) { + throw new UnsupportedOperationException("addShiftedToFlatBuffer"); + } + return 0; + } + + private void processBatches(Consumer visitor, View view, long numRows, int maxBatchSize, + ByteBuffer metadata, ColumnVisitor columnVisitor, MutableLong bytesWritten) throws IOException { + long offset = 0; + MutableInt actualBatchSize = new MutableInt(); + + int batchSize = Math.min(DEFAULT_INITIAL_BATCH_SIZE, maxBatchSize); + + int maxMessageSize = view.clientMaxMessageSize() > 0 ? 
view.clientMaxMessageSize() : DEFAULT_MESSAGE_SIZE_LIMIT; + + while (offset < numRows) { + try { + FlightData is = + getInputStream(view, offset, batchSize, actualBatchSize, metadata, columnVisitor); + int bytesToWrite = is.getAppMetadata_asU8().length + is.getDataHeader_asU8().length + is.getDataBody_asU8().length; + + if (actualBatchSize.get() == 0) { + throw new IllegalStateException("No data was written for a batch"); + } + + // treat this as a hard limit, exceeding fails a client or w2w (unless we are sending a single + // row then we must send and let it potentially fail) + if (bytesToWrite < maxMessageSize || batchSize == 1) { + // let's write the data + visitor.accept(is); + + bytesWritten.addAndGet(bytesToWrite); + offset += actualBatchSize.get(); + metadata = null; + } // else, can't write this, we'll retry at the same offset as before + + // recompute the batch limit for the next message + int bytesPerRow = bytesToWrite / actualBatchSize.get(); + if (bytesPerRow > 0) { + int rowLimit = maxMessageSize / bytesPerRow; + + // add some margin for abnormal cell contents + batchSize = Math.min(maxBatchSize, Math.max(1, (int) ((double) rowLimit * 0.9))); + } + } catch (SizeException ex) { + // was an overflow in the ChunkInputStream generator (probably VarBinary). We can't compute the + // correct number of rows from this failure, so cut batch size in half and try again. This may + // occur multiple times until the size is restricted properly + if (batchSize == 1) { + // this row exceeds internal limits and can never be sent + throw (new UncheckedDeephavenException( + "BarrageStreamGenerator - single row (" + offset + ") exceeds transmissible size", ex)); + } + final int maximumSize = LongSizedDataStructure.intSize( + "BarrageStreamGenerator", ex.getMaximumSize()); + batchSize = maximumSize >= batchSize ? 
batchSize / 2 : maximumSize; + } + } + } + + private static int findGeneratorForOffset(final List generators, final long offset) { + // fast path for smaller updates + if (generators.size() <= 1) { + return 0; + } + + int low = 0; + int high = generators.size(); + + while (low + 1 < high) { + int mid = (low + high) / 2; + int cmp = Long.compare(generators.get(mid).getRowOffset(), offset); + + if (cmp < 0) { + // the generator's first key is low enough + low = mid; + } else if (cmp > 0) { + // the generator's first key is too high + high = mid; + } else { + // first key matches + return mid; + } + } + + // desired generator is at low as the high is exclusive + return low; + } + + + private int appendAddColumns(View view, long startRange, int targetBatchSize, Consumer addStream, + ChunkInputStreamGenerator.FieldNodeListener fieldNodeListener, + ChunkInputStreamGenerator.BufferListener bufferListener) throws IOException { + if (addColumnData.length == 0) { + return LongSizedDataStructure.intSize("view.addRowOffsets().size()", view.addRowOffsets().size()); + } + + // find the generator for the initial position-space key + long startPos = view.addRowOffsets().get(startRange); + int chunkIdx = findGeneratorForOffset(addColumnData[0].generators(), startPos); + + // adjust the batch size if we would cross a chunk boundary + long shift = 0; + long endPos = view.addRowOffsets().get(startRange + targetBatchSize - 1); + if (endPos == RowSet.NULL_ROW_KEY) { + endPos = Long.MAX_VALUE; + } + if (addColumnData[0].generators().size() > 0) { + final ChunkInputStreamGenerator tmpGenerator = addColumnData[0].generators().get(chunkIdx); + endPos = Math.min(endPos, tmpGenerator.getLastRowOffset()); + shift = -tmpGenerator.getRowOffset(); + } + + // all column generators have the same boundaries, so we can re-use the offsets internal to this chunkIdx + final RangeSet allowedRange = RangeSet.ofRange(startPos, endPos); + final RangeSet myAddedOffsets = view.addRowOffsets().intersect(allowedRange); + final RangeSet adjustedOffsets = shift == 0 ? null : myAddedOffsets.shift(shift); + // every column must write to the stream + for (final ChunkListInputStreamGenerator data : addColumnData) { + final int numElements = data.generators().isEmpty() + ? 0 + : LongSizedDataStructure.intSize("myAddedOffsets.size()", myAddedOffsets.size()); + if (view.options().columnsAsList()) { + // if we are sending columns as a list, we need to add the list buffers before each column + final SingleElementListHeaderInputStreamGenerator listHeader = + new SingleElementListHeaderInputStreamGenerator(numElements); + listHeader.visitFieldNodes(fieldNodeListener); + listHeader.visitBuffers(bufferListener); + addStream.accept(listHeader); + } + + if (numElements == 0) { + // use an empty generator to publish the column data + try (final RowSet empty = RowSetFactory.empty()) { + final ChunkInputStreamGenerator.DrainableColumn drainableColumn = + data.empty(view.options(), empty); + drainableColumn.visitFieldNodes(fieldNodeListener); + drainableColumn.visitBuffers(bufferListener); + + // Add the drainable last as it is allowed to immediately close a row set the visitors need + addStream.accept(drainableColumn); + } + } else { + final ChunkInputStreamGenerator generator = data.generators().get(chunkIdx); + final ChunkInputStreamGenerator.DrainableColumn drainableColumn = + generator.getInputStream(view.options(), shift == 0 ? 
myAddedOffsets : adjustedOffsets); + drainableColumn.visitFieldNodes(fieldNodeListener); + drainableColumn.visitBuffers(bufferListener); + // Add the drainable last as it is allowed to immediately close a row set the visitors need + addStream.accept(drainableColumn); + } + } + return LongSizedDataStructure.intSize("myAddedOffsets.size()", myAddedOffsets.size()); + } + + ////// WebBarrageUtil + public static final BarrageSnapshotOptions DEFAULT_SNAPSHOT_DESER_OPTIONS = + BarrageSnapshotOptions.builder().build(); + + public static void sendSchema(Consumer stream, Map columnsAndTypes) { + Factory streamGeneratorFactory = new Factory(); + streamGeneratorFactory + .getSchemaView(fbb -> makeTableSchemaPayload(fbb, DEFAULT_SNAPSHOT_DESER_OPTIONS, columnsAndTypes)) + .forEachStream(stream); + } + + private static int makeTableSchemaPayload(FlatBufferBuilder fbb, BarrageSnapshotOptions options, + Map columnsAndTypes) { + int[] fields = new int[columnsAndTypes.size()]; + int i = 0; + for (Map.Entry entry : columnsAndTypes.entrySet()) { + // Unlike BarrageUtil.java, we need to implement this ourselves rather than delegate to Arrow's own types + String name = entry.getKey(); + String type = entry.getValue(); + + // TODO this is wrong for array/vector types + JsDataHandler writer = JsDataHandler.getHandler(type); + if (options.columnsAsList()) { + throw new UnsupportedOperationException("columnsAsList not supported"); + } + + int nameOffset = fbb.createString(name); + int typeOffset = writer.writeType(fbb); + int metadataOffset = Field.createCustomMetadataVector(fbb, new int[] { + KeyValue.createKeyValue(fbb, fbb.createString("deephaven:type"), + fbb.createString(writer.deephavenType())) + }); + + Field.startField(fbb); + Field.addName(fbb, nameOffset); + Field.addNullable(fbb, true); + + Field.addTypeType(fbb, writer.typeType()); + Field.addType(fbb, typeOffset); + Field.addCustomMetadata(fbb, metadataOffset); + + fields[i++] = Field.endField(fbb); + } + + int fieldsOffset = Schema.createFieldsVector(fbb, fields); + + Schema.startSchema(fbb); + Schema.addFields(fbb, fieldsOffset); + return Schema.endSchema(fbb); + } + + public static void sendSnapshot(Consumer stream, BarrageSnapshotOptions options) { + WebBarrageMessage msg = constructMessage(); + WebBarrageStreamGenerator bsg = new WebBarrageStreamGenerator(msg); + bsg.getSnapshotView(options, null).forEachStream(stream); + + } + + private static WebBarrageMessage constructMessage() { + return new WebBarrageMessage();// TODO need args to create this + } +} From d266817309378f9407515a9ab5a0be7a51131857 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Mon, 3 Jun 2024 17:08:34 -0500 Subject: [PATCH 042/219] Rebase damage --- .../main/resources/io/deephaven/Util.gwt.xml | 1 + .../chunk/ChunkInputStreamGenerator.java | 82 +++++++++---------- 2 files changed, 42 insertions(+), 41 deletions(-) diff --git a/Util/src/main/resources/io/deephaven/Util.gwt.xml b/Util/src/main/resources/io/deephaven/Util.gwt.xml index a711bf4e95d..ad0478d7bd7 100644 --- a/Util/src/main/resources/io/deephaven/Util.gwt.xml +++ b/Util/src/main/resources/io/deephaven/Util.gwt.xml @@ -5,6 +5,7 @@ + \ No newline at end of file diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java index 351944b3587..fb56f6c5215 100644 --- 
a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java @@ -151,33 +151,33 @@ static ChunkInputStreamGenerator makeInputStreamGenerator( if (type == Short.class) { return ShortChunkInputStreamGenerator.convertBoxed(chunk.asObjectChunk(), rowOffset); } - if (type == LocalDate.class) { - return LongChunkInputStreamGenerator.convertWithTransform(chunk.asObjectChunk(), - rowOffset, date -> { - if (date == null) { - return QueryConstants.NULL_LONG; - } - final long epochDay = date.toEpochDay(); - if (epochDay < MIN_LOCAL_DATE_VALUE || epochDay > MAX_LOCAL_DATE_VALUE) { - throw new IllegalArgumentException("Date out of range: " + date + " (" + epochDay - + " not in [" + MIN_LOCAL_DATE_VALUE + ", " + MAX_LOCAL_DATE_VALUE + "])"); - } - return epochDay * MS_PER_DAY; - }); - } - if (type == LocalTime.class) { - return LongChunkInputStreamGenerator.convertWithTransform(chunk.asObjectChunk(), - rowOffset, time -> { - if (time == null) { - return QueryConstants.NULL_LONG; - } - final long nanoOfDay = time.toNanoOfDay(); - if (nanoOfDay < 0) { - throw new IllegalArgumentException("Time out of range: " + time); - } - return nanoOfDay; - }); - } +// if (type == LocalDate.class) { +// return LongChunkInputStreamGenerator.convertWithTransform(chunk.asObjectChunk(), +// rowOffset, date -> { +// if (date == null) { +// return QueryConstants.NULL_LONG; +// } +// final long epochDay = date.toEpochDay(); +// if (epochDay < MIN_LOCAL_DATE_VALUE || epochDay > MAX_LOCAL_DATE_VALUE) { +// throw new IllegalArgumentException("Date out of range: " + date + " (" + epochDay +// + " not in [" + MIN_LOCAL_DATE_VALUE + ", " + MAX_LOCAL_DATE_VALUE + "])"); +// } +// return epochDay * MS_PER_DAY; +// }); +// } +// if (type == LocalTime.class) { +// return LongChunkInputStreamGenerator.convertWithTransform(chunk.asObjectChunk(), +// rowOffset, time -> { +// if (time == null) { +// return QueryConstants.NULL_LONG; +// } +// final long nanoOfDay = time.toNanoOfDay(); +// if (nanoOfDay < 0) { +// throw new IllegalArgumentException("Time out of range: " + time); +// } +// return nanoOfDay; +// }); +// } // TODO (core#936): support column conversion modes return new VarBinaryChunkInputStreamGenerator<>(chunk.asObjectChunk(), rowOffset, @@ -345,20 +345,20 @@ static WritableChunk extractChunkFromInputStream( Short.BYTES, options, io -> TypeUtils.box(io.readShort()), fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } - if (type == LocalDate.class) { - return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( - Long.BYTES, options, - value -> value == QueryConstants.NULL_LONG - ? null - : LocalDate.ofEpochDay(value / MS_PER_DAY), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == LocalTime.class) { - return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( - Long.BYTES, options, - value -> value == QueryConstants.NULL_LONG ? null : LocalTime.ofNanoOfDay(value), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } +// if (type == LocalDate.class) { +// return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( +// Long.BYTES, options, +// value -> value == QueryConstants.NULL_LONG +// ? 
null +// : LocalDate.ofEpochDay(value / MS_PER_DAY), +// fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); +// } +// if (type == LocalTime.class) { +// return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( +// Long.BYTES, options, +// value -> value == QueryConstants.NULL_LONG ? null : LocalTime.ofNanoOfDay(value), +// fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); +// } if (type == String.class || options.columnConversionMode().equals(ColumnConversionMode.Stringify)) { return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream(is, fieldNodeIter, From 978a30a86b8eabc1d9a01f8c3eb26076f5ec83c1 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Mon, 3 Jun 2024 17:09:22 -0500 Subject: [PATCH 043/219] Start a fresh WBSG --- .../barrage/WebBarrageStreamGenerator.java | 639 ------------------ 1 file changed, 639 deletions(-) delete mode 100644 web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamGenerator.java diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamGenerator.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamGenerator.java deleted file mode 100644 index 8e0451367c4..00000000000 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamGenerator.java +++ /dev/null @@ -1,639 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.web.client.api.barrage; - -import com.google.flatbuffers.FlatBufferBuilder; -import elemental2.core.ArrayBufferView; -import elemental2.core.Uint8Array; -import io.deephaven.UncheckedDeephavenException; -import io.deephaven.barrage.flatbuf.BarrageMessageType; -import io.deephaven.barrage.flatbuf.BarrageMessageWrapper; -import io.deephaven.barrage.flatbuf.BarrageUpdateMetadata; -import io.deephaven.chunk.ChunkType; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.WritableLongChunk; -import io.deephaven.chunk.attributes.Values; -import io.deephaven.chunk.sized.SizedChunk; -import io.deephaven.chunk.sized.SizedLongChunk; -import io.deephaven.engine.rowset.RowSet; -import io.deephaven.engine.rowset.RowSetFactory; -import io.deephaven.extensions.barrage.BarrageSnapshotOptions; -import io.deephaven.extensions.barrage.BarrageStreamGeneratorImpl; -import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator; -import io.deephaven.extensions.barrage.chunk.SingleElementListHeaderInputStreamGenerator; -import io.deephaven.extensions.barrage.util.BarrageUtil; -import io.deephaven.extensions.barrage.util.DefensiveDrainable; -import io.deephaven.extensions.barrage.util.ExposedByteArrayOutputStream; -import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb.FlightData; -import io.deephaven.util.datastructures.LongSizedDataStructure; -import io.deephaven.util.datastructures.SizeException; -import io.deephaven.web.client.api.parse.JsDataHandler; -import io.deephaven.web.shared.data.RangeSet; -import io.deephaven.extensions.barrage.util.StreamReaderOptions; -import io.deephaven.web.shared.data.ShiftedRange; -import org.apache.arrow.flatbuf.Buffer; -import org.apache.arrow.flatbuf.Field; -import org.apache.arrow.flatbuf.FieldNode; -import org.apache.arrow.flatbuf.KeyValue; -import org.apache.arrow.flatbuf.Message; -import org.apache.arrow.flatbuf.MetadataVersion; -import org.apache.arrow.flatbuf.RecordBatch; -import org.apache.arrow.flatbuf.Schema; -import 
org.gwtproject.nio.TypedArrayHelper; -import org.jetbrains.annotations.Nullable; - -import java.io.IOException; -import java.io.InputStream; -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.BitSet; -import java.util.List; -import java.util.Map; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicLong; -import java.util.function.Consumer; -import java.util.function.ToIntFunction; - -/** - * Roughly mirrors BarrageStreamGeneratorImpl, except that viewports are not supported, and the grpc-web TS library is - * used for FlightData rather than InputStream to send data to the server. - */ -public class WebBarrageStreamGenerator { - private static final int DEFAULT_INITIAL_BATCH_SIZE = 4096; - private static final int DEFAULT_BATCH_SIZE = 1 << 16; - - private static final int DEFAULT_MESSAGE_SIZE_LIMIT = 100 * 1024 * 1024; - - public interface View { - void forEachStream(Consumer visitor) throws IOException; - - default boolean isViewport() { - return false; - } - - StreamReaderOptions options(); - - int clientMaxMessageSize(); - - RangeSet addRowOffsets(); - - RangeSet modRowOffsets(int col); - } - public class SnapshotView implements View { - private final BarrageSnapshotOptions options; - // private final RangeSet viewport; - // private final boolean reverseViewport; - private final BitSet subscribedColumns; - private final long numAddRows; - private final RangeSet addRowKeys; - private final RangeSet addRowOffsets; - - public SnapshotView(BarrageSnapshotOptions options, - @Nullable final BitSet subscribedColumns) { - this.options = options; - - this.subscribedColumns = subscribedColumns; - - this.addRowKeys = WebBarrageStreamGenerator.this.rowsAdded; - this.addRowOffsets = RangeSet.ofRange(0, addRowKeys.size() - 1); - - this.numAddRows = addRowOffsets.size(); - } - - @Override - public void forEachStream(Consumer visitor) { - ByteBuffer metadata = WebBarrageStreamGenerator.this.getSnapshotMetadata(this); - AtomicLong bytesWritten = new AtomicLong(0); - - int maxBatchSize = options().batchSize(); - if (maxBatchSize <= 0) { - maxBatchSize = DEFAULT_BATCH_SIZE; - } - AtomicInteger actualBatchSize = new AtomicInteger(); - if (numAddRows == 0) { - // we still need to send a message containing metadata when there are no rows - FlightData inputStream = WebBarrageStreamGenerator.this - .getInputStream(this, 0, 0, actualBatchSize, metadata, - WebBarrageStreamGenerator.this::appendAddColumns); - visitor.accept(inputStream); - } else { - // send the add batches - WebBarrageStreamGenerator.this.processBatches(visitor, this, numAddRows, maxBatchSize, metadata, - WebBarrageStreamGenerator.this::appendAddColumns, bytesWritten); - } - } - - @Override - public StreamReaderOptions options() { - return options; - } - - @Override - public int clientMaxMessageSize() { - return options.maxMessageSize(); - } - - @Override - public RangeSet addRowOffsets() { - return addRowOffsets; - } - - @Override - public RangeSet modRowOffsets(int col) { - throw new UnsupportedOperationException("asked for mod row on SnapshotView"); - } - } - public static class SchemaView implements View { - private final ByteBuffer msgBytes; - - public SchemaView(ByteBuffer msgBytes) { - this.msgBytes = msgBytes; - } - - @Override - public void forEachStream(Consumer visitor) { - visitor.accept(new BarrageStreamGeneratorImpl.DrainableByteArrayInputStream(msgBytes, 0, msgBytes.l)); - FlightData data = new FlightData(); - ArrayBufferView view = TypedArrayHelper.unwrap(msgBytes); 
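- // note: the result of the following expression statement is discarded; setDataHeader below builds its own Uint8Array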
- new Uint8Array(view.buffer, msgBytes.position(), msgBytes.remaining()); - data.setDataHeader(new Uint8Array(view.buffer, msgBytes.position(), msgBytes.remaining())); - visitor.accept(data); - } - - @Override - public StreamReaderOptions options() { - return null; - } - - @Override - public int clientMaxMessageSize() { - return 0; - } - - @Override - public RangeSet addRowOffsets() { - return null; - } - - @Override - public RangeSet modRowOffsets(int col) { - return null; - } - } - - private interface ColumnVisitor { - int visit(View view, long startRange, int targetBatchSize, Consumer addInputStream, - ChunkInputStreamGenerator.FieldNodeListener fieldNodeListener, - ChunkInputStreamGenerator.BufferListener bufferListener) throws IOException; - } - - public static class ModColumnGenerator { - final RangeSet rowsModified; - final BarrageStreamGeneratorImpl.ChunkListInputStreamGenerator data; - - public ModColumnGenerator(RangeSet rowsModified, - BarrageStreamGeneratorImpl.ChunkListInputStreamGenerator data) { - this.rowsModified = rowsModified; - this.data = data; - } - } - - public static class Factory { - public WebBarrageStreamGenerator newGenerator(WebBarrageMessage message) { - return new WebBarrageStreamGenerator(message); - } - - public SchemaView getSchemaView(ToIntFunction schemaWriter) { - final FlatBufferBuilder builder = new FlatBufferBuilder(); - final int schemaOffset = schemaWriter.applyAsInt(builder); - Message.startMessage(builder); - Message.addHeaderType(builder, org.apache.arrow.flatbuf.MessageHeader.Schema); - Message.addHeader(builder, schemaOffset); - Message.addVersion(builder, MetadataVersion.V5); - Message.addBodyLength(builder, 0); - builder.finish(Message.endMessage(builder)); - return new SchemaView(builder.dataBuffer()); - } - } - - private final WebBarrageMessage message; - - private final RangeSet rowsAdded; - private final RangeSet rowsRemoved; - private final ShiftedRange[] shifted; - - private final BarrageStreamGeneratorImpl.ChunkListInputStreamGenerator[] addColumnData; - private final ModColumnGenerator[] modColumnData; - - public WebBarrageStreamGenerator(WebBarrageMessage message) { - this.message = message; - - this.rowsAdded = message.rowsAdded; - this.rowsRemoved = message.rowsRemoved; - this.shifted = message.shifted; - - addColumnData = new BarrageStreamGeneratorImpl.ChunkListInputStreamGenerator[message.addColumnData.length]; - for (int i = 0; i < message.addColumnData.length; i++) { - WebBarrageMessage.AddColumnData columnData = message.addColumnData[i]; - addColumnData[i] = new BarrageStreamGeneratorImpl.ChunkListInputStreamGenerator(columnData.chunkType, - columnData.type, columnData.componentType, columnData.data); - } - - modColumnData = new ModColumnGenerator[message.modColumnData.length]; - for (int i = 0; i < modColumnData.length; i++) { - WebBarrageMessage.ModColumnData columnData = message.modColumnData[i]; - modColumnData[i] = new ModColumnGenerator(columnData.rowsModified, - new BarrageStreamGeneratorImpl.ChunkListInputStreamGenerator(columnData.chunkType, columnData.type, - columnData.componentType, columnData.data)); - } - } - - public SnapshotView getSnapshotView(BarrageSnapshotOptions options, BitSet snapshotColumns) { - return new SnapshotView(options, snapshotColumns); - } - - private FlightData getInputStream(View view, long offset, int targetBatchSize, AtomicInteger actualBatchSize, - ByteBuffer metadata, ColumnVisitor columnVisitor) throws IOException { - List streams = new ArrayList<>(); - AtomicInteger size = new 
AtomicInteger(); - - final Consumer addStream = (final DefensiveDrainable data) -> { - try { - final int sz = data.available(); - if (sz == 0) { - data.close(); - return; - } - - streams.add(data); - size.addAndGet(sz); - } catch (final IOException e) { - throw new UncheckedDeephavenException("Unexpected IOException", e); - } - }; - - - FlatBufferBuilder header = new FlatBufferBuilder(1024); - - final int numRows; - final int nodesOffset; - final int buffersOffset; - try (SizedChunk nodeOffsets = new SizedChunk<>(ChunkType.Object); - SizedLongChunk bufferInfos = new SizedLongChunk<>()) { - nodeOffsets.ensureCapacity(addColumnData.length); - nodeOffsets.get().setSize(0); - bufferInfos.ensureCapacity(addColumnData.length * 3); - bufferInfos.get().setSize(0); - - final AtomicLong totalBufferLength = new AtomicLong(); - final ChunkInputStreamGenerator.FieldNodeListener fieldNodeListener = - (numElements, nullCount) -> { - nodeOffsets.ensureCapacityPreserve(nodeOffsets.get().size() + 1); - nodeOffsets.get().asWritableObjectChunk() - .add(new ChunkInputStreamGenerator.FieldNodeInfo(numElements, nullCount)); - }; - - final ChunkInputStreamGenerator.BufferListener bufferListener = (length) -> { - totalBufferLength.addAndGet(length); - bufferInfos.ensureCapacityPreserve(bufferInfos.get().size() + 1); - bufferInfos.get().add(length); - }; - - - numRows = columnVisitor.visit(view, offset, targetBatchSize, addStream, fieldNodeListener, bufferListener); - actualBatchSize.set(numRows); - - final WritableChunk noChunk = nodeOffsets.get(); - RecordBatch.startNodesVector(header, noChunk.size()); - for (int i = noChunk.size() - 1; i >= 0; --i) { - final ChunkInputStreamGenerator.FieldNodeInfo node = - (ChunkInputStreamGenerator.FieldNodeInfo) noChunk.asObjectChunk().get(i); - FieldNode.createFieldNode(header, node.numElements, node.nullCount); - } - nodesOffset = header.endVector(); - - final WritableLongChunk biChunk = bufferInfos.get(); - RecordBatch.startBuffersVector(header, biChunk.size()); - for (int i = biChunk.size() - 1; i >= 0; --i) { - totalBufferLength.addAndGet(-biChunk.get(i)); - Buffer.createBuffer(header, totalBufferLength.longValue(), biChunk.get(i)); - } - buffersOffset = header.endVector(); - } - - - RecordBatch.startRecordBatch(header); - RecordBatch.addNodes(header, nodesOffset); - RecordBatch.addBuffers(header, buffersOffset); - if (view.options().columnsAsList()) { - RecordBatch.addLength(header, 1); - } else { - RecordBatch.addLength(header, numRows); - } - final int headerOffset = RecordBatch.endRecordBatch(header); - - header.finish(wrapInMessage(header, headerOffset, - org.apache.arrow.flatbuf.MessageHeader.RecordBatch, size.intValue())); - -// // now create the proto header -// try (final ExposedByteArrayOutputStream baos = new ExposedByteArrayOutputStream()) { -// writeHeader(metadata, size, header, baos); -// streams.add(0, -// new BarrageStreamGeneratorImpl.DrainableByteArrayInputStream(baos.peekBuffer(), 0, baos.size())); -// -// return new BarrageStreamGeneratorImpl.ConsecutiveDrainableStreams(streams.toArray(new InputStream[0])); -// } catch (final IOException ex) { -// throw new UncheckedDeephavenException("Unexpected IOException", ex); -// } - FlightData result = new FlightData(); - result.setDataHeader(WebBarrageUtils.bbToUint8ArrayView(header.dataBuffer())); - return result; - } - - public static int wrapInMessage(final FlatBufferBuilder builder, final int headerOffset, final byte headerType, - final int bodyLength) { - Message.startMessage(builder); - 
Message.addHeaderType(builder, headerType); - Message.addHeader(builder, headerOffset); - Message.addVersion(builder, MetadataVersion.V5); - Message.addBodyLength(builder, bodyLength); - return Message.endMessage(builder); - } - - private ByteBuffer getSnapshotMetadata(SnapshotView view) { - final FlatBufferBuilder metadata = new FlatBufferBuilder(); - - int effectiveViewportOffset = 0; - // if (view.isViewport()) { - // try (final BarrageStreamGeneratorImpl.RowSetGenerator viewportGen = new - // BarrageStreamGeneratorImpl.RowSetGenerator(view.viewport)) { - // effectiveViewportOffset = viewportGen.addToFlatBuffer(metadata); - // } - // } - - int effectiveColumnSetOffset = 0; - if (view.subscribedColumns != null) { - int nBits = view.subscribedColumns.previousSetBit(Integer.MAX_VALUE - 1) + 1; - effectiveColumnSetOffset = - metadata.createByteVector(view.subscribedColumns.toByteArray(), 0, (int) ((long) nBits + 7) / 8); - } - - - final int rowsAddedOffset = addRangeSetToFlatBuffer(metadata, rowsAdded); - - // no shifts in a snapshot, but need to provide a valid structure - final int shiftDataOffset = addShiftedToFlatBuffer(metadata, shifted); - - // Added Chunk Data: - int addedRowsIncludedOffset = 0; - // don't send `rowsIncluded` when identical to `rowsAdded`, client will infer they are the same - // addedRowsIncludedOffset = rowsIncluded.addToFlatBuffer(view.addRowKeys, metadata); - - BarrageUpdateMetadata.startBarrageUpdateMetadata(metadata); - BarrageUpdateMetadata.addIsSnapshot(metadata, true); - BarrageUpdateMetadata.addFirstSeq(metadata, -1); - BarrageUpdateMetadata.addLastSeq(metadata, -1); - BarrageUpdateMetadata.addEffectiveViewport(metadata, effectiveViewportOffset); - BarrageUpdateMetadata.addEffectiveColumnSet(metadata, effectiveColumnSetOffset); - BarrageUpdateMetadata.addAddedRows(metadata, rowsAddedOffset); - BarrageUpdateMetadata.addRemovedRows(metadata, 0); - BarrageUpdateMetadata.addShiftData(metadata, shiftDataOffset); - BarrageUpdateMetadata.addAddedRowsIncluded(metadata, addedRowsIncludedOffset); - BarrageUpdateMetadata.addModColumnNodes(metadata, 0); - BarrageUpdateMetadata.addEffectiveReverseViewport(metadata, false); - metadata.finish(BarrageUpdateMetadata.endBarrageUpdateMetadata(metadata)); - - final FlatBufferBuilder header = new FlatBufferBuilder(); - final int payloadOffset = BarrageMessageWrapper.createMsgPayloadVector(header, metadata.dataBuffer()); - BarrageMessageWrapper.startBarrageMessageWrapper(header); - BarrageMessageWrapper.addMagic(header, BarrageUtil.FLATBUFFER_MAGIC); - BarrageMessageWrapper.addMsgType(header, BarrageMessageType.BarrageUpdateMetadata); - BarrageMessageWrapper.addMsgPayload(header, payloadOffset); - header.finish(BarrageMessageWrapper.endBarrageMessageWrapper(header)); - - return header.dataBuffer().slice(); - } - - private int addRangeSetToFlatBuffer(FlatBufferBuilder builder, RangeSet rangeSet) { - return builder.createByteVector(CompressedRangeSetReader.writeRange(rangeSet)); - } - - private int addShiftedToFlatBuffer(FlatBufferBuilder builder, ShiftedRange[] ranges) { - if (ranges.length > 0) { - throw new UnsupportedOperationException("addShiftedToFlatBuffer"); - } - return 0; - } - - private void processBatches(Consumer visitor, View view, long numRows, int maxBatchSize, - ByteBuffer metadata, ColumnVisitor columnVisitor, AtomicLong bytesWritten) throws IOException { - long offset = 0; - AtomicInteger actualBatchSize = new AtomicInteger(); - - int batchSize = Math.min(DEFAULT_INITIAL_BATCH_SIZE, maxBatchSize); - - int 
maxMessageSize = view.clientMaxMessageSize() > 0 ? view.clientMaxMessageSize() : DEFAULT_MESSAGE_SIZE_LIMIT; - - while (offset < numRows) { - try { - FlightData is = - getInputStream(view, offset, batchSize, actualBatchSize, metadata, columnVisitor); - int bytesToWrite = is.getAppMetadata_asU8().length + is.getDataHeader_asU8().length + is.getDataBody_asU8().length; - - if (actualBatchSize.get() == 0) { - throw new IllegalStateException("No data was written for a batch"); - } - - // treat this as a hard limit, exceeding fails a client or w2w (unless we are sending a single - // row then we must send and let it potentially fail) - if (bytesToWrite < maxMessageSize || batchSize == 1) { - // let's write the data - visitor.accept(is); - - bytesWritten.addAndGet(bytesToWrite); - offset += actualBatchSize.intValue(); - metadata = null; - } // else, can't write this, we'll retry at the same offset as before - - // recompute the batch limit for the next message - int bytesPerRow = bytesToWrite / actualBatchSize.intValue(); - if (bytesPerRow > 0) { - int rowLimit = maxMessageSize / bytesPerRow; - - // add some margin for abnormal cell contents - batchSize = Math.min(maxBatchSize, Math.max(1, (int) ((double) rowLimit * 0.9))); - } - } catch (SizeException ex) { - // was an overflow in the ChunkInputStream generator (probably VarBinary). We can't compute the - // correct number of rows from this failure, so cut batch size in half and try again. This may - // occur multiple times until the size is restricted properly - if (batchSize == 1) { - // this row exceeds internal limits and can never be sent - throw (new UncheckedDeephavenException( - "BarrageStreamGenerator - single row (" + offset + ") exceeds transmissible size", ex)); - } - final int maximumSize = LongSizedDataStructure.intSize( - "BarrageStreamGenerator", ex.getMaximumSize()); - batchSize = maximumSize >= batchSize ? 
batchSize / 2 : maximumSize; - } - } - } - - private static int findGeneratorForOffset(final ChunkInputStreamGenerator[] generators, final long offset) { - // fast path for smaller updates - if (generators.length <= 1) { - return 0; - } - - int low = 0; - int high = generators.length; - - while (low + 1 < high) { - int mid = (low + high) / 2; - int cmp = Long.compare(generators[mid].getRowOffset(), offset); - - if (cmp < 0) { - // the generator's first key is low enough - low = mid; - } else if (cmp > 0) { - // the generator's first key is too high - high = mid; - } else { - // first key matches - return mid; - } - } - - // desired generator is at low as the high is exclusive - return low; - } - - - private int appendAddColumns(View view, long startRange, int targetBatchSize, Consumer addStream, - ChunkInputStreamGenerator.FieldNodeListener fieldNodeListener, - ChunkInputStreamGenerator.BufferListener bufferListener) throws IOException { - if (addColumnData.length == 0) { - return LongSizedDataStructure.intSize("view.addRowOffsets().size()", view.addRowOffsets().size()); - } - - // find the generator for the initial position-space key - long startPos = view.addRowOffsets().get(startRange); - int chunkIdx = findGeneratorForOffset(addColumnData[0].generators, startPos); - - // adjust the batch size if we would cross a chunk boundary - long shift = 0; - long endPos = view.addRowOffsets().get(startRange + targetBatchSize - 1); - if (endPos == RowSet.NULL_ROW_KEY) { - endPos = Long.MAX_VALUE; - } - if (addColumnData[0].generators.length > 0) { - final ChunkInputStreamGenerator tmpGenerator = addColumnData[0].generators[chunkIdx]; - endPos = Math.min(endPos, tmpGenerator.getLastRowOffset()); - shift = -tmpGenerator.getRowOffset(); - } - - // all column generators have the same boundaries, so we can re-use the offsets internal to this chunkIdx - final RangeSet allowedRange = RangeSet.ofRange(startPos, endPos); - final RangeSet myAddedOffsets = view.addRowOffsets().intersect(allowedRange); - final RangeSet adjustedOffsets = shift == 0 ? null : myAddedOffsets.shift(shift); - // every column must write to the stream - for (final BarrageStreamGeneratorImpl.ChunkListInputStreamGenerator data : addColumnData) { - final int numElements = data.generators.length == 0 - ? 0 - : LongSizedDataStructure.intSize("myAddedOffsets.size()", myAddedOffsets.size()); - if (view.options().columnsAsList()) { - // if we are sending columns as a list, we need to add the list buffers before each column - final SingleElementListHeaderInputStreamGenerator listHeader = - new SingleElementListHeaderInputStreamGenerator(numElements); - listHeader.visitFieldNodes(fieldNodeListener); - listHeader.visitBuffers(bufferListener); - addStream.accept(listHeader); - } - - if (numElements == 0) { - // use an empty generator to publish the column data - try (final RowSet empty = RowSetFactory.empty()) { - final ChunkInputStreamGenerator.DrainableColumn drainableColumn = - data.emptyGenerator.getInputStream(view.options(), empty); - drainableColumn.visitFieldNodes(fieldNodeListener); - drainableColumn.visitBuffers(bufferListener); - - // Add the drainable last as it is allowed to immediately close a row set the visitors need - addStream.accept(drainableColumn); - } - } else { - final ChunkInputStreamGenerator generator = data.generators[chunkIdx]; - final ChunkInputStreamGenerator.DrainableColumn drainableColumn = - generator.getInputStream(view.options(), shift == 0 ? 
myAddedOffsets : adjustedOffsets); - drainableColumn.visitFieldNodes(fieldNodeListener); - drainableColumn.visitBuffers(bufferListener); - // Add the drainable last as it is allowed to immediately close a row set the visitors need - addStream.accept(drainableColumn); - } - } - return LongSizedDataStructure.intSize("myAddedOffsets.size()", myAddedOffsets.size()); - } - - ////// WebBarrageUtil - public static final BarrageSnapshotOptions DEFAULT_SNAPSHOT_DESER_OPTIONS = - BarrageSnapshotOptions.builder().build(); - - public static void sendSchema(Consumer stream, Map columnsAndTypes) { - Factory streamGeneratorFactory = new Factory(); - streamGeneratorFactory - .getSchemaView(fbb -> makeTableSchemaPayload(fbb, DEFAULT_SNAPSHOT_DESER_OPTIONS, columnsAndTypes)) - .forEachStream(stream); - } - - private static int makeTableSchemaPayload(FlatBufferBuilder fbb, BarrageSnapshotOptions options, - Map columnsAndTypes) { - int[] fields = new int[columnsAndTypes.size()]; - int i = 0; - for (Map.Entry entry : columnsAndTypes.entrySet()) { - // Unlike BarrageUtil.java, we need to implement this ourselves rather than delegate to Arrow's own types - String name = entry.getKey(); - String type = entry.getValue(); - - // TODO this is wrong for array/vector types - JsDataHandler writer = JsDataHandler.getHandler(type); - if (options.columnsAsList()) { - throw new UnsupportedOperationException("columnsAsList not supported"); - } - - int nameOffset = fbb.createString(name); - int typeOffset = writer.writeType(fbb); - int metadataOffset = Field.createCustomMetadataVector(fbb, new int[] { - KeyValue.createKeyValue(fbb, fbb.createString("deephaven:type"), - fbb.createString(writer.deephavenType())) - }); - - Field.startField(fbb); - Field.addName(fbb, nameOffset); - Field.addNullable(fbb, true); - - Field.addTypeType(fbb, writer.typeType()); - Field.addType(fbb, typeOffset); - Field.addCustomMetadata(fbb, metadataOffset); - - fields[i++] = Field.endField(fbb); - } - - int fieldsOffset = Schema.createFieldsVector(fbb, fields); - - Schema.startSchema(fbb); - Schema.addFields(fbb, fieldsOffset); - return Schema.endSchema(fbb); - } - - public static void sendSnapshot(Consumer stream, BarrageSnapshotOptions options) { - WebBarrageMessage msg = constructMessage(); - WebBarrageStreamGenerator bsg = new WebBarrageStreamGenerator(msg); - bsg.getSnapshotView(options, null).forEachStream(stream); - - } - - private static WebBarrageMessage constructMessage() { - return new WebBarrageMessage();// TODO need args to create this - } -} From 74e5e9854ab8fa2845bbb88d92ba994f31800af1 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 4 Jun 2024 09:54:36 -0500 Subject: [PATCH 044/219] Use new engine primitive funcs --- .../io/deephaven/engine/primitive/function/Function.gwt.xml | 3 +++ .../resources/io/deephaven/extensions/barrage/Barrage.gwt.xml | 1 + 2 files changed, 4 insertions(+) create mode 100644 engine/primitive/src/main/resources/io/deephaven/engine/primitive/function/Function.gwt.xml diff --git a/engine/primitive/src/main/resources/io/deephaven/engine/primitive/function/Function.gwt.xml b/engine/primitive/src/main/resources/io/deephaven/engine/primitive/function/Function.gwt.xml new file mode 100644 index 00000000000..ec945398599 --- /dev/null +++ b/engine/primitive/src/main/resources/io/deephaven/engine/primitive/function/Function.gwt.xml @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/extensions/barrage/src/main/resources/io/deephaven/extensions/barrage/Barrage.gwt.xml 
b/extensions/barrage/src/main/resources/io/deephaven/extensions/barrage/Barrage.gwt.xml index a482295bb09..7466050bf21 100644 --- a/extensions/barrage/src/main/resources/io/deephaven/extensions/barrage/Barrage.gwt.xml +++ b/extensions/barrage/src/main/resources/io/deephaven/extensions/barrage/Barrage.gwt.xml @@ -1,6 +1,7 @@ + From 0c40d3c0d0482bf14e99a2d2071cfff5bf05dbc9 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 4 Jun 2024 09:54:54 -0500 Subject: [PATCH 045/219] spotless --- .../chunk/BaseChunkInputStreamGenerator.java | 2 +- .../chunk/ChunkInputStreamGenerator.java | 188 +++++++++--------- 2 files changed, 95 insertions(+), 95 deletions(-) diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BaseChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BaseChunkInputStreamGenerator.java index e68bec57110..a4c1986ebed 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BaseChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BaseChunkInputStreamGenerator.java @@ -89,7 +89,7 @@ abstract class BaseChunkInputStream extends DrainableColumn { this.options = options; this.subset = chunk.size() == 0 ? RowSequenceFactory.EMPTY : subset != null ? subset.copy() : RowSequenceFactory.forRange(0, chunk.size() - 1); -// REFERENCE_COUNT_UPDATER.incrementAndGet(BaseChunkInputStreamGenerator.this); + // REFERENCE_COUNT_UPDATER.incrementAndGet(BaseChunkInputStreamGenerator.this); // ignore the empty chunk as these are intentionally empty generators that should work for any subset if (chunk.size() > 0 && this.subset.lastRowKey() >= chunk.size()) { throw new IllegalStateException( diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java index fb56f6c5215..caac27f991c 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java @@ -151,33 +151,33 @@ static ChunkInputStreamGenerator makeInputStreamGenerator( if (type == Short.class) { return ShortChunkInputStreamGenerator.convertBoxed(chunk.asObjectChunk(), rowOffset); } -// if (type == LocalDate.class) { -// return LongChunkInputStreamGenerator.convertWithTransform(chunk.asObjectChunk(), -// rowOffset, date -> { -// if (date == null) { -// return QueryConstants.NULL_LONG; -// } -// final long epochDay = date.toEpochDay(); -// if (epochDay < MIN_LOCAL_DATE_VALUE || epochDay > MAX_LOCAL_DATE_VALUE) { -// throw new IllegalArgumentException("Date out of range: " + date + " (" + epochDay -// + " not in [" + MIN_LOCAL_DATE_VALUE + ", " + MAX_LOCAL_DATE_VALUE + "])"); -// } -// return epochDay * MS_PER_DAY; -// }); -// } -// if (type == LocalTime.class) { -// return LongChunkInputStreamGenerator.convertWithTransform(chunk.asObjectChunk(), -// rowOffset, time -> { -// if (time == null) { -// return QueryConstants.NULL_LONG; -// } -// final long nanoOfDay = time.toNanoOfDay(); -// if (nanoOfDay < 0) { -// throw new IllegalArgumentException("Time out of range: " + time); -// } -// return nanoOfDay; -// }); -// } + // if (type == LocalDate.class) { + // return LongChunkInputStreamGenerator.convertWithTransform(chunk.asObjectChunk(), + // rowOffset, date -> { + // if (date == 
null) { + // return QueryConstants.NULL_LONG; + // } + // final long epochDay = date.toEpochDay(); + // if (epochDay < MIN_LOCAL_DATE_VALUE || epochDay > MAX_LOCAL_DATE_VALUE) { + // throw new IllegalArgumentException("Date out of range: " + date + " (" + epochDay + // + " not in [" + MIN_LOCAL_DATE_VALUE + ", " + MAX_LOCAL_DATE_VALUE + "])"); + // } + // return epochDay * MS_PER_DAY; + // }); + // } + // if (type == LocalTime.class) { + // return LongChunkInputStreamGenerator.convertWithTransform(chunk.asObjectChunk(), + // rowOffset, time -> { + // if (time == null) { + // return QueryConstants.NULL_LONG; + // } + // final long nanoOfDay = time.toNanoOfDay(); + // if (nanoOfDay < 0) { + // throw new IllegalArgumentException("Time out of range: " + time); + // } + // return nanoOfDay; + // }); + // } // TODO (core#936): support column conversion modes return new VarBinaryChunkInputStreamGenerator<>(chunk.asObjectChunk(), rowOffset, @@ -257,59 +257,59 @@ static WritableChunk extractChunkFromInputStream( options, type, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } } -// if (Vector.class.isAssignableFrom(type)) { -// // noinspection unchecked -// return VectorChunkInputStreamGenerator.extractChunkFromInputStream( -// options, (Class>) type, componentType, fieldNodeIter, bufferInfoIter, is, -// outChunk, outOffset, totalRows); -// } -// if (type == BigInteger.class) { -// return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( -// is, -// fieldNodeIter, -// bufferInfoIter, -// BigInteger::new, -// outChunk, outOffset, totalRows); -// } -// if (type == BigDecimal.class) { -// return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( -// is, -// fieldNodeIter, -// bufferInfoIter, -// (final byte[] buf, final int offset, final int length) -> { -// // read the int scale value as little endian, arrow's endianness. 
-// final byte b1 = buf[offset]; -// final byte b2 = buf[offset + 1]; -// final byte b3 = buf[offset + 2]; -// final byte b4 = buf[offset + 3]; -// final int scale = b4 << 24 | (b3 & 0xFF) << 16 | (b2 & 0xFF) << 8 | (b1 & 0xFF); -// return new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale); -// }, -// outChunk, outOffset, totalRows); -// } -// if (type == Instant.class) { -// return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( -// Long.BYTES, options, io -> { -// final long value = io.readLong(); -// if (value == QueryConstants.NULL_LONG) { -// return null; -// } -// return DateTimeUtils.epochNanosToInstant(value * factor); -// }, -// fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); -// } -// if (type == ZonedDateTime.class) { -// return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( -// Long.BYTES, options, io -> { -// final long value = io.readLong(); -// if (value == QueryConstants.NULL_LONG) { -// return null; -// } -// return DateTimeUtils.epochNanosToZonedDateTime( -// value * factor, DateTimeUtils.timeZone()); -// }, -// fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); -// } + // if (Vector.class.isAssignableFrom(type)) { + // // noinspection unchecked + // return VectorChunkInputStreamGenerator.extractChunkFromInputStream( + // options, (Class>) type, componentType, fieldNodeIter, bufferInfoIter, is, + // outChunk, outOffset, totalRows); + // } + // if (type == BigInteger.class) { + // return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + // is, + // fieldNodeIter, + // bufferInfoIter, + // BigInteger::new, + // outChunk, outOffset, totalRows); + // } + // if (type == BigDecimal.class) { + // return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + // is, + // fieldNodeIter, + // bufferInfoIter, + // (final byte[] buf, final int offset, final int length) -> { + // // read the int scale value as little endian, arrow's endianness. 
+ // final byte b1 = buf[offset]; + // final byte b2 = buf[offset + 1]; + // final byte b3 = buf[offset + 2]; + // final byte b4 = buf[offset + 3]; + // final int scale = b4 << 24 | (b3 & 0xFF) << 16 | (b2 & 0xFF) << 8 | (b1 & 0xFF); + // return new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale); + // }, + // outChunk, outOffset, totalRows); + // } + // if (type == Instant.class) { + // return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + // Long.BYTES, options, io -> { + // final long value = io.readLong(); + // if (value == QueryConstants.NULL_LONG) { + // return null; + // } + // return DateTimeUtils.epochNanosToInstant(value * factor); + // }, + // fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + // } + // if (type == ZonedDateTime.class) { + // return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + // Long.BYTES, options, io -> { + // final long value = io.readLong(); + // if (value == QueryConstants.NULL_LONG) { + // return null; + // } + // return DateTimeUtils.epochNanosToZonedDateTime( + // value * factor, DateTimeUtils.timeZone()); + // }, + // fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + // } if (type == Byte.class) { return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( Byte.BYTES, options, io -> TypeUtils.box(io.readByte()), @@ -345,20 +345,20 @@ static WritableChunk extractChunkFromInputStream( Short.BYTES, options, io -> TypeUtils.box(io.readShort()), fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } -// if (type == LocalDate.class) { -// return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( -// Long.BYTES, options, -// value -> value == QueryConstants.NULL_LONG -// ? null -// : LocalDate.ofEpochDay(value / MS_PER_DAY), -// fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); -// } -// if (type == LocalTime.class) { -// return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( -// Long.BYTES, options, -// value -> value == QueryConstants.NULL_LONG ? null : LocalTime.ofNanoOfDay(value), -// fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); -// } + // if (type == LocalDate.class) { + // return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( + // Long.BYTES, options, + // value -> value == QueryConstants.NULL_LONG + // ? null + // : LocalDate.ofEpochDay(value / MS_PER_DAY), + // fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + // } + // if (type == LocalTime.class) { + // return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( + // Long.BYTES, options, + // value -> value == QueryConstants.NULL_LONG ? 
null : LocalTime.ofNanoOfDay(value), + // fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + // } if (type == String.class || options.columnConversionMode().equals(ColumnConversionMode.Stringify)) { return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream(is, fieldNodeIter, From 036867c14978d538d282b2b8a3caf836b657785d Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Wed, 5 Jun 2024 10:40:51 -0500 Subject: [PATCH 046/219] web impl of BSGI, compiles, untested --- .../extensions/barrage/Barrage.gwt.xml | 2 +- .../api/barrage/ShiftedRangeReader.java | 27 +- .../WebBarrageStreamGeneratorImpl.java | 706 ++++++++++++++++++ .../api/barrage/WebBarrageStreamReader.java | 2 +- .../client/api/barrage/WebBarrageUtils.java | 3 +- .../deephaven/engine/rowset/RowSequence.java | 2 +- .../engine/rowset/RowSequenceFactory.java | 7 +- .../io/deephaven/engine/rowset/RowSet.java | 3 + .../engine/rowset/RowSetFactory.java | 13 +- .../WebRowSetBuilderSequentialImpl.java | 21 + .../engine/rowset/WebRowSetImpl.java | 89 +++ .../engine/rowset/WritableRowSet.java | 8 + .../deephaven/web/shared/data/RangeSet.java | 6 + 13 files changed, 880 insertions(+), 9 deletions(-) create mode 100644 web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamGeneratorImpl.java create mode 100644 web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WebRowSetBuilderSequentialImpl.java create mode 100644 web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WebRowSetImpl.java create mode 100644 web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WritableRowSet.java diff --git a/extensions/barrage/src/main/resources/io/deephaven/extensions/barrage/Barrage.gwt.xml b/extensions/barrage/src/main/resources/io/deephaven/extensions/barrage/Barrage.gwt.xml index 7466050bf21..082a3d35a6f 100644 --- a/extensions/barrage/src/main/resources/io/deephaven/extensions/barrage/Barrage.gwt.xml +++ b/extensions/barrage/src/main/resources/io/deephaven/extensions/barrage/Barrage.gwt.xml @@ -3,7 +3,7 @@ - + \ No newline at end of file diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/ShiftedRangeReader.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/ShiftedRangeReader.java index ac2b7524e6b..3c1c68fc3bb 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/ShiftedRangeReader.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/ShiftedRangeReader.java @@ -12,7 +12,7 @@ public class ShiftedRangeReader { - public ShiftedRange[] read(ByteBuffer data) { + public static ShiftedRange[] read(ByteBuffer data) { RangeSet start = new CompressedRangeSetReader().read(data); RangeSet end = new CompressedRangeSetReader().read(data); RangeSet postShiftStart = new CompressedRangeSetReader().read(data); @@ -30,4 +30,29 @@ public ShiftedRange[] read(ByteBuffer data) { return ranges; } + + public static ByteBuffer write(ShiftedRange[] shiftedRanges) { + RangeSet start = new RangeSet(); + RangeSet end = new RangeSet(); + RangeSet postShiftStart = new RangeSet(); + + for (int i = 0; i < shiftedRanges.length; i++) { + ShiftedRange range = shiftedRanges[i]; + long first = range.getRange().getFirst(); + long last = range.getRange().getLast(); + long delta = range.getDelta() + first; + start.addRange(new Range(first, first)); + end.addRange(new Range(last, last)); + postShiftStart.addRange(new Range(delta, delta)); + } + + ByteBuffer 
startBuf = CompressedRangeSetReader.writeRange(start); + ByteBuffer endBuf = CompressedRangeSetReader.writeRange(end); + ByteBuffer shiftBuf = CompressedRangeSetReader.writeRange(postShiftStart); + ByteBuffer all = ByteBuffer.allocateDirect(startBuf.remaining() + endBuf.remaining() + shiftBuf.remaining()); + all.put(startBuf); + all.put(endBuf); + all.put(shiftBuf); + return all; + } } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamGeneratorImpl.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamGeneratorImpl.java new file mode 100644 index 00000000000..1c0055599e5 --- /dev/null +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamGeneratorImpl.java @@ -0,0 +1,706 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.web.client.api.barrage; + + +import com.google.flatbuffers.FlatBufferBuilder; +import elemental2.core.ArrayBufferView; +import elemental2.core.Uint8Array; +import io.deephaven.UncheckedDeephavenException; +import io.deephaven.barrage.flatbuf.BarrageMessageType; +import io.deephaven.barrage.flatbuf.BarrageMessageWrapper; +import io.deephaven.barrage.flatbuf.BarrageUpdateMetadata; +import io.deephaven.chunk.ChunkType; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.chunk.sized.SizedChunk; +import io.deephaven.chunk.sized.SizedLongChunk; +import io.deephaven.engine.rowset.RowSet; +import io.deephaven.engine.rowset.RowSetFactory; +import io.deephaven.engine.rowset.WritableRowSet; +import io.deephaven.extensions.barrage.BarrageSnapshotOptions; +import io.deephaven.extensions.barrage.ChunkListInputStreamGenerator; +import io.deephaven.extensions.barrage.DrainableByteArrayInputStream; +import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator; +import io.deephaven.extensions.barrage.chunk.SingleElementListHeaderInputStreamGenerator; +import io.deephaven.extensions.barrage.util.BarrageUtil; +import io.deephaven.extensions.barrage.util.DefensiveDrainable; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb.FlightData; +import io.deephaven.util.SafeCloseable; +import io.deephaven.util.datastructures.LongSizedDataStructure; +import io.deephaven.util.datastructures.SizeException; +import io.deephaven.util.mutable.MutableInt; +import io.deephaven.util.mutable.MutableLong; +import io.deephaven.web.client.api.parse.JsDataHandler; +import io.deephaven.web.shared.data.RangeSet; +import io.deephaven.web.shared.data.ShiftedRange; +import org.apache.arrow.flatbuf.Buffer; +import org.apache.arrow.flatbuf.Field; +import org.apache.arrow.flatbuf.FieldNode; +import org.apache.arrow.flatbuf.KeyValue; +import org.apache.arrow.flatbuf.Message; +import org.apache.arrow.flatbuf.MessageHeader; +import org.apache.arrow.flatbuf.MetadataVersion; +import org.apache.arrow.flatbuf.RecordBatch; +import org.apache.arrow.flatbuf.Schema; +import org.gwtproject.nio.TypedArrayHelper; +import org.jetbrains.annotations.Nullable; + +import java.io.IOException; +import java.io.OutputStream; +import java.nio.ByteBuffer; +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.BitSet; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.function.Consumer; +import 
java.util.function.ToIntFunction; + +import static io.deephaven.extensions.barrage.chunk.BaseChunkInputStreamGenerator.PADDING_BUFFER; + +public class WebBarrageStreamGeneratorImpl { + private static final int DEFAULT_INITIAL_BATCH_SIZE = 4096; + private static final int DEFAULT_BATCH_SIZE = 1 << 16; + + private static final int DEFAULT_MESSAGE_SIZE_LIMIT = 100 * 1024 * 1024; + + private native RowSet wrap(RangeSet rangeSet) /*-{ + return @io.deephaven.engine.rowset.WebRowSetImpl::new(Lio/deephaven/web/shared/data/RangeSet;)(rangeSet); + }-*/; + + private native RangeSet unwrap(RowSet rowSet) /*-{ + return rowSet.@io.deephaven.engine.rowset.WebRowSetImpl::rangeSet; + }-*/; + + public interface MessageView { + List toFlightDataMessage() throws IOException; + } + + public interface RecordBatchMessageView extends MessageView { + // void forEachStream(Consumer visitor) throws IOException; + + boolean isViewport(); + + StreamReaderOptions options(); + + RowSet addRowOffsets(); + + RowSet modRowOffsets(int col); + } + + private static final class SchemaMessageView implements MessageView { + private final FlightData message; + + public SchemaMessageView(final ByteBuffer buffer) { + ArrayBufferView view = TypedArrayHelper.unwrap(buffer); + message = new FlightData(); + message.setDataHeader(new Uint8Array(view.buffer, buffer.position(), buffer.remaining())); + } + + @Override + public List toFlightDataMessage() throws IOException { + return Collections.singletonList(message); + } + } + + private final class SnapshotView implements RecordBatchMessageView { + private final BarrageSnapshotOptions options; + private final long numAddRows; + private final RowSet addRowKeys; + private final RowSet addRowOffsets; + + public SnapshotView(final BarrageSnapshotOptions options, + @Nullable final RowSet viewportIgnored, + final boolean reverseViewportIgnored, + @Nullable final Void keyspaceViewportIgnored, + @Nullable final BitSet subscribedColumnsIgnored) { + this.options = options; + // this.viewport = viewportIgnored; + // this.reverseViewport = reverseViewportIgnored; + + // this.subscribedColumns = subscribedColumnsIgnored; + + // precompute add row offsets (no viewport support) + addRowKeys = rowsAdded.copy(); + addRowOffsets = RowSetFactory.flat(addRowKeys.size()); + + + numAddRows = addRowOffsets.size(); + } + + @Override + public List toFlightDataMessage() throws IOException { + List messages = new ArrayList<>(); + ByteBuffer metadata = getSnapshotMetadata(); + MutableLong bytesWritten = new MutableLong(0L); + + // batch size is maximum, will write fewer rows when needed + int maxBatchSize = batchSize(); + final MutableInt actualBatchSize = new MutableInt(); + if (numAddRows == 0) { + // we still need to send a message containing metadata when there are no rows + messages.add(getInputStream(this, 0, 0, actualBatchSize, metadata, + WebBarrageStreamGeneratorImpl.this::appendAddColumns)); + } else { + // send the add batches + processBatches(messages::add, this, numAddRows, maxBatchSize, metadata, + WebBarrageStreamGeneratorImpl.this::appendAddColumns, bytesWritten); + } + addRowOffsets.close(); + addRowKeys.close(); + // writeConsumer.onWrite(bytesWritten.get(), System.nanoTime() - startTm); + return messages; + } + + private int batchSize() { + int batchSize = options().batchSize(); + if (batchSize <= 0) { + batchSize = DEFAULT_BATCH_SIZE; + } + return batchSize; + } + + @Override + public boolean isViewport() { + return false; + } + + @Override + public StreamReaderOptions options() { + return 
options; + } + + @Override + public RowSet addRowOffsets() { + return addRowOffsets; + } + + @Override + public RowSet modRowOffsets(int col) { + throw new UnsupportedOperationException("asked for mod row on SnapshotView"); + } + + private ByteBuffer getSnapshotMetadata() throws IOException { + final FlatBufferBuilder metadata = new FlatBufferBuilder(); + + int effectiveViewportOffset = 0; + if (isViewport()) { + // try (final RowSetGenerator viewportGen = new RowSetGenerator(viewport)) { + // effectiveViewportOffset = viewportGen.addToFlatBuffer(metadata); + // } + } + + int effectiveColumnSetOffset = 0; + // if (subscribedColumns != null) { + // effectiveColumnSetOffset = new BitSetGenerator(subscribedColumns).addToFlatBuffer(metadata); + // } + + final int rowsAddedOffset = addToFlatBuffer(rowsAdded, metadata); + + // no shifts in a snapshot, but need to provide a valid structure + final int shiftDataOffset = addToFlatBuffer(shifted, metadata); + + // Added Chunk Data: + int addedRowsIncludedOffset = 0; + // don't send `rowsIncluded` when identical to `rowsAdded`, client will infer they are the same + // if (isSnapshot || !addRowKeys.equals(rowsAdded)) { + // addedRowsIncludedOffset = addToFlatBuffer(rowsIncluded, addRowKeys, metadata); + // } + + BarrageUpdateMetadata.startBarrageUpdateMetadata(metadata); + // BarrageUpdateMetadata.addIsSnapshot(metadata, isSnapshot); + // BarrageUpdateMetadata.addFirstSeq(metadata, firstSeq); + // BarrageUpdateMetadata.addLastSeq(metadata, lastSeq); + BarrageUpdateMetadata.addEffectiveViewport(metadata, effectiveViewportOffset); + BarrageUpdateMetadata.addEffectiveColumnSet(metadata, effectiveColumnSetOffset); + BarrageUpdateMetadata.addAddedRows(metadata, rowsAddedOffset); + BarrageUpdateMetadata.addRemovedRows(metadata, 0); + BarrageUpdateMetadata.addShiftData(metadata, shiftDataOffset); + BarrageUpdateMetadata.addAddedRowsIncluded(metadata, addedRowsIncludedOffset); + BarrageUpdateMetadata.addModColumnNodes(metadata, 0); + // BarrageUpdateMetadata.addEffectiveReverseViewport(metadata, reverseViewport); + metadata.finish(BarrageUpdateMetadata.endBarrageUpdateMetadata(metadata)); + + final FlatBufferBuilder header = new FlatBufferBuilder(); + final int payloadOffset = BarrageMessageWrapper.createMsgPayloadVector(header, metadata.dataBuffer()); + BarrageMessageWrapper.startBarrageMessageWrapper(header); + BarrageMessageWrapper.addMagic(header, BarrageUtil.FLATBUFFER_MAGIC); + BarrageMessageWrapper.addMsgType(header, BarrageMessageType.BarrageUpdateMetadata); + BarrageMessageWrapper.addMsgPayload(header, payloadOffset); + header.finish(BarrageMessageWrapper.endBarrageMessageWrapper(header)); + + return header.dataBuffer().slice(); + } + } + + private int addToFlatBuffer(ShiftedRange[] shifted, FlatBufferBuilder metadata) { + return 0; + } + + private int addToFlatBuffer(RowSet rowSet, FlatBufferBuilder metadata) { + RangeSet rangeSet = unwrap(rowSet); + return metadata.createByteVector(CompressedRangeSetReader.writeRange(rangeSet)); + } + + public static class ModColumnGenerator implements SafeCloseable { + public final RangeSet rowsModified; + public final ChunkListInputStreamGenerator data; + + public ModColumnGenerator(final WebBarrageMessage.ModColumnData col) throws IOException { + rowsModified = col.rowsModified; + data = new ChunkListInputStreamGenerator(col.type, col.componentType, col.data, col.chunkType); + } + + @Override + public void close() { + data.close(); + } + } + + static class Factory { + WebBarrageStreamGeneratorImpl 
+    static class Factory {
+        WebBarrageStreamGeneratorImpl newGenerator(WebBarrageMessage message) throws IOException {
+            return new WebBarrageStreamGeneratorImpl(message);
+        }
+
+        SchemaMessageView getSchemaView(ToIntFunction<FlatBufferBuilder> schemaWriter) {
+            final FlatBufferBuilder builder = new FlatBufferBuilder();
+            final int schemaOffset = schemaWriter.applyAsInt(builder);
+            Message.startMessage(builder);
+            Message.addHeaderType(builder, org.apache.arrow.flatbuf.MessageHeader.Schema);
+            Message.addHeader(builder, schemaOffset);
+            Message.addVersion(builder, MetadataVersion.V5);
+            Message.addBodyLength(builder, 0);
+            builder.finish(Message.endMessage(builder));
+            return new SchemaMessageView(builder.dataBuffer());
+        }
+    }
+
+    private final WebBarrageMessage message;
+
+    private final boolean isSnapshot;
+
+    private final RowSet rowsAdded;
+    private final RowSet rowsRemoved;
+    private final ShiftedRange[] shifted;
+
+    private final ChunkListInputStreamGenerator[] addColumnData;
+    private final ModColumnGenerator[] modColumnData;
+
+    public WebBarrageStreamGeneratorImpl(WebBarrageMessage message) throws IOException {
+        this.message = message;
+        this.isSnapshot = message.isSnapshot;
+        assert isSnapshot : "isSnapshot must be true at this time";
+
+        this.rowsAdded = wrap(message.rowsAdded);
+        this.rowsRemoved = wrap(message.rowsRemoved);
+        this.shifted = message.shifted;
+
+        addColumnData = new ChunkListInputStreamGenerator[message.addColumnData.length];
+        for (int i = 0; i < message.addColumnData.length; i++) {
+            WebBarrageMessage.AddColumnData columnData = message.addColumnData[i];
+            addColumnData[i] = new ChunkListInputStreamGenerator(columnData.type, columnData.componentType,
+                    columnData.data, columnData.chunkType);
+        }
+
+        modColumnData = new ModColumnGenerator[message.modColumnData.length];
+        for (int i = 0; i < modColumnData.length; i++) {
+            WebBarrageMessage.ModColumnData columnData = message.modColumnData[i];
+            modColumnData[i] = new ModColumnGenerator(columnData);
+        }
+    }
+
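+    // [Editor's note] A minimal sketch of driving this generator end to end, assuming only the
+    // WebBarrageMessage fields the constructor above reads (rowsAdded/rowsRemoved are RangeSets,
+    // shifted/addColumnData/modColumnData are arrays); names are as they appear in this patch:
+    //   WebBarrageMessage msg = new WebBarrageMessage();
+    //   msg.isSnapshot = true;                                   // required by the assert above
+    //   msg.rowsAdded = RangeSet.ofRange(0, 2);                  // three rows
+    //   msg.rowsRemoved = RangeSet.empty();
+    //   msg.shifted = new ShiftedRange[0];
+    //   msg.addColumnData = new WebBarrageMessage.AddColumnData[0];
+    //   msg.modColumnData = new WebBarrageMessage.ModColumnData[0];
+    //   List<FlightData> frames = new WebBarrageStreamGeneratorImpl(msg)
+    //           .getSnapshotView(BarrageSnapshotOptions.builder().build())
+    //           .toFlightDataMessage();
+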
+    /**
+     * Obtain a View of this StreamGenerator that can be sent to a single snapshot requestor.
+     *
+     * @param options serialization options for this specific view
+     * @param viewport is the position-space viewport
+     * @param reverseViewport is the viewport reversed (relative to end of table instead of beginning)
+     * @param keyspaceViewport is the key-space viewport
+     * @param snapshotColumns are the columns subscribed for this view
+     * @return a MessageView filtered by the snapshot properties that can be sent to that subscriber
+     */
+    private MessageView getSnapshotView(final BarrageSnapshotOptions options,
+            @Nullable final RowSet viewport,
+            final boolean reverseViewport,
+            @Nullable final RowSet keyspaceViewport,
+            @Nullable final BitSet snapshotColumns) {
+        // viewport, reverseViewport, keyspaceViewport, and snapshotColumns are not yet supported;
+        // SnapshotView ignores them, so nulls/false are passed for now
+        return new SnapshotView(options, null, false, null, null);
+    }
+
+    /**
+     * Obtain a Full-Snapshot View of this StreamGenerator that can be sent to a single snapshot requestor.
+     *
+     * @param options serialization options for this specific view
+     * @return a MessageView filtered by the snapshot properties that can be sent to that subscriber
+     */
+    public MessageView getSnapshotView(BarrageSnapshotOptions options) {
+        return getSnapshotView(options, null, false, null, null);
+    }
+
+    @FunctionalInterface
+    private interface ColumnVisitor {
+        int visit(final RecordBatchMessageView view, final long startRange, final int targetBatchSize,
+                final Consumer<DefensiveDrainable> addStream,
+                final ChunkInputStreamGenerator.FieldNodeListener fieldNodeListener,
+                final ChunkInputStreamGenerator.BufferListener bufferListener) throws IOException;
+    }
+
+    /**
+     * Returns a single FlightData message filtered to the viewport. This function accepts `targetBatchSize`, but
+     * may actually write fewer rows than the target (e.g., when crossing an internal chunk boundary).
+     *
+     * @param view the view of the overall chunk to generate a RecordBatch for
+     * @param offset the start of the batch in position space w.r.t. the view (inclusive)
+     * @param targetBatchSize the target (and maximum) batch size to use for this message
+     * @param actualBatchSize the number of rows actually sent in this batch (will be <= targetBatchSize)
+     * @param metadata the optional flight data metadata to attach to the message
+     * @param columnVisitor the helper method responsible for appending the payload columns to the RecordBatch
+     * @return a single FlightData message
+     */
+    private FlightData getInputStream(final RecordBatchMessageView view, final long offset,
+            final int targetBatchSize,
+            final MutableInt actualBatchSize, final ByteBuffer metadata, final ColumnVisitor columnVisitor)
+            throws IOException {
+        final ArrayDeque<DefensiveDrainable> streams = new ArrayDeque<>();
+        final MutableInt size = new MutableInt();
+
+        final Consumer<DefensiveDrainable> addStream = (final DefensiveDrainable is) -> {
+            try {
+                final int sz = is.available();
+                if (sz == 0) {
+                    is.close();
+                    return;
+                }
+
+                streams.add(is);
+                size.add(sz);
+            } catch (final IOException e) {
+                throw new UncheckedDeephavenException("Unexpected IOException", e);
+            }
+
+            // These buffers must be aligned to an 8-byte boundary for efficient access in languages like C++.
+            if (size.get() % 8 != 0) {
+                final int paddingBytes = (8 - (size.get() % 8));
+                size.add(paddingBytes);
+                streams.add(new DrainableByteArrayInputStream(PADDING_BUFFER, 0, paddingBytes));
+            }
+        };
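+
+        // [Editor's note] Worked example of the alignment rule enforced just above: after a
+        // 13-byte buffer the running total is padded by 8 - (13 % 8) = 3 bytes, so the next
+        // buffer starts at offset 16. The same arithmetic in isolation:
+        //   int size = 13;
+        //   int padding = size % 8 == 0 ? 0 : 8 - (size % 8);   // 3
+        //   assert (size + padding) % 8 == 0;                   // 16 is 8-byte aligned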
+
+        final FlatBufferBuilder header = new FlatBufferBuilder();
+
+        final int numRows;
+        final int nodesOffset;
+        final int buffersOffset;
+        try (final SizedChunk<Values> nodeOffsets = new SizedChunk<>(ChunkType.Object);
+                final SizedLongChunk<Values> bufferInfos = new SizedLongChunk<>()) {
+            nodeOffsets.ensureCapacity(addColumnData.length);
+            nodeOffsets.get().setSize(0);
+            bufferInfos.ensureCapacity(addColumnData.length * 3);
+            bufferInfos.get().setSize(0);
+
+            final MutableLong totalBufferLength = new MutableLong();
+            final ChunkInputStreamGenerator.FieldNodeListener fieldNodeListener =
+                    (numElements, nullCount) -> {
+                        nodeOffsets.ensureCapacityPreserve(nodeOffsets.get().size() + 1);
+                        nodeOffsets.get().asWritableObjectChunk()
+                                .add(new ChunkInputStreamGenerator.FieldNodeInfo(numElements, nullCount));
+                    };
+
+            final ChunkInputStreamGenerator.BufferListener bufferListener = (length) -> {
+                totalBufferLength.add(length);
+                bufferInfos.ensureCapacityPreserve(bufferInfos.get().size() + 1);
+                bufferInfos.get().add(length);
+            };
+
+            numRows = columnVisitor.visit(view, offset, targetBatchSize, addStream, fieldNodeListener, bufferListener);
+            actualBatchSize.set(numRows);
+
+            final WritableChunk<Values> noChunk = nodeOffsets.get();
+            RecordBatch.startNodesVector(header, noChunk.size());
+            for (int i = noChunk.size() - 1; i >= 0; --i) {
+                final ChunkInputStreamGenerator.FieldNodeInfo node =
+                        (ChunkInputStreamGenerator.FieldNodeInfo) noChunk.asObjectChunk().get(i);
+                FieldNode.createFieldNode(header, node.numElements, node.nullCount);
+            }
+            nodesOffset = header.endVector();
+
+            final WritableLongChunk<Values> biChunk = bufferInfos.get();
+            RecordBatch.startBuffersVector(header, biChunk.size());
+            for (int i = biChunk.size() - 1; i >= 0; --i) {
+                totalBufferLength.subtract(biChunk.get(i));
+                Buffer.createBuffer(header, totalBufferLength.get(), biChunk.get(i));
+            }
+            buffersOffset = header.endVector();
+        }
+
+        RecordBatch.startRecordBatch(header);
+        RecordBatch.addNodes(header, nodesOffset);
+        RecordBatch.addBuffers(header, buffersOffset);
+        if (view.options().columnsAsList()) {
+            RecordBatch.addLength(header, 1);
+        } else {
+            RecordBatch.addLength(header, numRows);
+        }
+        final int headerOffset = RecordBatch.endRecordBatch(header);
+
+        header.finish(Message.createMessage(header, MetadataVersion.V5, MessageHeader.RecordBatch, headerOffset,
+                size.get(), 0));
+        // header.finish(wrapInMessage(header, headerOffset,
+        //         org.apache.arrow.flatbuf.MessageHeader.RecordBatch, size.get()));
+
+        FlightData flightData = new FlightData();
+        flightData.setDataHeader(WebBarrageUtils.bbToUint8ArrayView(header.dataBuffer().slice()));
+        flightData.setAppMetadata(WebBarrageUtils.bbToUint8ArrayView(metadata));
+        int sum = 0;
+        for (DefensiveDrainable stream : streams) {
+            int available = stream.available();
+            sum += available;
+        }
+        ByteBuffer dataBody = ByteBuffer.allocateDirect(sum);
+
+        // ByteBufferOutputStream outputStream = new ByteBufferOutputStream(dataBody, new NullByteBufferSink());
+        for (DefensiveDrainable d : streams) {
+            d.drainTo(new OutputStream() {
+                @Override
+                public void write(int b) throws IOException {
+                    dataBody.put((byte) b);
+                }
+            });
+        }
+        dataBody.flip();
+        flightData.setDataBody(WebBarrageUtils.bbToUint8ArrayView(dataBody));
+        return flightData;
+    }
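+
+    // [Editor's note] The buffers vector above is written back-to-front while totalBufferLength
+    // counts down, so each Arrow Buffer records (offset, length) with offsets forming an
+    // exclusive prefix sum of the lengths. For recorded lengths 8, 16, 24 (total 48), the loop
+    // emits, in iteration order:
+    //   i=2: Buffer(offset=24, length=24)
+    //   i=1: Buffer(offset=8,  length=16)
+    //   i=0: Buffer(offset=0,  length=8)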
+
+    private void processBatches(Consumer<FlightData> visitor, final RecordBatchMessageView view,
+            final long numRows, final int maxBatchSize, ByteBuffer metadata,
+            final ColumnVisitor columnVisitor, final MutableLong bytesWritten) throws IOException {
+        long offset = 0;
+        MutableInt actualBatchSize = new MutableInt();
+
+        int batchSize = Math.min(DEFAULT_INITIAL_BATCH_SIZE, maxBatchSize);
+
+        // allow the client to override the default message size
+        int clientMaxMessageSize = view.options().maxMessageSize();
+        final int maxMessageSize = clientMaxMessageSize > 0 ? clientMaxMessageSize : DEFAULT_MESSAGE_SIZE_LIMIT;
+
+        // TODO (deephaven-core#188): remove this when JS API can accept multiple batches
+        boolean sendAllowed = numRows <= batchSize;
+
+        while (offset < numRows) {
+            try {
+                final FlightData is =
+                        getInputStream(view, offset, batchSize, actualBatchSize, metadata, columnVisitor);
+                int approxBytesToWrite = is.getAppMetadata().asUint8Array().length
+                        + is.getDataHeader().asUint8Array().length + is.getDataBody().asUint8Array().length; // is.available();
+
+                if (actualBatchSize.get() == 0) {
+                    throw new IllegalStateException("No data was written for a batch");
+                }
+
+                // treat this as a hard limit; exceeding it fails a client or worker-to-worker (w2w) stream
+                // (unless we are sending a single row, in which case we must send it and let it potentially fail)
+                if (sendAllowed && (approxBytesToWrite < maxMessageSize || batchSize == 1)) {
+                    // let's write the data
+                    visitor.accept(is);
+
+                    bytesWritten.add(approxBytesToWrite);
+                    offset += actualBatchSize.get();
+                    metadata = null;
+                } else {
+                    // can't write this, so close the input stream and retry
+                    // is.close();
+                    sendAllowed = true;
+                }
+
+                // recompute the batch limit for the next message
+                int bytesPerRow = approxBytesToWrite / actualBatchSize.get();
+                if (bytesPerRow > 0) {
+                    int rowLimit = maxMessageSize / bytesPerRow;
+
+                    // add some margin for abnormal cell contents
+                    batchSize = Math.min(maxBatchSize, Math.max(1, (int) ((double) rowLimit * 0.9)));
+                }
+            } catch (SizeException ex) {
+                // was an overflow in the ChunkInputStream generator (probably VarBinary). We can't compute the
+                // correct number of rows from this failure, so cut batch size in half and try again. This may
+                // occur multiple times until the size is restricted properly.
+                if (batchSize == 1) {
+                    // this row exceeds internal limits and can never be sent
+                    throw (new UncheckedDeephavenException(
+                            "BarrageStreamGenerator - single row (" + offset + ") exceeds transmissible size", ex));
+                }
+                final int maximumSize = LongSizedDataStructure.intSize(
+                        "BarrageStreamGenerator", ex.getMaximumSize());
+                batchSize = maximumSize >= batchSize ? batchSize / 2 : maximumSize;
+            }
+        }
+    }
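+
+    // [Editor's note] The loop above re-derives the next batch size from the observed
+    // bytes-per-row, keeping a 10% margin under maxMessageSize. The same estimate as a
+    // standalone, hypothetical helper (not part of the patch):
+    //   private static int nextBatchSize(int approxBytesWritten, int rowsWritten,
+    //           int maxMessageSize, int maxBatchSize) {
+    //       int bytesPerRow = approxBytesWritten / rowsWritten;
+    //       if (bytesPerRow <= 0) {
+    //           return maxBatchSize;                        // not enough data to estimate
+    //       }
+    //       int rowLimit = maxMessageSize / bytesPerRow;    // rows that fit under the cap
+    //       return Math.min(maxBatchSize, Math.max(1, (int) ((double) rowLimit * 0.9)));
+    //   }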
+
+    private static int findGeneratorForOffset(final List<ChunkInputStreamGenerator> generators, final long offset) {
+        // fast path: no generators means there is nothing to search
+        if (generators.isEmpty()) {
+            return 0;
+        }
+
+        int low = 0;
+        int high = generators.size();
+
+        while (low + 1 < high) {
+            int mid = (low + high) / 2;
+            int cmp = Long.compare(generators.get(mid).getRowOffset(), offset);
+
+            if (cmp < 0) {
+                // the generator's first key is low enough
+                low = mid;
+            } else if (cmp > 0) {
+                // the generator's first key is too high
+                high = mid;
+            } else {
+                // first key matches
+                return mid;
+            }
+        }
+
+        // the desired generator is at `low`, as `high` is exclusive
+        return low;
+    }
+
+    private int appendAddColumns(final RecordBatchMessageView view, final long startRange, final int targetBatchSize,
+            final Consumer<DefensiveDrainable> addStream,
+            final ChunkInputStreamGenerator.FieldNodeListener fieldNodeListener,
+            final ChunkInputStreamGenerator.BufferListener bufferListener) throws IOException {
+        if (addColumnData.length == 0) {
+            return view.addRowOffsets().intSize();
+        }
+
+        // find the generator for the initial position-space key
+        long startPos = view.addRowOffsets().get(startRange);
+        int chunkIdx = findGeneratorForOffset(addColumnData[0].generators(), startPos);
+
+        // adjust the batch size if we would cross a chunk boundary
+        long shift = 0;
+        long endPos = view.addRowOffsets().get(startRange + targetBatchSize - 1);
+        if (endPos == RowSet.NULL_ROW_KEY) {
+            endPos = Long.MAX_VALUE;
+        }
+        if (!addColumnData[0].generators().isEmpty()) {
+            final ChunkInputStreamGenerator tmpGenerator = addColumnData[0].generators().get(chunkIdx);
+            endPos = Math.min(endPos, tmpGenerator.getLastRowOffset());
+            shift = -tmpGenerator.getRowOffset();
+        }
+
+        // all column generators have the same boundaries, so we can re-use the offsets internal to this chunkIdx
+        try (final RowSet allowedRange = RowSetFactory.fromRange(startPos, endPos);
+                final WritableRowSet myAddedOffsets = view.addRowOffsets().intersect(allowedRange);
+                final RowSet adjustedOffsets = shift == 0 ? null : myAddedOffsets.shift(shift)) {
+            // every column must write to the stream
+            for (final ChunkListInputStreamGenerator data : addColumnData) {
+                final int numElements = data.generators().isEmpty()
+                        ? 0
+                        : myAddedOffsets.intSize("BarrageStreamGenerator");
+                if (view.options().columnsAsList()) {
+                    // if we are sending columns as a list, we need to add the list buffers before each column
+                    final SingleElementListHeaderInputStreamGenerator listHeader =
+                            new SingleElementListHeaderInputStreamGenerator(numElements);
+                    listHeader.visitFieldNodes(fieldNodeListener);
+                    listHeader.visitBuffers(bufferListener);
+                    addStream.accept(listHeader);
+                }
+
+                if (numElements == 0) {
+                    // use an empty generator to publish the column data
+                    try (final RowSet empty = RowSetFactory.empty()) {
+                        final ChunkInputStreamGenerator.DrainableColumn drainableColumn =
+                                data.empty(view.options(), empty);
+                        drainableColumn.visitFieldNodes(fieldNodeListener);
+                        drainableColumn.visitBuffers(bufferListener);
+
+                        // Add the drainable last as it is allowed to immediately close a row set the visitors need
+                        addStream.accept(drainableColumn);
+                    }
+                } else {
+                    final ChunkInputStreamGenerator generator = data.generators().get(chunkIdx);
+                    final ChunkInputStreamGenerator.DrainableColumn drainableColumn =
+                            generator.getInputStream(view.options(), shift == 0 ? myAddedOffsets : adjustedOffsets);
+                    drainableColumn.visitFieldNodes(fieldNodeListener);
+                    drainableColumn.visitBuffers(bufferListener);
+                    // Add the drainable last as it is allowed to immediately close a row set the visitors need
+                    addStream.accept(drainableColumn);
+                }
+            }
+            return myAddedOffsets.intSize();
+        }
+    }
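+
+    // [Editor's note] findGeneratorForOffset above is a binary search over generators ordered by
+    // their first row offset; it returns the last generator whose offset is <= the target. For
+    // generators starting at offsets [0, 1000, 2000]:
+    //   offset 0    -> index 0
+    //   offset 1500 -> index 1 (the chunk covering rows 1000..1999)
+    //   offset 2000 -> index 2 (exact match on the first key)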
+
+    ////// WebBarrageUtil
+    public static final BarrageSnapshotOptions DEFAULT_SNAPSHOT_DESER_OPTIONS =
+            BarrageSnapshotOptions.builder().build();
+
+    public static List<FlightData> sendSchema(Map<String, String> columnsAndTypes) throws IOException {
+        Factory streamGeneratorFactory = new Factory();
+        return streamGeneratorFactory
+                .getSchemaView(fbb -> makeTableSchemaPayload(fbb, DEFAULT_SNAPSHOT_DESER_OPTIONS, columnsAndTypes))
+                .toFlightDataMessage();
+    }
+
+    private static int makeTableSchemaPayload(FlatBufferBuilder fbb, BarrageSnapshotOptions options,
+            Map<String, String> columnsAndTypes) {
+        int[] fields = new int[columnsAndTypes.size()];
+        int i = 0;
+        for (Map.Entry<String, String> entry : columnsAndTypes.entrySet()) {
+            // Unlike BarrageUtil.java, we need to implement this ourselves rather than delegate to Arrow's own types
+            String name = entry.getKey();
+            String type = entry.getValue();
+
+            // TODO this is wrong for array/vector types
+            JsDataHandler writer = JsDataHandler.getHandler(type);
+            if (options.columnsAsList()) {
+                throw new UnsupportedOperationException("columnsAsList not supported");
+            }
+
+            int nameOffset = fbb.createString(name);
+            int typeOffset = writer.writeType(fbb);
+            int metadataOffset = Field.createCustomMetadataVector(fbb, new int[] {
+                    KeyValue.createKeyValue(fbb, fbb.createString("deephaven:type"),
+                            fbb.createString(writer.deephavenType()))
+            });
+
+            Field.startField(fbb);
+            Field.addName(fbb, nameOffset);
+            Field.addNullable(fbb, true);
+
+            Field.addTypeType(fbb, writer.typeType());
+            Field.addType(fbb, typeOffset);
+            Field.addCustomMetadata(fbb, metadataOffset);
+
+            fields[i++] = Field.endField(fbb);
+        }
+
+        int fieldsOffset = Schema.createFieldsVector(fbb, fields);
+
+        Schema.startSchema(fbb);
+        Schema.addFields(fbb, fieldsOffset);
+        return Schema.endSchema(fbb);
+    }
+
+    public static void sendSnapshot(Consumer<FlightData> stream, BarrageSnapshotOptions options) throws IOException {
+        WebBarrageMessage msg = constructMessage();
+        WebBarrageStreamGeneratorImpl bsg = new WebBarrageStreamGeneratorImpl(msg);
+        bsg.getSnapshotView(options).toFlightDataMessage().forEach(stream);
+    }
+
+    private static WebBarrageMessage constructMessage() {
+        return new WebBarrageMessage(); // TODO need args to create this
+    }
+}
diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java
index 06f6625e085..41de16e671f 100644
--- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java
+++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java
@@ -271,6 +271,6 @@ private static BitSet extractBitSet(final ByteBuffer bb) {
     }
 
     private static ShiftedRange[] extractIndexShiftData(final ByteBuffer bb) {
-        return new ShiftedRangeReader().read(bb);
+        return ShiftedRangeReader.read(bb);
     }
 }
diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java
index 4d9bfd93fc4..e0cacec2e51 100644
--- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java
+++
b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java @@ -243,8 +243,7 @@ public DeltaUpdatesBuilder(BarrageUpdateMetadata barrageUpdate, boolean isViewpo deltaUpdates.setRemoved(new CompressedRangeSetReader() .read(barrageUpdate.removedRowsAsByteBuffer())); - deltaUpdates.setShiftedRanges( - new ShiftedRangeReader().read(barrageUpdate.shiftDataAsByteBuffer())); + deltaUpdates.setShiftedRanges(ShiftedRangeReader.read(barrageUpdate.shiftDataAsByteBuffer())); RangeSet includedAdditions; diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSequence.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSequence.java index 11ac09b4231..5b74e3c7a09 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSequence.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSequence.java @@ -9,6 +9,7 @@ import io.deephaven.util.datastructures.LongSizedDataStructure; public interface RowSequence extends SafeCloseable, LongSizedDataStructure { + long NULL_ROW_KEY = -1L; boolean isEmpty(); long lastRowKey(); boolean forEachRowKey(LongAbortableConsumer lac); @@ -18,7 +19,6 @@ default void forAllRowKeys(java.util.function.LongConsumer lc) { return true; }); } -// void forAllRowKeys(java.util.function.LongConsumer lc); void forAllRowKeyRanges(LongRangeConsumer lrc); } \ No newline at end of file diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSequenceFactory.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSequenceFactory.java index d3fd8100907..90ad679f7cf 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSequenceFactory.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSequenceFactory.java @@ -3,9 +3,12 @@ // package io.deephaven.engine.rowset; +import io.deephaven.engine.rowset.impl.WritableRowSetImpl; +import io.deephaven.web.shared.data.RangeSet; + public class RowSequenceFactory { - public static /*final*/ RowSequence EMPTY; + public static final RowSequence EMPTY = new WebRowSetImpl(RangeSet.empty()); public static RowSequence forRange(final long firstRowKey, final long lastRowKey) { - return null; + return new WebRowSetImpl(RangeSet.ofRange(firstRowKey, lastRowKey)); } } \ No newline at end of file diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSet.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSet.java index ebe0d6166b4..fd2d63c1b40 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSet.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSet.java @@ -8,4 +8,7 @@ public interface RowSet extends RowSequence, LongSizedDataStructure, SafeCloseable { RowSet copy(); + long get(long rowPosition); + + WritableRowSet intersect(RowSet rowSet); } \ No newline at end of file diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSetFactory.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSetFactory.java index 6ed9e89cd04..c5a8716ce96 100644 --- 
a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSetFactory.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSetFactory.java @@ -4,9 +4,20 @@ package io.deephaven.engine.rowset; import io.deephaven.engine.rowset.RowSetBuilderSequential; +import io.deephaven.web.shared.data.RangeSet; public class RowSetFactory { + + public static RowSet empty() { + return new WebRowSetImpl(RangeSet.empty()); + } public static RowSetBuilderSequential builderSequential() { - return null; + return new WebRowSetBuilderSequentialImpl(); + } + public static RowSet fromRange(long first, long last) { + return new WebRowSetImpl(RangeSet.ofRange(first, last)); + } + public static RowSet flat(long size) { + return size <= 0 ? empty() : new WebRowSetImpl(RangeSet.ofRange(0, size - 1)); } } \ No newline at end of file diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WebRowSetBuilderSequentialImpl.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WebRowSetBuilderSequentialImpl.java new file mode 100644 index 00000000000..871bc6b1b65 --- /dev/null +++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WebRowSetBuilderSequentialImpl.java @@ -0,0 +1,21 @@ +package io.deephaven.engine.rowset; + +import io.deephaven.web.shared.data.Range; +import io.deephaven.web.shared.data.RangeSet; + +final class WebRowSetBuilderSequentialImpl implements RowSetBuilderSequential { + private final RangeSet rangeSet = new RangeSet(); + @Override + public void appendRange(long rangeFirstRowKey, long rangeLastRowKey) { + rangeSet.addRange(new Range(rangeFirstRowKey, rangeLastRowKey)); + } + + @Override + public void accept(long first, long last) { + appendRange(first, last); + } + @Override + public RowSet build() { + return new WebRowSetImpl(rangeSet); + } +} \ No newline at end of file diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WebRowSetImpl.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WebRowSetImpl.java new file mode 100644 index 00000000000..99670fee366 --- /dev/null +++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WebRowSetImpl.java @@ -0,0 +1,89 @@ +package io.deephaven.engine.rowset; + +import io.deephaven.util.datastructures.LongAbortableConsumer; +import io.deephaven.util.datastructures.LongRangeConsumer; +import io.deephaven.web.shared.data.RangeSet; + +import java.util.PrimitiveIterator; + +final class WebRowSetImpl implements RowSet, WritableRowSet { + private final RangeSet rangeSet; + + WebRowSetImpl(RangeSet rangeSet) { + this.rangeSet = rangeSet; + } + + @Override + public boolean isEmpty() { + return rangeSet.isEmpty(); + } + + @Override + public long lastRowKey() { + return rangeSet.getLastRow(); + } + + @Override + public boolean forEachRowKey(LongAbortableConsumer lac) { + PrimitiveIterator.OfLong iter = rangeSet.indexIterator(); + while (iter.hasNext()) { + long key = iter.nextLong(); + if (!lac.accept(key)) { + return false; + } + } + return true; + } + + @Override + public void forAllRowKeyRanges(LongRangeConsumer lrc) { + rangeSet.rangeIterator().forEachRemaining(r -> { + lrc.accept(r.getFirst(), r.getLast()); + }); + } + + @Override + public long get(long position) { + return -1; + } + @Override + public WritableRowSet intersect(RowSet rowSet) { + return this; + } + 
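+    // [Editor's note] get(), intersect(), and shift() in this WIP super-source are placeholders:
+    // get() always answers -1, and intersect()/shift() just return `this`. A real position lookup
+    // over the sorted, non-overlapping ranges (hypothetical helper; Range#getFirst()/getLast()
+    // and rangeSet.rangeIterator() are used elsewhere in this patch) could look like:
+    //   long remaining = position;
+    //   Iterator<Range> it = rangeSet.rangeIterator();
+    //   while (it.hasNext()) {
+    //       Range r = it.next();
+    //       long size = r.getLast() - r.getFirst() + 1;
+    //       if (remaining < size) {
+    //           return r.getFirst() + remaining;   // position falls inside this range
+    //       }
+    //       remaining -= size;
+    //   }
+    //   return -1;                                 // position past the end of the set
+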
@Override + public WritableRowSet shift(long shiftAmount) { + return this; + } + + + @Override + public long size() { + return rangeSet.size(); + } + + @Override + public void close() { + + } + + @Override + public RowSet copy() { + return new WebRowSetImpl(rangeSet.copy()); + } + + @Override + public boolean equals(Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof WebRowSetImpl)) { + return false; + } + return rangeSet.equals(((WebRowSetImpl) obj).rangeSet); + } + + @Override + public int hashCode() { + return rangeSet.hashCode(); + } +} \ No newline at end of file diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WritableRowSet.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WritableRowSet.java new file mode 100644 index 00000000000..2b19f8847ca --- /dev/null +++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WritableRowSet.java @@ -0,0 +1,8 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.engine.rowset; + +public interface WritableRowSet extends RowSet { + WritableRowSet shift(long shiftAmount); +} \ No newline at end of file diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java index 7a20483716f..c819ced186a 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java @@ -333,6 +333,12 @@ public long getLastRow() { return sortedRanges[sortedRanges.length - 1].getLast(); } + public RangeSet copy() { + RangeSet copy = new RangeSet(); + copy.sortedRanges = Arrays.copyOf(sortedRanges, sortedRanges.length); + return copy; + } + @Override public boolean equals(Object o) { if (this == o) From 55d52372595243381e00f85c92c954f37d7a1ed5 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 11 Nov 2022 12:37:50 -0600 Subject: [PATCH 047/219] Update GWT, and prune unneeded dependencies --- buildSrc/src/main/groovy/Classpaths.groovy | 11 +++++------ buildSrc/src/main/groovy/GwtTools.groovy | 4 ++++ 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/buildSrc/src/main/groovy/Classpaths.groovy b/buildSrc/src/main/groovy/Classpaths.groovy index a09da2269ad..9ca77f841fd 100644 --- a/buildSrc/src/main/groovy/Classpaths.groovy +++ b/buildSrc/src/main/groovy/Classpaths.groovy @@ -24,7 +24,7 @@ class Classpaths { static final String ELEMENTAL_VERSION = '1.1.0' static final String GWT_GROUP = 'com.google.gwt' - static final String GWT_VERSION = '2.9.0' + static final String GWT_VERSION = '2.10.0' static final String JAVA_PARSER_GROUP = 'com.github.javaparser' static final String JAVA_PARSER_NAME = 'javaparser-core' @@ -36,8 +36,7 @@ class Classpaths { static final String JAVAX_ANNOTATIONS_NAME = 'validation-api' static final String JAVAX_ANNOTATIONS_VERSION = '1.0.0.GA' - static final String JETTY_GROUP = 'org.eclipse.jetty' - static final String JETTY_VERSION = '9.4.20.v20190813' + static final String JETTY_VERSION = '9.4.44.v20210927' static final String JS_INTEROP_GROUP = 'com.google.jsinterop' static final String JS_INTEROP_VERSION = '2.0.0' @@ -146,9 +145,9 @@ class Classpaths { if (addDependency(config, GWT_GROUP, name, GWT_VERSION)) { // when we add gwt-dev, lets also force asm version, just to be safe. 
name == 'gwt-dev' && config.resolutionStrategy { - force 'org.ow2.asm:asm:5.0.3' - force 'org.ow2.asm:asm-util:5.0.3' - force 'org.ow2.asm:asm-commons:5.0.3' + force 'org.ow2.asm:asm:9.2' + force 'org.ow2.asm:asm-util:9.2' + force 'org.ow2.asm:asm-commons:9.2' } } } diff --git a/buildSrc/src/main/groovy/GwtTools.groovy b/buildSrc/src/main/groovy/GwtTools.groovy index 64517239f87..40acb779c4a 100644 --- a/buildSrc/src/main/groovy/GwtTools.groovy +++ b/buildSrc/src/main/groovy/GwtTools.groovy @@ -83,6 +83,10 @@ class GwtTools { static void applyDefaults(Project p, GwtExtension gwt, boolean compile = false) { gwt.gwtVersion = Classpaths.GWT_VERSION gwt.jettyVersion = Classpaths.JETTY_VERSION + p.configurations.each { + it.exclude group: 'net.sourceforge.htmlunit' + it.exclude group: 'org.eclipse.jetty' + } if (compile) { String warPath = new File(p.buildDir, 'gwt').absolutePath From 5591d2ca556d53bd51f47b9eaac939f728faba73 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 25 Jul 2023 10:36:16 -0500 Subject: [PATCH 048/219] Start moving to updated jsinterop jars --- buildSrc/src/main/groovy/Classpaths.groovy | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/buildSrc/src/main/groovy/Classpaths.groovy b/buildSrc/src/main/groovy/Classpaths.groovy index 9ca77f841fd..15a3b2e1031 100644 --- a/buildSrc/src/main/groovy/Classpaths.groovy +++ b/buildSrc/src/main/groovy/Classpaths.groovy @@ -39,7 +39,7 @@ class Classpaths { static final String JETTY_VERSION = '9.4.44.v20210927' static final String JS_INTEROP_GROUP = 'com.google.jsinterop' - static final String JS_INTEROP_VERSION = '2.0.0' + static final String JS_INTEROP_VERSION = '2.0.2' static final String COMMONS_GROUP = 'org.apache.commons' @@ -167,7 +167,7 @@ class Classpaths { addDependency config, JS_INTEROP_GROUP, name, // google is annoying, and have different versions released for the same groupId // :base: is the only one that is different, so we'll use it in the ternary. - name == 'base'? '1.0.0' : JS_INTEROP_VERSION + name == 'base'? 
'1.0.1' : JS_INTEROP_VERSION } static void inheritElemental(Project p, String name, String configName) { From 448a5895604742e27cde51317c9ea59e47066c33 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Sun, 25 Feb 2024 18:17:02 -0600 Subject: [PATCH 049/219] Update elemental2 --- buildSrc/src/main/groovy/Classpaths.groovy | 2 +- .../io/deephaven/web/client/api/CoreClient.java | 2 +- .../web/client/api/HasEventHandling.java | 2 +- .../web/client/api/JsPartitionedTable.java | 2 +- .../io/deephaven/web/client/api/JsTable.java | 4 ++-- .../web/client/api/JsTotalsTableConfig.java | 10 +++++----- .../web/client/api/WorkerConnection.java | 2 +- .../client/api/barrage/stream/BiDiStream.java | 2 +- .../barrage/stream/HandshakeStreamFactory.java | 16 ++++++++-------- .../web/client/api/storage/JsStorageService.java | 2 +- .../web/client/api/tree/JsRollupConfig.java | 10 +++++----- .../web/client/api/tree/JsTreeTable.java | 8 ++++---- .../web/client/api/widget/JsWidget.java | 2 +- .../api/widget/calendar/JsBusinessCalendar.java | 6 +++--- .../client/api/widget/calendar/JsHoliday.java | 2 +- .../web/client/api/widget/plot/ChartData.java | 6 +++--- .../api/widget/plot/FigureSubscription.java | 2 +- .../api/widget/plot/JsChartDescriptor.java | 6 +++--- .../api/widget/plot/JsFigureDescriptor.java | 8 ++++---- .../client/api/widget/plot/JsFigureFactory.java | 2 +- .../client/api/widget/plot/JsMultiSeries.java | 2 +- .../api/widget/plot/JsSeriesDescriptor.java | 2 +- .../web/client/api/widget/plot/OneClick.java | 2 +- .../java/io/deephaven/web/client/fu/JsItr.java | 2 +- .../io/deephaven/web/client/ide/IdeSession.java | 4 ++-- 25 files changed, 54 insertions(+), 54 deletions(-) diff --git a/buildSrc/src/main/groovy/Classpaths.groovy b/buildSrc/src/main/groovy/Classpaths.groovy index 15a3b2e1031..06fc40ed55e 100644 --- a/buildSrc/src/main/groovy/Classpaths.groovy +++ b/buildSrc/src/main/groovy/Classpaths.groovy @@ -21,7 +21,7 @@ import org.gradle.internal.Actions class Classpaths { static final String ELEMENTAL_GROUP = 'com.google.elemental2' - static final String ELEMENTAL_VERSION = '1.1.0' + static final String ELEMENTAL_VERSION = '1.2.1' static final String GWT_GROUP = 'com.google.gwt' static final String GWT_VERSION = '2.10.0' diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/CoreClient.java b/web/client-api/src/main/java/io/deephaven/web/client/api/CoreClient.java index 2b11d61d8aa..8687dedcd18 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/CoreClient.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/CoreClient.java @@ -84,7 +84,7 @@ public String getServerUrl() { public Promise getAuthConfigValues() { return ideConnection.getConnectOptions().then(options -> { BrowserHeaders metadata = new BrowserHeaders(); - JsObject.keys(options.headers).forEach((key, index, arr) -> { + JsObject.keys(options.headers).forEach((key, index) -> { metadata.set(key, options.headers.get(key)); return null; }); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/HasEventHandling.java b/web/client-api/src/main/java/io/deephaven/web/client/api/HasEventHandling.java index adb57092f10..1f39d076dcd 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/HasEventHandling.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/HasEventHandling.java @@ -178,7 +178,7 @@ public void fireEvent(String type, CustomEvent e) { } if (map.has(e.type)) { final JsArray> callbacks = Js.cast(JsArray.from((JsArrayLike>) map.get(e.type))); - 
callbacks.forEach((item, ind, all) -> { + callbacks.forEach((item, ind) -> { try { item.onEvent(e); } catch (Throwable t) { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/JsPartitionedTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/JsPartitionedTable.java index 04e34010d1f..38a184716a6 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/JsPartitionedTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/JsPartitionedTable.java @@ -163,7 +163,7 @@ private void handleKeys(Event update) { RangeSet added = eventData.getAdded().getRange(); added.indexIterator().forEachRemaining((long index) -> { // extract the key to use - JsArray key = eventData.getColumns().map((c, p1, p2) -> eventData.getData(index, c)); + JsArray key = eventData.getColumns().map((c, p1) -> eventData.getData(index, c)); knownKeys.add(key.asList()); CustomEventInit> init = CustomEventInit.create(); init.setDetail(key); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java index 6f9a38f096c..4e380631ad7 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java @@ -634,7 +634,7 @@ default CustomColumn asCustomColumn() { @JsMethod @SuppressWarnings("unusable-by-js") public JsArray applyCustomColumns(JsArray customColumns) { - String[] customColumnStrings = customColumns.map((item, index, array) -> { + String[] customColumnStrings = customColumns.map((item, index) -> { if (item.isString() || item.isCustomColumn()) { return item.toString(); } @@ -1728,7 +1728,7 @@ public void processSnapshot() { return; } JsArray viewportColumns = - getColumns().filter((item, index, all) -> debounce.columns.get(item.getIndex())); + getColumns().filter((item, index) -> debounce.columns.get(item.getIndex())); ViewportData data = new ViewportData(debounce.includedRows, debounce.dataColumns, viewportColumns, currentState.getRowFormatColumn() == null ? NO_ROW_FORMAT_COLUMN : currentState.getRowFormatColumn().getIndex(), diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTotalsTableConfig.java b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTotalsTableConfig.java index 8777f250dd0..b92364c9a6d 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTotalsTableConfig.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTotalsTableConfig.java @@ -133,7 +133,7 @@ public JsTotalsTableConfig(JsPropertyMap source) { if (source.has("operationMap")) { operationMap = source.getAsAny("operationMap").cast(); operationMap.forEach(key -> { - operationMap.get(key).forEach((value, index, array) -> { + operationMap.get(key).forEach((value, index) -> { checkOperation(Js.cast(value)); return null; }); @@ -237,9 +237,9 @@ public AggregateRequest buildRequest(JsArray allColumns) { Map> aggs = new HashMap<>(); List colsNeedingCompoundNames = new ArrayList<>(); Set seenColNames = new HashSet<>(); - groupBy.forEach((col, p1, p2) -> seenColNames.add(Js.cast(col))); + groupBy.forEach((col, p1) -> seenColNames.add(Js.cast(col))); this.operationMap.forEach(colName -> { - this.operationMap.get(colName).forEach((agg, index, arr) -> { + this.operationMap.get(colName).forEach((agg, index) -> { if (!JsAggregationOperation.canAggregateType(agg, columnTypes.get(colName))) { // skip this column. 
to follow DHE's behavior return null; @@ -272,7 +272,7 @@ public AggregateRequest buildRequest(JsArray allColumns) { AggregationCount count = new AggregationCount(); count.setColumnName("Count"); agg.setCount(count); - aggColumns.forEach((p0, p1, p2) -> { + aggColumns.forEach((p0, p1) -> { String colName = p0.split("=")[0].trim(); customColumns.push(colName + " = Count"); return null; @@ -296,7 +296,7 @@ public AggregateRequest buildRequest(JsArray allColumns) { columns.setSpec(spec); columns.setMatchPairsList(aggColumns); agg.setColumns(columns); - aggColumns.forEach((p0, p1, p2) -> { + aggColumns.forEach((p0, p1) -> { String colName = p0.split("=")[0].trim(); customColumns.push(colName + "= `` + " + colName); return null; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java b/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java index a3808691c8b..625b80443df 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java @@ -278,7 +278,7 @@ private void connectToWorker() { }), info.getConnectOptions().then(options -> { // set other specified headers, if any - JsObject.keys(options.headers).forEach((key, index, arr) -> { + JsObject.keys(options.headers).forEach((key, index) -> { metadata.set(key, options.headers.get(key)); return null; }); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/stream/BiDiStream.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/stream/BiDiStream.java index 0132c24899b..758fe62f20c 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/stream/BiDiStream.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/stream/BiDiStream.java @@ -187,7 +187,7 @@ static class EmulatedBiDiStream extends BiDiStream { public void send(T payload) { if (responseStream == null) { responseStream = responseStreamFactory.apply(payload); - pending.forEach((p0, p1, p2) -> { + pending.forEach((p0, p1) -> { p0.apply(responseStream); return null; }); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/stream/HandshakeStreamFactory.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/stream/HandshakeStreamFactory.java index 260a85254db..bae2d3c41fd 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/stream/HandshakeStreamFactory.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/stream/HandshakeStreamFactory.java @@ -50,18 +50,18 @@ public static BiDiStream create(WorkerConne Client client = Grpc.client(FlightService.Handshake, (io.deephaven.javascript.proto.dhinternal.grpcweb.grpc.ClientRpcOptions) options); client.onEnd((status, statusMessage, trailers) -> { - listeners.get(STATUS_EVENT_LISTENER_NAME).forEach((item, index, arr) -> item.call(null, + listeners.get(STATUS_EVENT_LISTENER_NAME).forEach((item, index) -> item.call(null, ResponseStreamWrapper.Status.of(status, statusMessage, metadata))); - listeners.get(END_EVENT_LISTENER_NAME).forEach((item, index, arr) -> item.call(null, + listeners.get(END_EVENT_LISTENER_NAME).forEach((item, index) -> item.call(null, ResponseStreamWrapper.Status.of(status, statusMessage, metadata))); listeners.clear(); }); client.onMessage(message -> { - listeners.get(DATA_EVENT_LISTENER_NAME).forEach((item, index, arr) -> item.call(null, message)); + 
listeners.get(DATA_EVENT_LISTENER_NAME).forEach((item, index) -> item.call(null, message)); }); client.onHeaders(headers -> { listeners.get(HEADERS_EVENT_LISTENER_NAME) - .forEach((item, index, arr) -> item.call(null, headers)); + .forEach((item, index) -> item.call(null, headers)); }); client.start(metadata); @@ -103,20 +103,20 @@ public BidirectionalStream write( props.setDebug(false); props.setOnMessage(responseMessage -> { listeners.get(DATA_EVENT_LISTENER_NAME) - .forEach((item, index, arr) -> item.call(null, responseMessage)); + .forEach((item, index) -> item.call(null, responseMessage)); }); props.setOnEnd((status, statusMessage, trailers) -> { listeners.get(STATUS_EVENT_LISTENER_NAME).forEach( - (item, index, arr) -> item.call(null, + (item, index) -> item.call(null, ResponseStreamWrapper.Status.of(status, statusMessage, metadata))); listeners.get(END_EVENT_LISTENER_NAME).forEach( - (item, index, arr) -> item.call(null, + (item, index) -> item.call(null, ResponseStreamWrapper.Status.of(status, statusMessage, metadata))); listeners.clear(); }); props.setOnHeaders(headers -> { listeners.get(HEADERS_EVENT_LISTENER_NAME) - .forEach((item, index, arr) -> item.call(null, headers)); + .forEach((item, index) -> item.call(null, headers)); }); Request client = Grpc.invoke.onInvoke(BrowserFlightService.OpenHandshake, props); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/storage/JsStorageService.java b/web/client-api/src/main/java/io/deephaven/web/client/api/storage/JsStorageService.java index 0ec927df9ee..2e92acb3c01 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/storage/JsStorageService.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/storage/JsStorageService.java @@ -65,7 +65,7 @@ public Promise> listItems(String path, @JsOptional String return Callbacks.grpcUnaryPromise(c -> client().listItems(req, metadata(), c::apply)) .then(response -> Promise .resolve(response.getItemsList() - .map((item, i, arr) -> JsItemDetails.fromProto(response.getCanonicalPath(), item)))); + .map((item, i) -> JsItemDetails.fromProto(response.getCanonicalPath(), item)))); } /** diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsRollupConfig.java b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsRollupConfig.java index f2861b1872c..1a0e966abc3 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsRollupConfig.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsRollupConfig.java @@ -115,10 +115,10 @@ public RollupRequest buildRequest(JsArray tableColumns) { Map> aggs = new HashMap<>(); List colsNeedingCompoundNames = new ArrayList<>(); Set seenColNames = new HashSet<>(); - groupingColumns.forEach((col, p1, p2) -> seenColNames.add(Js.cast(col))); + groupingColumns.forEach((col, p1) -> seenColNames.add(Js.cast(col))); this.aggregations.forEach(key -> { LinkedHashSet cols = new LinkedHashSet<>(); - this.aggregations.get(key).forEach((col, index, arr) -> { + this.aggregations.get(key).forEach((col, index) -> { String colName = Js.cast(col); cols.add(colName); if (seenColNames.contains(colName)) { @@ -297,14 +297,14 @@ private JsArray dedup(LinkedHashSet cols, List colsNeedi private String unusedColumnName(JsArray existingColumns, String... 
suggestedNames) { // Try to use the default column names for (String suggestedName : suggestedNames) { - if (!existingColumns.some((p0, p1, p2) -> p0.getName().equals(suggestedName))) { + if (!existingColumns.some((p0, p1) -> p0.getName().equals(suggestedName))) { return suggestedName; } } // Next add a suffix and use that if possible for (String suggestedName : suggestedNames) { - if (!existingColumns.some((p0, p1, p2) -> p0.getName().equals(suggestedName + "_"))) { + if (!existingColumns.some((p0, p1) -> p0.getName().equals(suggestedName + "_"))) { return suggestedName + "_"; } } @@ -312,7 +312,7 @@ private String unusedColumnName(JsArray existingColumns, String... sugge // Give up and add a timestamp suffix for (String suggestedName : suggestedNames) { if (!existingColumns - .some((p0, p1, p2) -> p0.getName().equals(suggestedName + "_" + System.currentTimeMillis()))) { + .some((p0, p1) -> p0.getName().equals(suggestedName + "_" + System.currentTimeMillis()))) { return suggestedName + "_" + System.currentTimeMillis(); } } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java index 37d8fecd497..50808f9776d 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java @@ -158,7 +158,7 @@ private TreeViewportData(double offset, long viewportSize, double treeSize, Colu // Without modifying this.columns (copied and frozen), make sure our key columns are present // in the list of columns that we will copy data for the viewport - keyColumns.forEach((col, p1, p2) -> { + keyColumns.forEach((col, p1) -> { if (this.columns.indexOf(col) == -1) { columns[columns.length] = col; } @@ -567,8 +567,8 @@ private Promise makeKeyTable() { keyTableColumns.push(rowDepthCol); keyTableColumns.push(actionCol); keyTable = connection.newTable( - Js.uncheckedCast(keyTableColumns.map((p0, p1, p2) -> p0.getName())), - Js.uncheckedCast(keyTableColumns.map((p0, p1, p2) -> p0.getType())), + Js.uncheckedCast(keyTableColumns.map((p0, p1) -> p0.getName())), + Js.uncheckedCast(keyTableColumns.map((p0, p1) -> p0.getType())), keyTableData, null, null); @@ -804,7 +804,7 @@ private BitSet makeColumnSubscriptionBitset() { } columnsBitset.set(rowDepthCol.getIndex()); columnsBitset.set(rowExpandedCol.getIndex()); - keyColumns.forEach((p0, p1, p2) -> { + keyColumns.forEach((p0, p1) -> { columnsBitset.set(p0.getIndex()); return null; }); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/JsWidget.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/JsWidget.java index 3581ce102a4..e0a35c53791 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/JsWidget.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/JsWidget.java @@ -145,7 +145,7 @@ public Promise refetch() { messageStream.onData(res -> { JsArray responseObjects = res.getData().getExportedReferencesList() - .map((p0, p1, p2) -> new JsWidgetExportedObject(connection, p0)); + .map((p0, p1) -> new JsWidgetExportedObject(connection, p0)); if (!hasFetched) { response = res; exportedObjects = responseObjects; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/calendar/JsBusinessCalendar.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/calendar/JsBusinessCalendar.java index c2a50c1417a..837a67a1618 100644 --- 
a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/calendar/JsBusinessCalendar.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/calendar/JsBusinessCalendar.java @@ -28,9 +28,9 @@ public JsBusinessCalendar(BusinessCalendarDescriptor businessCalendarDescriptor) JsObject.freeze(this.businessCalendarDescriptor); timeZone = JsTimeZone.getTimeZone(businessCalendarDescriptor.getTimeZone()); businessPeriods = - businessCalendarDescriptor.getBusinessPeriodsList().map((p0, p1, p2) -> new JsBusinessPeriod(p0)); + businessCalendarDescriptor.getBusinessPeriodsList().map((p0, p1) -> new JsBusinessPeriod(p0)); JsObject.freeze(businessPeriods); - holidays = businessCalendarDescriptor.getHolidaysList().map((p0, p1, p2) -> new JsHoliday(p0)); + holidays = businessCalendarDescriptor.getHolidaysList().map((p0, p1) -> new JsHoliday(p0)); JsObject.freeze(holidays); } @@ -62,7 +62,7 @@ public JsTimeZone getTimeZone() { @JsProperty public JsArray getBusinessDays() { return businessCalendarDescriptor.getBusinessDaysList() - .map((p0, p1, p2) -> JsDayOfWeek.values()[(int) (double) p0]); + .map((p0, p1) -> JsDayOfWeek.values()[(int) (double) p0]); } /** diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/calendar/JsHoliday.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/calendar/JsHoliday.java index 5cce1d42ee5..f3deea8d0fc 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/calendar/JsHoliday.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/calendar/JsHoliday.java @@ -21,7 +21,7 @@ public class JsHoliday { public JsHoliday(Holiday holiday) { date = new LocalDateWrapper(holiday.getDate().getYear(), holiday.getDate().getMonth(), holiday.getDate().getDay()); - businessPeriods = holiday.getBusinessPeriodsList().map((p0, p1, p2) -> new JsBusinessPeriod(p0)); + businessPeriods = holiday.getBusinessPeriodsList().map((p0, p1) -> new JsBusinessPeriod(p0)); JsObject.freeze(businessPeriods); } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/ChartData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/ChartData.java index b81b84dbc3b..8a30411fd33 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/ChartData.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/ChartData.java @@ -128,7 +128,7 @@ public void update(UpdateEventData tableData) { assert cachedData.values().stream().flatMap(m -> m.values().stream()) .allMatch(arr -> arr.length == indexes.length); assert cachedData.values().stream().flatMap(m -> m.values().stream()).allMatch(arr -> arr - .reduce((Object val, Any p1, int p2, JsArray p3) -> ((Integer) val) + 1, 0) == indexes.length); + .reduce((Object val, Any p1, int p2) -> ((Integer) val) + 1, 0) == indexes.length); JsRangeSet fullIndex = tableData.getFullIndex(); PrimitiveIterator.OfLong iter = fullIndex.getRange().indexIterator(); @@ -240,10 +240,10 @@ public JsArray getColumn(String columnName, JsFunction mappingFun private JsArray collectAllData(String columnName, JsFunction mappingFunc, TableData currentUpdate) { Column column = table.findColumn(columnName); if (mappingFunc == null) { - return currentUpdate.getRows().map((p0, p1, p2) -> p0.get(column)); + return currentUpdate.getRows().map((p0, p1) -> p0.get(column)); } - return currentUpdate.getRows().map((p0, p1, p2) -> mappingFunc.apply(p0.get(column))); + return currentUpdate.getRows().map((p0, p1) -> 
mappingFunc.apply(p0.get(column))); } /** diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/FigureSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/FigureSubscription.java index e71ba18e458..2b1ed636861 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/FigureSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/FigureSubscription.java @@ -252,7 +252,7 @@ private Promise subscribe(final Promise tablePromise } TableSubscription sub = table.subscribe( - table.getColumns().filter((c, index, all) -> this.requiredColumns.contains(c.getName()))); + table.getColumns().filter((c, index) -> this.requiredColumns.contains(c.getName()))); // TODO, technically we can probably unsubscribe to the table at this point, since we're listening to the // subscription itself diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsChartDescriptor.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsChartDescriptor.java index ee4147d0238..c6e8f048dc9 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsChartDescriptor.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsChartDescriptor.java @@ -55,21 +55,21 @@ public JsChartDescriptor(JsPropertyMap source) { Map axisMap = new HashMap<>(); if (source.has("axes")) { JsArray axes = source.getAsAny("axes").cast(); - this.axes = Js.uncheckedCast(axes.map((axisSource, index, all) -> { + this.axes = Js.uncheckedCast(axes.map((axisSource, index) -> { if (axisSource instanceof JsAxisDescriptor) { return (JsAxisDescriptor) axisSource; } else { return new JsAxisDescriptor((JsPropertyMap) axisSource); } })); - this.axes.forEach((axis, i, all) -> axisMap.put(axes.getAt(i), axis)); + this.axes.forEach((axis, i) -> axisMap.put(axes.getAt(i), axis)); } else { throw new IllegalArgumentException("'axes' property must be set"); } if (source.has("series")) { JsArray series = source.getAsAny("series").cast(); - this.series = Js.uncheckedCast(series.map((seriesSource, index, all) -> { + this.series = Js.uncheckedCast(series.map((seriesSource, index) -> { if (seriesSource instanceof JsSeriesDescriptor) { return (JsSeriesDescriptor) seriesSource; } else { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsFigureDescriptor.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsFigureDescriptor.java index aeb5624898b..4d1d4f17e60 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsFigureDescriptor.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsFigureDescriptor.java @@ -50,7 +50,7 @@ public JsFigureDescriptor(JsPropertyMap source) { this(); JsArray charts = JsData.getRequiredProperty(source, "charts").cast(); - this.charts = Js.uncheckedCast(charts.map((chartSource, index, all) -> { + this.charts = Js.uncheckedCast(charts.map((chartSource, index) -> { if (chartSource instanceof JsChartDescriptor) { return (JsChartDescriptor) chartSource; } else { @@ -70,9 +70,9 @@ public JsFigureDescriptor(JsPropertyMap source) { @JsIgnore public JsArray getTables() { Set tableSet = new HashSet<>(); - charts.forEach((chart, i1, a1) -> { - chart.series.forEach((series, i2, a2) -> { - series.dataSources.forEach((source, i3, a3) -> { + charts.forEach((chart, i1) -> { + chart.series.forEach((series, i2) -> { + 
series.dataSources.forEach((source, i3) -> { tableSet.add(source.table); return null; }); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsFigureFactory.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsFigureFactory.java index 38a6780fcbf..0b2dd4109ca 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsFigureFactory.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsFigureFactory.java @@ -43,7 +43,7 @@ private static Promise create(JsFigureDescriptor descriptor) { FetchObjectResponse response = new FetchObjectResponse(); response.setData(figureDescriptor.serializeBinary()); Promise[] tableCopyPromises = - tables.map((table, index, all) -> table.copy(false)).asArray(new Promise[0]); + tables.map((table, index) -> table.copy(false)).asArray(new Promise[0]); return Promise.all(tableCopyPromises) .then(unknownTableCopies -> { JsArray jsTableCopies = Js.cast(unknownTableCopies); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsMultiSeries.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsMultiSeries.java index 432b9ca6ad9..482bb9e281f 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsMultiSeries.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsMultiSeries.java @@ -84,7 +84,7 @@ private void requestTable(JsPartitionedTable partitionedTable, Object key) { seriesInstance.setDataSourcesList( descriptor.getDataSourcesList() - .map((multiSeriesSource, p1, p2) -> { + .map((multiSeriesSource, p1) -> { SourceDescriptor sourceDescriptor = new SourceDescriptor(); sourceDescriptor.setColumnName(multiSeriesSource.getColumnName()); sourceDescriptor.setAxisId(multiSeriesSource.getAxisId()); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsSeriesDescriptor.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsSeriesDescriptor.java index 3fea7c635a4..8ac17cc7302 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsSeriesDescriptor.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsSeriesDescriptor.java @@ -57,7 +57,7 @@ public JsSeriesDescriptor(JsPropertyMap source, Map> dataSources = JsData.getRequiredProperty(source, "dataSources").cast(); - this.dataSources = dataSources.map((sourceSource, index, all) -> new JsSourceDescriptor(sourceSource, axisMap)); + this.dataSources = dataSources.map((sourceSource, index) -> new JsSourceDescriptor(sourceSource, axisMap)); linesVisible = JsData.getNullableBooleanProperty(source, "linesVisible"); shapesVisible = JsData.getNullableBooleanProperty(source, "shapesVisible"); gradientVisible = JsData.getNullableBooleanProperty(source, "gradientVisible"); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/OneClick.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/OneClick.java index 2736ecc0344..d95fa11614b 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/OneClick.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/OneClick.java @@ -129,7 +129,7 @@ private Object[] getCurrentKeys() { } // Some of the values aren't set, need to iterate through all the table map keys and select the ones that match - return JsArray.from(partitionedTable.getKeys()).filter((tableKey, index, all) -> { + return 
JsArray.from(partitionedTable.getKeys()).filter((tableKey, index) -> { if (!(tableKey instanceof String[])) { return false; } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/fu/JsItr.java b/web/client-api/src/main/java/io/deephaven/web/client/fu/JsItr.java index b1254a8f973..d1658750014 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/fu/JsItr.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/fu/JsItr.java @@ -64,7 +64,7 @@ public T next() { * js forEach signature is a bit weird, so we'll adapt it to something saner here */ public static void forEach(JsMap map, JsBiConsumer callback) { - map.forEach((v, k, m) -> { + map.forEach((v, k) -> { callback.apply(k, v); return null; }); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/ide/IdeSession.java b/web/client-api/src/main/java/io/deephaven/web/client/ide/IdeSession.java index 314daf265a9..438c5c857e4 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/ide/IdeSession.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/ide/IdeSession.java @@ -370,7 +370,7 @@ public Promise> getCompl .timeout(JsTable.MAX_BATCH_TIME) .asPromise() .then(res -> Promise.resolve( - res.getCompletionItems().getItemsList().map((item, index, arr) -> LspTranslate.toJs(item))), + res.getCompletionItems().getItemsList().map((item, index) -> LspTranslate.toJs(item))), fail -> { // noinspection unchecked, rawtypes return (Promise>) (Promise) Promise @@ -398,7 +398,7 @@ public Promise> ge .timeout(JsTable.MAX_BATCH_TIME) .asPromise() .then(res -> Promise.resolve( - res.getSignatures().getSignaturesList().map((item, index, arr) -> LspTranslate.toJs(item))), + res.getSignatures().getSignaturesList().map((item, index) -> LspTranslate.toJs(item))), fail -> { // noinspection unchecked, rawtypes return (Promise>) (Promise) Promise From 4d12c1d48450a4090fbc8735f7a72bb91eedc74a Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Sun, 25 Feb 2024 20:44:21 -0600 Subject: [PATCH 050/219] Dirty hack to use newer gwt --- buildSrc/build.gradle | 2 +- buildSrc/src/main/groovy/Classpaths.groovy | 4 +- buildSrc/src/main/groovy/GwtTools.groovy | 274 +++++++++--------- web/client-api/client-api.gradle | 22 +- web/client-backplane/client-backplane.gradle | 2 +- .../table_pb_service/TableServiceClient.java | 2 - web/shared-beans/shared-beans.gradle | 2 +- .../io/deephaven/web/shared/data/Range.java | 3 +- .../shared/data/TableSubscriptionRequest.java | 3 +- 9 files changed, 164 insertions(+), 150 deletions(-) diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index a413615a03c..9230722e592 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -17,7 +17,7 @@ repositories { } dependencies { - implementation('de.esoco.gwt:gwt-gradle-plugin:1.2.0') { + implementation('org.docstr:gwt-gradle-plugin:1.1.30') { exclude group: 'org.codehaus.groovy' because('needed for GwtTools') } diff --git a/buildSrc/src/main/groovy/Classpaths.groovy b/buildSrc/src/main/groovy/Classpaths.groovy index 06fc40ed55e..23d98b50539 100644 --- a/buildSrc/src/main/groovy/Classpaths.groovy +++ b/buildSrc/src/main/groovy/Classpaths.groovy @@ -23,8 +23,8 @@ class Classpaths { static final String ELEMENTAL_GROUP = 'com.google.elemental2' static final String ELEMENTAL_VERSION = '1.2.1' - static final String GWT_GROUP = 'com.google.gwt' - static final String GWT_VERSION = '2.10.0' + static final String GWT_GROUP = 'org.gwtproject' + static final String GWT_VERSION = '2.11.0' static final String JAVA_PARSER_GROUP = 
'com.github.javaparser' static final String JAVA_PARSER_NAME = 'javaparser-core' diff --git a/buildSrc/src/main/groovy/GwtTools.groovy b/buildSrc/src/main/groovy/GwtTools.groovy index 40acb779c4a..ef728cce19d 100644 --- a/buildSrc/src/main/groovy/GwtTools.groovy +++ b/buildSrc/src/main/groovy/GwtTools.groovy @@ -1,137 +1,137 @@ -import de.esoco.gwt.gradle.GwtLibPlugin -import de.esoco.gwt.gradle.GwtPlugin -import de.esoco.gwt.gradle.extension.GwtExtension -import de.esoco.gwt.gradle.task.GwtCheckTask -import de.esoco.gwt.gradle.task.GwtCompileTask -import groovy.transform.CompileStatic -import org.gradle.api.Project -import org.gradle.api.artifacts.ProjectDependency -import org.gradle.api.file.ConfigurableFileCollection -import org.gradle.api.plugins.JavaPlugin -import org.gradle.api.tasks.compile.JavaCompile - -import java.nio.file.Files - -/** - * Helper to simplify / centralize configuring gwt plugins in build files - */ -@CompileStatic -class GwtTools { - - static GwtExtension gwtLib(Project p) { - p.plugins.apply(GwtLibPlugin) - GwtExtension ext = p.extensions.getByType(GwtExtension) - applyDefaults(p, ext) - return ext - } - static GwtExtension gwtCompile(Project p, String module, String description) { - p.plugins.apply(GwtPlugin) - GwtExtension ext = p.extensions.getByType(GwtExtension) - applyDefaults(p, ext, true) - - // Apply our module settings to and gwtc task; - // currently, there should only be one such task, - // but we used to have two, and may have two again later, - // so we'll leave this setup to be friendly-for-reuse - p.tasks.withType(GwtCompileTask).all { - GwtCompileTask gwtc -> - applyModuleSettings p, gwtc, module,description - } - // This GWT plugin will fail if tests are run after compilation, instead - // we suppress running the test at all, and ensure that it doesn't check - // if it even can be run until after compile finishes. - p.tasks.withType(GwtCheckTask).configureEach {t -> - t.mustRunAfter(p.tasks.withType(GwtCompileTask)) - t.onlyIf { false } - } - - return ext - } - - static void applyModuleSettings(Project p, GwtCompileTask gwtc, String mod, String description) { - gwtc.onlyIf WebTools.&shouldRun - boolean gwtDev = p.findProperty('gwtDev') == 'true' - String extras = new File(p.buildDir, "gwt/dhapi/extra").absolutePath - - GwtExtension gwt = p.extensions.findByType(GwtExtension) - - gwt.with { - module "${mod}${gwtDev ? 
'Dev' : ''}" - compile.with { - style = 'PRETTY' - generateJsInteropExports = true - // TODO move this down a line when we want to give clients js that is not super strict / rigged to blow - checkAssertions = true - if (gwtDev) { - saveSource = true - extra = extras - logLevel = 'INFO' - draftCompile = true - } - } - } - - p.gradle.projectsEvaluated { - addGeneratedSources(p, gwtc) - } - - gwtDev && gwtc.doFirst { - gwtc.logger.quiet('Running in gwt dev mode; saving source to {}/dh/src', extras) - } - } - - static void applyDefaults(Project p, GwtExtension gwt, boolean compile = false) { - gwt.gwtVersion = Classpaths.GWT_VERSION - gwt.jettyVersion = Classpaths.JETTY_VERSION - p.configurations.each { - it.exclude group: 'net.sourceforge.htmlunit' - it.exclude group: 'org.eclipse.jetty' - } - if (compile) { - - String warPath = new File(p.buildDir, 'gwt').absolutePath - - gwt.compile.with { - // See https://github.com/esoco/gwt-gradle-plugin for all options - /** The level of logging detail (ERROR, WARN, INFO, TRACE, DEBUG, SPAM, ALL) */ - logLevel = "INFO" - /** Where to write output files */ - war = warPath - /** Compile a report that tells the "Story of Your Compile". */ - compileReport = false - /** Compile quickly with minimal optimizations. */ - draftCompile = false - /** Include assert statements in compiled output. */ - checkAssertions = false - /** Script output style. (OBF, PRETTY, DETAILED)*/ - style = "OBF" - /** Sets the optimization level used by the compiler. 0=none 9=maximum. */ - optimize = 9 - /** Fail compilation if any input file contains an error. */ - strict = true - /** Specifies Java source level. ("1.6", "1.7")*/ - sourceLevel = "11" - /** The number of local workers to use when compiling permutations. */ - localWorkers = 1 - /** Emit extra information allow chrome dev tools to display Java identifiers in many places instead of JavaScript functions. 
(NONE, ONLY_METHOD_NAME, ABBREVIATED, FULL)*/ -// methodNameDisplayMode = "NONE" - - /** Java args */ - maxHeapSize = "1024m" - minHeapSize = "512m" - } - } - } - - static void addGeneratedSources(Project project, GwtCompileTask gwtc) { - if (project.configurations.getByName(JavaPlugin.ANNOTATION_PROCESSOR_CONFIGURATION_NAME).dependencies) { - (gwtc.src as ConfigurableFileCollection).from( - (project.tasks.getByName(JavaPlugin.COMPILE_JAVA_TASK_NAME) as JavaCompile).options.generatedSourceOutputDirectory - ) - } - project.configurations.getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME).allDependencies.withType(ProjectDependency)*.dependencyProject*.each { - Project p -> addGeneratedSources(p, gwtc) - } - } - -} +//import de.esoco.gwt.gradle.GwtLibPlugin +//import de.esoco.gwt.gradle.GwtPlugin +//import de.esoco.gwt.gradle.extension.GwtExtension +//import de.esoco.gwt.gradle.task.GwtCheckTask +//import de.esoco.gwt.gradle.task.GwtCompileTask +//import groovy.transform.CompileStatic +//import org.gradle.api.Project +//import org.gradle.api.artifacts.ProjectDependency +//import org.gradle.api.file.ConfigurableFileCollection +//import org.gradle.api.plugins.JavaPlugin +//import org.gradle.api.tasks.compile.JavaCompile +// +//import java.nio.file.Files +// +///** +// * Helper to simplify / centralize configuring gwt plugins in build files +// */ +//@CompileStatic +//class GwtTools { +// +// static GwtExtension gwtLib(Project p) { +// p.plugins.apply(GwtLibPlugin) +// GwtExtension ext = p.extensions.getByType(GwtExtension) +// applyDefaults(p, ext) +// return ext +// } +// static GwtExtension gwtCompile(Project p, String module, String description) { +// p.plugins.apply(GwtPlugin) +// GwtExtension ext = p.extensions.getByType(GwtExtension) +// applyDefaults(p, ext, true) +// +// // Apply our module settings to and gwtc task; +// // currently, there should only be one such task, +// // but we used to have two, and may have two again later, +// // so we'll leave this setup to be friendly-for-reuse +// p.tasks.withType(GwtCompileTask).all { +// GwtCompileTask gwtc -> +// applyModuleSettings p, gwtc, module,description +// } +// // This GWT plugin will fail if tests are run after compilation, instead +// // we suppress running the test at all, and ensure that it doesn't check +// // if it even can be run until after compile finishes. +// p.tasks.withType(GwtCheckTask).configureEach {t -> +// t.mustRunAfter(p.tasks.withType(GwtCompileTask)) +// t.onlyIf { false } +// } +// +// return ext +// } +// +// static void applyModuleSettings(Project p, GwtCompileTask gwtc, String mod, String description) { +// gwtc.onlyIf WebTools.&shouldRun +// boolean gwtDev = p.findProperty('gwtDev') == 'true' +// String extras = new File(p.buildDir, "gwt/dhapi/extra").absolutePath +// +// GwtExtension gwt = p.extensions.findByType(GwtExtension) +// +// gwt.with { +// module "${mod}${gwtDev ? 
'Dev' : ''}" +// compile.with { +// style = 'PRETTY' +// generateJsInteropExports = true +// // TODO move this down a line when we want to give clients js that is not super strict / rigged to blow +// checkAssertions = true +// if (gwtDev) { +// saveSource = true +// extra = extras +// logLevel = 'INFO' +// draftCompile = true +// } +// } +// } +// +// p.gradle.projectsEvaluated { +// addGeneratedSources(p, gwtc) +// } +// +// gwtDev && gwtc.doFirst { +// gwtc.logger.quiet('Running in gwt dev mode; saving source to {}/dh/src', extras) +// } +// } +// +// static void applyDefaults(Project p, GwtExtension gwt, boolean compile = false) { +// gwt.gwtVersion = Classpaths.GWT_VERSION +// gwt.jettyVersion = Classpaths.JETTY_VERSION +// p.configurations.each { +// it.exclude group: 'net.sourceforge.htmlunit' +// it.exclude group: 'org.eclipse.jetty' +// } +// if (compile) { +// +// String warPath = new File(p.buildDir, 'gwt').absolutePath +// +// gwt.compile.with { +// // See https://github.com/esoco/gwt-gradle-plugin for all options +// /** The level of logging detail (ERROR, WARN, INFO, TRACE, DEBUG, SPAM, ALL) */ +// logLevel = "INFO" +// /** Where to write output files */ +// war = warPath +// /** Compile a report that tells the "Story of Your Compile". */ +// compileReport = false +// /** Compile quickly with minimal optimizations. */ +// draftCompile = false +// /** Include assert statements in compiled output. */ +// checkAssertions = false +// /** Script output style. (OBF, PRETTY, DETAILED)*/ +// style = "OBF" +// /** Sets the optimization level used by the compiler. 0=none 9=maximum. */ +// optimize = 9 +// /** Fail compilation if any input file contains an error. */ +// strict = true +// /** Specifies Java source level. ("1.6", "1.7")*/ +// sourceLevel = "11" +// /** The number of local workers to use when compiling permutations. */ +// localWorkers = 1 +// /** Emit extra information allow chrome dev tools to display Java identifiers in many places instead of JavaScript functions. 
(NONE, ONLY_METHOD_NAME, ABBREVIATED, FULL)*/ +//// methodNameDisplayMode = "NONE" +// +// /** Java args */ +// maxHeapSize = "1024m" +// minHeapSize = "512m" +// } +// } +// } +// +// static void addGeneratedSources(Project project, GwtCompileTask gwtc) { +// if (project.configurations.getByName(JavaPlugin.ANNOTATION_PROCESSOR_CONFIGURATION_NAME).dependencies) { +// (gwtc.src as ConfigurableFileCollection).from( +// (project.tasks.getByName(JavaPlugin.COMPILE_JAVA_TASK_NAME) as JavaCompile).options.generatedSourceOutputDirectory +// ) +// } +// project.configurations.getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME).allDependencies.withType(ProjectDependency)*.dependencyProject*.each { +// Project p -> addGeneratedSources(p, gwtc) +// } +// } +// +//} diff --git a/web/client-api/client-api.gradle b/web/client-api/client-api.gradle index 2aba31ade74..6398392f385 100644 --- a/web/client-api/client-api.gradle +++ b/web/client-api/client-api.gradle @@ -6,6 +6,7 @@ import io.deephaven.tools.docker.WaitForHealthyContainer plugins { id 'io.deephaven.project.register' id 'io.deephaven.deephaven-in-docker' + id 'gwt-compiler' } evaluationDependsOn(Docker.registryProject('selenium')) @@ -36,12 +37,29 @@ Classpaths.inheritElemental(project, 'elemental2-core', 'implementation') Classpaths.inheritElemental(project, 'elemental2-promise', 'implementation') Classpaths.inheritElemental(project, 'elemental2-dom', 'implementation') -GwtTools.gwtCompile project, 'io.deephaven.web.DeephavenApi', 'Create a jar of client JS API' +//GwtTools.gwtCompile project, 'io.deephaven.web.DeephavenApi', 'Create a jar of client JS API' + +evaluationDependsOn ':web-shared-beans' +evaluationDependsOn ':web-client-backplane' +gwt { + gwtVersion = '2.11.0' + modules 'io.deephaven.web.DeephavenApi' + + src += files(project.files(project(':web-shared-beans').sourceSets.main.allSource.srcDirs)) + src += files(project.files(project(':web-client-backplane').sourceSets.main.allSource.srcDirs)) + src += files(project.files(project(':open-api-shared-fu').sourceSets.main.allSource.srcDirs)) + + maxHeapSize '1024m' + + jsInteropExports{ + setGenerate true + } +} def jsOutput = layout.buildDirectory.dir('js') def gwtOutput = tasks.register('gwtOutput', Sync) { includeEmptyDirs = false - from(tasks.getByName('gwtCompile').outputs.files) { + from(tasks.getByName('compileGwt').outputs.files) { // only copy the dhapi module, and don't give it a wrapper directory include 'dhapi/**' eachFile { it.path = 'jsapi/' + it.path.substring('dhapi/'.length()) } diff --git a/web/client-backplane/client-backplane.gradle b/web/client-backplane/client-backplane.gradle index 789e55560d8..6a43d7c2124 100644 --- a/web/client-backplane/client-backplane.gradle +++ b/web/client-backplane/client-backplane.gradle @@ -7,7 +7,7 @@ evaluationDependsOn ':proto:proto-backplane-grpc' apply from: "$rootDir/gradle/web-common.gradle" -GwtTools.gwtLib project +//GwtTools.gwtLib project Classpaths.inheritJsInterop(project, 'base', 'implementation') Classpaths.inheritJsInterop(project, 'jsinterop-annotations', 'compileOnly') Classpaths.inheritElemental(project, 'elemental2-core', 'implementation') diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/TableServiceClient.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/TableServiceClient.java index 1ebc02af377..26614581ffe 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/TableServiceClient.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/TableServiceClient.java @@ -49,8 +49,6 @@ import jsinterop.base.Js; import jsinterop.base.JsPropertyMap; -import javax.validation.constraints.NotNull; - @JsType( isNative = true, name = "dhinternal.io.deephaven.proto.table_pb_service.TableServiceClient", diff --git a/web/shared-beans/shared-beans.gradle b/web/shared-beans/shared-beans.gradle index fce2b3afde0..17b7319784b 100644 --- a/web/shared-beans/shared-beans.gradle +++ b/web/shared-beans/shared-beans.gradle @@ -5,7 +5,7 @@ plugins { apply from: "$rootDir/gradle/web-common.gradle" -GwtTools.gwtLib project +//GwtTools.gwtLib project configurations { testImplementation.extendsFrom junit diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Range.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Range.java index 16ef88a4e60..dd27d94535d 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Range.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Range.java @@ -3,7 +3,6 @@ // package io.deephaven.web.shared.data; -import javax.annotation.Nonnull; import java.io.Serializable; /** @@ -45,7 +44,7 @@ void setLast(long last) { } @Override - public int compareTo(@Nonnull Range o) { + public int compareTo(Range o) { return Long.compare(first, o.first); } diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/TableSubscriptionRequest.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/TableSubscriptionRequest.java index 3dc940db123..eff667c2899 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/TableSubscriptionRequest.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/TableSubscriptionRequest.java @@ -3,7 +3,6 @@ // package io.deephaven.web.shared.data; -import javax.annotation.Nullable; import java.io.Serializable; import java.util.BitSet; @@ -35,7 +34,7 @@ public TableSubscriptionRequest() { } - public TableSubscriptionRequest(int subscriptionId, @Nullable RangeSet rows, BitSet columns) { + public TableSubscriptionRequest(int subscriptionId, RangeSet rows, BitSet columns) { this.subscriptionId = subscriptionId; this.rows = rows; this.columns = columns; From c3e6dba8dfdc53899e3283134af5b9c6c1fb7cc5 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Mon, 26 Feb 2024 11:56:38 -0600 Subject: [PATCH 051/219] WIP --- buildSrc/src/main/groovy/Classpaths.groovy | 3 ++- .../groovy/io.deephaven.repository-conventions.gradle | 4 ++++ web/client-api/client-api.gradle | 11 ++++++++++- 3 files changed, 16 insertions(+), 2 deletions(-) diff --git a/buildSrc/src/main/groovy/Classpaths.groovy b/buildSrc/src/main/groovy/Classpaths.groovy index 23d98b50539..f69830ea7fe 100644 --- a/buildSrc/src/main/groovy/Classpaths.groovy +++ b/buildSrc/src/main/groovy/Classpaths.groovy @@ -24,7 +24,8 @@ class Classpaths { static final String ELEMENTAL_VERSION = '1.2.1' static final String GWT_GROUP = 'org.gwtproject' - static final String GWT_VERSION = '2.11.0' + static final String GWT_VERSION = '2.12.0-instanceof-SNAPSHOT' +// static final String GWT_VERSION = '2.11.0' static final String JAVA_PARSER_GROUP = 'com.github.javaparser' static final String JAVA_PARSER_NAME = 'javaparser-core' diff --git a/buildSrc/src/main/groovy/io.deephaven.repository-conventions.gradle 
b/buildSrc/src/main/groovy/io.deephaven.repository-conventions.gradle index 1deccf352c0..92fcd2771cc 100644 --- a/buildSrc/src/main/groovy/io.deephaven.repository-conventions.gradle +++ b/buildSrc/src/main/groovy/io.deephaven.repository-conventions.gradle @@ -13,4 +13,8 @@ repositories { includeGroup 'org.apache.kafka' } } +// maven { +// url 'https://oss.sonatype.org/content/repositories/snapshots/' +// } + mavenLocal() } diff --git a/web/client-api/client-api.gradle b/web/client-api/client-api.gradle index 6398392f385..6676c504be8 100644 --- a/web/client-api/client-api.gradle +++ b/web/client-api/client-api.gradle @@ -41,8 +41,14 @@ Classpaths.inheritElemental(project, 'elemental2-dom', 'implementation') evaluationDependsOn ':web-shared-beans' evaluationDependsOn ':web-client-backplane' +repositories { + maven { + url 'https://oss.sonatype.org/content/repositories/snapshots/' + } +} gwt { - gwtVersion = '2.11.0' + gwtVersion = "2.12.0-instanceof-SNAPSHOT" +// gwtVersion = "2.11.0" modules 'io.deephaven.web.DeephavenApi' src += files(project.files(project(':web-shared-beans').sourceSets.main.allSource.srcDirs)) @@ -51,9 +57,12 @@ gwt { maxHeapSize '1024m' + extraJvmArgs += ['-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005'] + jsInteropExports{ setGenerate true } +// extraDir layout.buildDirectory.dir('gwt/extra') } def jsOutput = layout.buildDirectory.dir('js') From dd6e35007a48c4bf6d9816df99c77ccfe27d8e04 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Mon, 25 Mar 2024 12:37:02 -0500 Subject: [PATCH 052/219] Clean up to consider merging --- buildSrc/src/main/groovy/Classpaths.groovy | 3 +-- web/client-api/client-api.gradle | 18 ++++-------------- 2 files changed, 5 insertions(+), 16 deletions(-) diff --git a/buildSrc/src/main/groovy/Classpaths.groovy b/buildSrc/src/main/groovy/Classpaths.groovy index f69830ea7fe..23d98b50539 100644 --- a/buildSrc/src/main/groovy/Classpaths.groovy +++ b/buildSrc/src/main/groovy/Classpaths.groovy @@ -24,8 +24,7 @@ class Classpaths { static final String ELEMENTAL_VERSION = '1.2.1' static final String GWT_GROUP = 'org.gwtproject' - static final String GWT_VERSION = '2.12.0-instanceof-SNAPSHOT' -// static final String GWT_VERSION = '2.11.0' + static final String GWT_VERSION = '2.11.0' static final String JAVA_PARSER_GROUP = 'com.github.javaparser' static final String JAVA_PARSER_NAME = 'javaparser-core' diff --git a/web/client-api/client-api.gradle b/web/client-api/client-api.gradle index 6676c504be8..028db31557d 100644 --- a/web/client-api/client-api.gradle +++ b/web/client-api/client-api.gradle @@ -10,6 +10,8 @@ plugins { } evaluationDependsOn(Docker.registryProject('selenium')) +evaluationDependsOn ':web-shared-beans' +evaluationDependsOn ':web-client-backplane' apply from: "$rootDir/gradle/web-client.gradle" @@ -37,18 +39,8 @@ Classpaths.inheritElemental(project, 'elemental2-core', 'implementation') Classpaths.inheritElemental(project, 'elemental2-promise', 'implementation') Classpaths.inheritElemental(project, 'elemental2-dom', 'implementation') -//GwtTools.gwtCompile project, 'io.deephaven.web.DeephavenApi', 'Create a jar of client JS API' - -evaluationDependsOn ':web-shared-beans' -evaluationDependsOn ':web-client-backplane' -repositories { - maven { - url 'https://oss.sonatype.org/content/repositories/snapshots/' - } -} gwt { - gwtVersion = "2.12.0-instanceof-SNAPSHOT" -// gwtVersion = "2.11.0" + gwtVersion = Classpaths.GWT_VERSION modules 'io.deephaven.web.DeephavenApi' src += 
files(project.files(project(':web-shared-beans').sourceSets.main.allSource.srcDirs)) @@ -57,9 +49,7 @@ gwt { maxHeapSize '1024m' - extraJvmArgs += ['-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005'] - - jsInteropExports{ + jsInteropExports { setGenerate true } // extraDir layout.buildDirectory.dir('gwt/extra') From cfbe4ca13a2fd5d5bb0a10b2b4a632b8f61ce4b0 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Wed, 5 Jun 2024 14:01:23 -0500 Subject: [PATCH 053/219] Revert plugin change, apply rules to change groupId, and clean up old code --- buildSrc/build.gradle | 2 +- buildSrc/src/main/groovy/GwtTools.groovy | 275 +++++++++--------- ...io.deephaven.repository-conventions.gradle | 4 - gradle/web-client.gradle | 8 - gradle/web-common.gradle | 20 -- open-api/lang-parser/lang-parser.gradle | 2 - open-api/lang-tools/lang-tools.gradle | 2 - open-api/shared-fu/shared-fu.gradle | 2 - web/client-api/client-api.gradle | 23 +- web/client-backplane/client-backplane.gradle | 9 +- web/shared-beans/shared-beans.gradle | 4 +- .../io/deephaven/web/shared/data/Range.java | 3 +- .../shared/data/TableSubscriptionRequest.java | 3 +- 13 files changed, 148 insertions(+), 209 deletions(-) delete mode 100644 gradle/web-client.gradle delete mode 100644 gradle/web-common.gradle diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 9230722e592..a413615a03c 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -17,7 +17,7 @@ repositories { } dependencies { - implementation('org.docstr:gwt-gradle-plugin:1.1.30') { + implementation('de.esoco.gwt:gwt-gradle-plugin:1.2.0') { exclude group: 'org.codehaus.groovy' because('needed for GwtTools') } diff --git a/buildSrc/src/main/groovy/GwtTools.groovy b/buildSrc/src/main/groovy/GwtTools.groovy index ef728cce19d..dfed4edb5a4 100644 --- a/buildSrc/src/main/groovy/GwtTools.groovy +++ b/buildSrc/src/main/groovy/GwtTools.groovy @@ -1,137 +1,138 @@ -//import de.esoco.gwt.gradle.GwtLibPlugin -//import de.esoco.gwt.gradle.GwtPlugin -//import de.esoco.gwt.gradle.extension.GwtExtension -//import de.esoco.gwt.gradle.task.GwtCheckTask -//import de.esoco.gwt.gradle.task.GwtCompileTask -//import groovy.transform.CompileStatic -//import org.gradle.api.Project -//import org.gradle.api.artifacts.ProjectDependency -//import org.gradle.api.file.ConfigurableFileCollection -//import org.gradle.api.plugins.JavaPlugin -//import org.gradle.api.tasks.compile.JavaCompile -// -//import java.nio.file.Files -// -///** -// * Helper to simplify / centralize configuring gwt plugins in build files -// */ -//@CompileStatic -//class GwtTools { -// -// static GwtExtension gwtLib(Project p) { -// p.plugins.apply(GwtLibPlugin) -// GwtExtension ext = p.extensions.getByType(GwtExtension) -// applyDefaults(p, ext) -// return ext -// } -// static GwtExtension gwtCompile(Project p, String module, String description) { -// p.plugins.apply(GwtPlugin) -// GwtExtension ext = p.extensions.getByType(GwtExtension) -// applyDefaults(p, ext, true) -// -// // Apply our module settings to and gwtc task; -// // currently, there should only be one such task, -// // but we used to have two, and may have two again later, -// // so we'll leave this setup to be friendly-for-reuse -// p.tasks.withType(GwtCompileTask).all { -// GwtCompileTask gwtc -> -// applyModuleSettings p, gwtc, module,description -// } -// // This GWT plugin will fail if tests are run after compilation, instead -// // we suppress running the test at all, and ensure that it doesn't check -// // if it even can be run until 
after compile finishes. -// p.tasks.withType(GwtCheckTask).configureEach {t -> -// t.mustRunAfter(p.tasks.withType(GwtCompileTask)) -// t.onlyIf { false } -// } -// -// return ext -// } -// -// static void applyModuleSettings(Project p, GwtCompileTask gwtc, String mod, String description) { -// gwtc.onlyIf WebTools.&shouldRun -// boolean gwtDev = p.findProperty('gwtDev') == 'true' -// String extras = new File(p.buildDir, "gwt/dhapi/extra").absolutePath -// -// GwtExtension gwt = p.extensions.findByType(GwtExtension) -// -// gwt.with { -// module "${mod}${gwtDev ? 'Dev' : ''}" -// compile.with { -// style = 'PRETTY' -// generateJsInteropExports = true -// // TODO move this down a line when we want to give clients js that is not super strict / rigged to blow -// checkAssertions = true -// if (gwtDev) { -// saveSource = true -// extra = extras -// logLevel = 'INFO' -// draftCompile = true -// } -// } -// } -// -// p.gradle.projectsEvaluated { -// addGeneratedSources(p, gwtc) -// } -// -// gwtDev && gwtc.doFirst { -// gwtc.logger.quiet('Running in gwt dev mode; saving source to {}/dh/src', extras) -// } -// } -// -// static void applyDefaults(Project p, GwtExtension gwt, boolean compile = false) { -// gwt.gwtVersion = Classpaths.GWT_VERSION -// gwt.jettyVersion = Classpaths.JETTY_VERSION -// p.configurations.each { -// it.exclude group: 'net.sourceforge.htmlunit' -// it.exclude group: 'org.eclipse.jetty' -// } -// if (compile) { -// -// String warPath = new File(p.buildDir, 'gwt').absolutePath -// -// gwt.compile.with { -// // See https://github.com/esoco/gwt-gradle-plugin for all options -// /** The level of logging detail (ERROR, WARN, INFO, TRACE, DEBUG, SPAM, ALL) */ -// logLevel = "INFO" -// /** Where to write output files */ -// war = warPath -// /** Compile a report that tells the "Story of Your Compile". */ -// compileReport = false -// /** Compile quickly with minimal optimizations. */ -// draftCompile = false -// /** Include assert statements in compiled output. */ -// checkAssertions = false -// /** Script output style. (OBF, PRETTY, DETAILED)*/ -// style = "OBF" -// /** Sets the optimization level used by the compiler. 0=none 9=maximum. */ -// optimize = 9 -// /** Fail compilation if any input file contains an error. */ -// strict = true -// /** Specifies Java source level. ("1.6", "1.7")*/ -// sourceLevel = "11" -// /** The number of local workers to use when compiling permutations. */ -// localWorkers = 1 -// /** Emit extra information allow chrome dev tools to display Java identifiers in many places instead of JavaScript functions. 
(NONE, ONLY_METHOD_NAME, ABBREVIATED, FULL)*/ -//// methodNameDisplayMode = "NONE" -// -// /** Java args */ -// maxHeapSize = "1024m" -// minHeapSize = "512m" -// } -// } -// } -// -// static void addGeneratedSources(Project project, GwtCompileTask gwtc) { -// if (project.configurations.getByName(JavaPlugin.ANNOTATION_PROCESSOR_CONFIGURATION_NAME).dependencies) { -// (gwtc.src as ConfigurableFileCollection).from( -// (project.tasks.getByName(JavaPlugin.COMPILE_JAVA_TASK_NAME) as JavaCompile).options.generatedSourceOutputDirectory -// ) -// } -// project.configurations.getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME).allDependencies.withType(ProjectDependency)*.dependencyProject*.each { -// Project p -> addGeneratedSources(p, gwtc) -// } -// } -// -//} +import de.esoco.gwt.gradle.GwtLibPlugin +import de.esoco.gwt.gradle.GwtPlugin +import de.esoco.gwt.gradle.extension.GwtExtension +import de.esoco.gwt.gradle.task.GwtCheckTask +import de.esoco.gwt.gradle.task.GwtCompileTask +import groovy.transform.CompileStatic +import org.gradle.api.Project +import org.gradle.api.artifacts.Configuration +import org.gradle.api.artifacts.ProjectDependency +import org.gradle.api.file.ConfigurableFileCollection +import org.gradle.api.plugins.JavaPlugin +import org.gradle.api.tasks.compile.JavaCompile + +/** + * Helper to simplify / centralize configuring gwt plugins in build files + */ +@CompileStatic +class GwtTools { + + static GwtExtension gwtLib(Project p) { + p.plugins.apply(GwtLibPlugin) + GwtExtension ext = p.extensions.getByType(GwtExtension) + applyDefaults(p, ext) + return ext + } + static GwtExtension gwtCompile(Project p, String module, String description) { + p.plugins.apply(GwtPlugin) + GwtExtension ext = p.extensions.getByType(GwtExtension) + applyDefaults(p, ext, true) + + // Apply our module settings to and gwtc task; + // currently, there should only be one such task, + // but we used to have two, and may have two again later, + // so we'll leave this setup to be friendly-for-reuse + p.tasks.withType(GwtCompileTask).all { + GwtCompileTask gwtc -> + applyModuleSettings p, gwtc, module,description + } + // This GWT plugin will fail if tests are run after compilation, instead + // we suppress running the test at all, and ensure that it doesn't check + // if it even can be run until after compile finishes. + p.tasks.withType(GwtCheckTask).configureEach {t -> + t.mustRunAfter(p.tasks.withType(GwtCompileTask)) + t.onlyIf { false } + } + + return ext + } + + static void applyModuleSettings(Project p, GwtCompileTask gwtc, String mod, String description) { + gwtc.onlyIf WebTools.&shouldRun + boolean gwtDev = p.findProperty('gwtDev') == 'true' + String extras = new File(p.buildDir, "gwt/dhapi/extra").absolutePath + + GwtExtension gwt = p.extensions.findByType(GwtExtension) + + gwt.with { + module "${mod}${gwtDev ? 
'Dev' : ''}" + compile.with { + style = 'PRETTY' + generateJsInteropExports = true + // TODO move this down a line when we want to give clients js that is not super strict / rigged to blow + checkAssertions = true + if (gwtDev) { + saveSource = true + extra = extras + logLevel = 'INFO' + draftCompile = true + } + } + } + + p.gradle.projectsEvaluated { + addGeneratedSources(p, gwtc) + } + + gwtDev && gwtc.doFirst { + gwtc.logger.quiet('Running in gwt dev mode; saving source to {}/dh/src', extras) + } + } + + static void applyDefaults(Project p, GwtExtension gwt, boolean compile = false) { + gwt.gwtVersion = Classpaths.GWT_VERSION + gwt.jettyVersion = Classpaths.JETTY_VERSION + p.configurations.all { Configuration c -> + c.resolutionStrategy.dependencySubstitution { sub -> + sub.substitute(sub.module("com.google.gwt:gwt-codeserver")).using(sub.module('org.gwtproject:gwt-codeserver:2.11.0')) + sub.substitute(sub.module("com.google.gwt:gwt-user")).using(sub.module('org.gwtproject:gwt-user:2.11.0')) + sub.substitute(sub.module("com.google.gwt:gwt-dev")).using(sub.module('org.gwtproject:gwt-dev:2.11.0')) + } + } + if (compile) { + String warPath = new File(p.buildDir, 'gwt').absolutePath + + gwt.compile.with { + // See https://github.com/esoco/gwt-gradle-plugin for all options + /** The level of logging detail (ERROR, WARN, INFO, TRACE, DEBUG, SPAM, ALL) */ + logLevel = "INFO" + /** Where to write output files */ + war = warPath + /** Compile a report that tells the "Story of Your Compile". */ + compileReport = false + /** Compile quickly with minimal optimizations. */ + draftCompile = false + /** Include assert statements in compiled output. */ + checkAssertions = false + /** Script output style. (OBF, PRETTY, DETAILED)*/ + style = "OBF" + /** Sets the optimization level used by the compiler. 0=none 9=maximum. */ + optimize = 9 + /** Fail compilation if any input file contains an error. */ + strict = true + /** Specifies Java source level. ("1.6", "1.7")*/ + sourceLevel = "11" + /** The number of local workers to use when compiling permutations. */ + localWorkers = 1 + /** Emit extra information allow chrome dev tools to display Java identifiers in many places instead of JavaScript functions. 
(NONE, ONLY_METHOD_NAME, ABBREVIATED, FULL)*/ +// methodNameDisplayMode = "NONE" + + /** Java args */ + maxHeapSize = "1024m" + minHeapSize = "512m" + } + } + } + + static void addGeneratedSources(Project project, GwtCompileTask gwtc) { + if (project.configurations.getByName(JavaPlugin.ANNOTATION_PROCESSOR_CONFIGURATION_NAME).dependencies) { + (gwtc.src as ConfigurableFileCollection).from( + (project.tasks.getByName(JavaPlugin.COMPILE_JAVA_TASK_NAME) as JavaCompile).options.generatedSourceOutputDirectory + ) + } + project.configurations.getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME).allDependencies.withType(ProjectDependency)*.dependencyProject*.each { + Project p -> addGeneratedSources(p, gwtc) + } + } + +} diff --git a/buildSrc/src/main/groovy/io.deephaven.repository-conventions.gradle b/buildSrc/src/main/groovy/io.deephaven.repository-conventions.gradle index 92fcd2771cc..1deccf352c0 100644 --- a/buildSrc/src/main/groovy/io.deephaven.repository-conventions.gradle +++ b/buildSrc/src/main/groovy/io.deephaven.repository-conventions.gradle @@ -13,8 +13,4 @@ repositories { includeGroup 'org.apache.kafka' } } -// maven { -// url 'https://oss.sonatype.org/content/repositories/snapshots/' -// } - mavenLocal() } diff --git a/gradle/web-client.gradle b/gradle/web-client.gradle deleted file mode 100644 index 6f909db8d0c..00000000000 --- a/gradle/web-client.gradle +++ /dev/null @@ -1,8 +0,0 @@ -apply from: "$rootDir/gradle/web-common.gradle" - -// To perform production compile: gw gCo (./gradlew gwtCompile) -// To start the dev server: gw gCS (./gradlew gwtCodeServer) - -Classpaths.inheritGwt project, 'gwt-user', 'compileOnly' -Classpaths.inheritGwt project, 'gwt-user', 'testImplementation' -Classpaths.inheritGwt project, 'gwt-dev', 'testImplementation' diff --git a/gradle/web-common.gradle b/gradle/web-common.gradle deleted file mode 100644 index af8341ff820..00000000000 --- a/gradle/web-common.gradle +++ /dev/null @@ -1,20 +0,0 @@ -// basics of a java project; -apply plugin: 'java' - -// we are explicitly setting sourceSets to "standard defaults" -// since the rest of dh uses simple `src` folder names, -// and we use "project already configured themselves" -// to tell the root buildscript to leave these alone. 
-sourceSets { - main { - java.setSrcDirs (['src/main/java']) - resources.setSrcDirs (['src/main/resources']) - resources.setExcludes([]) - } - test { - java.setSrcDirs (['src/test/java']) - resources.setSrcDirs (['src/test/resources']) - resources.setExcludes([]) - } -} - diff --git a/open-api/lang-parser/lang-parser.gradle b/open-api/lang-parser/lang-parser.gradle index d785dc7c187..dc1b7bf805f 100644 --- a/open-api/lang-parser/lang-parser.gradle +++ b/open-api/lang-parser/lang-parser.gradle @@ -7,8 +7,6 @@ plugins { id 'io.deephaven.project.register' } -apply from: "$rootDir/gradle/web-common.gradle" - dependencies { implementation project(':Util') api project(':open-api-shared-fu') diff --git a/open-api/lang-tools/lang-tools.gradle b/open-api/lang-tools/lang-tools.gradle index 65745757d87..b91c8029f48 100644 --- a/open-api/lang-tools/lang-tools.gradle +++ b/open-api/lang-tools/lang-tools.gradle @@ -4,8 +4,6 @@ plugins { id 'java-library' } -apply from: "$rootDir/gradle/web-common.gradle" - Classpaths.inheritCommonsText(project, 'implementation') dependencies { diff --git a/open-api/shared-fu/shared-fu.gradle b/open-api/shared-fu/shared-fu.gradle index 143e4e15d74..6dbf79ed1cb 100644 --- a/open-api/shared-fu/shared-fu.gradle +++ b/open-api/shared-fu/shared-fu.gradle @@ -2,6 +2,4 @@ plugins { id 'io.deephaven.project.register' } -apply from: "$rootDir/gradle/web-common.gradle" - // Contains all general purpose utilities used across all open-api modules. diff --git a/web/client-api/client-api.gradle b/web/client-api/client-api.gradle index 028db31557d..ad7dac4b5ea 100644 --- a/web/client-api/client-api.gradle +++ b/web/client-api/client-api.gradle @@ -6,14 +6,9 @@ import io.deephaven.tools.docker.WaitForHealthyContainer plugins { id 'io.deephaven.project.register' id 'io.deephaven.deephaven-in-docker' - id 'gwt-compiler' } evaluationDependsOn(Docker.registryProject('selenium')) -evaluationDependsOn ':web-shared-beans' -evaluationDependsOn ':web-client-backplane' - -apply from: "$rootDir/gradle/web-client.gradle" configurations { js @@ -39,26 +34,12 @@ Classpaths.inheritElemental(project, 'elemental2-core', 'implementation') Classpaths.inheritElemental(project, 'elemental2-promise', 'implementation') Classpaths.inheritElemental(project, 'elemental2-dom', 'implementation') -gwt { - gwtVersion = Classpaths.GWT_VERSION - modules 'io.deephaven.web.DeephavenApi' - - src += files(project.files(project(':web-shared-beans').sourceSets.main.allSource.srcDirs)) - src += files(project.files(project(':web-client-backplane').sourceSets.main.allSource.srcDirs)) - src += files(project.files(project(':open-api-shared-fu').sourceSets.main.allSource.srcDirs)) - - maxHeapSize '1024m' - - jsInteropExports { - setGenerate true - } -// extraDir layout.buildDirectory.dir('gwt/extra') -} +GwtTools.gwtCompile project, 'io.deephaven.web.DeephavenApi', 'Create a jar of client JS API' def jsOutput = layout.buildDirectory.dir('js') def gwtOutput = tasks.register('gwtOutput', Sync) { includeEmptyDirs = false - from(tasks.getByName('compileGwt').outputs.files) { + from(tasks.getByName('gwtCompile').outputs.files) { // only copy the dhapi module, and don't give it a wrapper directory include 'dhapi/**' eachFile { it.path = 'jsapi/' + it.path.substring('dhapi/'.length()) } diff --git a/web/client-backplane/client-backplane.gradle b/web/client-backplane/client-backplane.gradle index 6a43d7c2124..0a9fcab9750 100644 --- a/web/client-backplane/client-backplane.gradle +++ b/web/client-backplane/client-backplane.gradle @@ -2,13 
+2,8 @@ plugins { id 'io.deephaven.project.register' } -evaluationDependsOn ':proto' -evaluationDependsOn ':proto:proto-backplane-grpc' - -apply from: "$rootDir/gradle/web-common.gradle" - -//GwtTools.gwtLib project +GwtTools.gwtLib project Classpaths.inheritJsInterop(project, 'base', 'implementation') Classpaths.inheritJsInterop(project, 'jsinterop-annotations', 'compileOnly') Classpaths.inheritElemental(project, 'elemental2-core', 'implementation') -Classpaths.inheritElemental(project, 'elemental2-promise', 'implementation') \ No newline at end of file +Classpaths.inheritElemental(project, 'elemental2-promise', 'implementation') diff --git a/web/shared-beans/shared-beans.gradle b/web/shared-beans/shared-beans.gradle index 17b7319784b..8cbca1fba04 100644 --- a/web/shared-beans/shared-beans.gradle +++ b/web/shared-beans/shared-beans.gradle @@ -3,9 +3,7 @@ plugins { id 'java-library' } -apply from: "$rootDir/gradle/web-common.gradle" - -//GwtTools.gwtLib project +GwtTools.gwtLib project configurations { testImplementation.extendsFrom junit diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Range.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Range.java index dd27d94535d..16ef88a4e60 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Range.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Range.java @@ -3,6 +3,7 @@ // package io.deephaven.web.shared.data; +import javax.annotation.Nonnull; import java.io.Serializable; /** @@ -44,7 +45,7 @@ void setLast(long last) { } @Override - public int compareTo(Range o) { + public int compareTo(@Nonnull Range o) { return Long.compare(first, o.first); } diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/TableSubscriptionRequest.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/TableSubscriptionRequest.java index eff667c2899..3dc940db123 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/TableSubscriptionRequest.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/TableSubscriptionRequest.java @@ -3,6 +3,7 @@ // package io.deephaven.web.shared.data; +import javax.annotation.Nullable; import java.io.Serializable; import java.util.BitSet; @@ -34,7 +35,7 @@ public TableSubscriptionRequest() { } - public TableSubscriptionRequest(int subscriptionId, RangeSet rows, BitSet columns) { + public TableSubscriptionRequest(int subscriptionId, @Nullable RangeSet rows, BitSet columns) { this.subscriptionId = subscriptionId; this.rows = rows; this.columns = columns; From 7e16d95a65c385bf937e7b353113aa187a428c72 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Wed, 5 Jun 2024 16:46:38 -0500 Subject: [PATCH 054/219] Restore test dependency, and fix elemental changes in tests --- web/client-api/client-api.gradle | 1 + .../io/deephaven/web/client/api/AbstractAsyncGwtTestCase.java | 4 ++-- .../io/deephaven/web/client/api/TableManipulationTestGwt.java | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/web/client-api/client-api.gradle b/web/client-api/client-api.gradle index ad7dac4b5ea..1c480dc5b8f 100644 --- a/web/client-api/client-api.gradle +++ b/web/client-api/client-api.gradle @@ -33,6 +33,7 @@ dependencies { Classpaths.inheritElemental(project, 'elemental2-core', 'implementation') Classpaths.inheritElemental(project, 'elemental2-promise', 'implementation') Classpaths.inheritElemental(project, 'elemental2-dom', 'implementation') +Classpaths.inheritGwt(project, 'gwt-user', 
'testImplementation') GwtTools.gwtCompile project, 'io.deephaven.web.DeephavenApi', 'Create a jar of client JS API' diff --git a/web/client-api/src/test/java/io/deephaven/web/client/api/AbstractAsyncGwtTestCase.java b/web/client-api/src/test/java/io/deephaven/web/client/api/AbstractAsyncGwtTestCase.java index 7b36f3b98a5..557f0ec2952 100644 --- a/web/client-api/src/test/java/io/deephaven/web/client/api/AbstractAsyncGwtTestCase.java +++ b/web/client-api/src/test/java/io/deephaven/web/client/api/AbstractAsyncGwtTestCase.java @@ -376,7 +376,7 @@ protected Promise assertNextViewportIs(JsTable table, Function r.get(a)); + return viewportData.getRows().map((r, index) -> r.get(a)); } protected Promise assertNextViewportIs(JsTable table, double... expected) { @@ -389,7 +389,7 @@ protected Promise assertNextViewportIs(JsTable table, double... expecte public static List filterColumns(JsTable table, JsPredicate filter) { List matches = new ArrayList<>(); - table.getColumns().forEach((c, i, arr) -> { + table.getColumns().forEach((c, i) -> { if (filter.test(c)) { matches.add(c); } diff --git a/web/client-api/src/test/java/io/deephaven/web/client/api/TableManipulationTestGwt.java b/web/client-api/src/test/java/io/deephaven/web/client/api/TableManipulationTestGwt.java index b0299cbc9ca..3cea8926e8e 100644 --- a/web/client-api/src/test/java/io/deephaven/web/client/api/TableManipulationTestGwt.java +++ b/web/client-api/src/test/java/io/deephaven/web/client/api/TableManipulationTestGwt.java @@ -505,7 +505,7 @@ public void testDateTimeInFilters() { table.setViewport(0, 2, null); return assertUpdateReceived(table, viewportData -> { - viewportData.getRows().forEach((row, index, all) -> { + viewportData.getRows().forEach((row, index) -> { dates.add(row.get(table.findColumn("Timestamp")).cast()); return null; }); From 7316923d353e1213566568495e46bf029abd9733 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Thu, 6 Jun 2024 16:37:11 -0500 Subject: [PATCH 055/219] Introduce runtime version checks for flatbuffer compatibility --- buildSrc/src/main/groovy/Classpaths.groovy | 2 +- .../extensions/barrage/util/BarrageUtil.java | 74 +++++++++++++++++++ 2 files changed, 75 insertions(+), 1 deletion(-) diff --git a/buildSrc/src/main/groovy/Classpaths.groovy b/buildSrc/src/main/groovy/Classpaths.groovy index caf4f5769d1..e143b1db2c0 100644 --- a/buildSrc/src/main/groovy/Classpaths.groovy +++ b/buildSrc/src/main/groovy/Classpaths.groovy @@ -52,7 +52,7 @@ class Classpaths { static final String FLATBUFFER_GROUP = 'com.google.flatbuffers' static final String FLATBUFFER_NAME = 'flatbuffers-java' - static final String FLATBUFFER_VERSION = '2.0.3' + static final String FLATBUFFER_VERSION = '1.12.0' static final String DAGGER_GROUP = 'com.google.dagger' static final String DAGGER_NAME = 'dagger' diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java index 2bb0709898a..6a22de1d830 100755 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java @@ -3,11 +3,13 @@ // package io.deephaven.extensions.barrage.util; +import com.google.flatbuffers.Constants; import com.google.flatbuffers.FlatBufferBuilder; import com.google.protobuf.ByteString; import com.google.protobuf.ByteStringAccess; import com.google.rpc.Code; import io.deephaven.UncheckedDeephavenException; +import 
io.deephaven.barrage.flatbuf.BarrageMessageWrapper; import io.deephaven.base.ArrayUtil; import io.deephaven.base.ClassUtil; import io.deephaven.base.verify.Assert; @@ -44,6 +46,7 @@ import io.deephaven.vector.Vector; import io.grpc.stub.StreamObserver; import org.apache.arrow.flatbuf.KeyValue; +import org.apache.arrow.flatbuf.Message; import org.apache.arrow.util.Collections2; import org.apache.arrow.vector.types.TimeUnit; import org.apache.arrow.vector.types.Types; @@ -56,6 +59,8 @@ import org.jetbrains.annotations.Nullable; import java.lang.reflect.Array; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; import java.math.BigDecimal; import java.math.BigInteger; import java.time.Instant; @@ -64,6 +69,8 @@ import java.time.ZonedDateTime; import java.util.*; import java.util.function.*; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -105,6 +112,73 @@ public class BarrageUtil { private static final String ATTR_TYPE_TAG = "type"; private static final String ATTR_COMPONENT_TYPE_TAG = "componentType"; + private static final boolean ENFORCE_FLATBUFFER_VERSION_CHECK = + Configuration.getInstance().getBooleanWithDefault("barrage.version.check", true); + + static { + verifyFlatbufferCompatibility(Message.class); + verifyFlatbufferCompatibility(BarrageMessageWrapper.class); + } + + private static void verifyFlatbufferCompatibility(Class clazz) { + try { + clazz.getMethod("ValidateVersion").invoke(null); + } catch (InvocationTargetException e) { + Throwable targetException = e.getTargetException(); + if (targetException instanceof NoSuchMethodError) { + // Caused when the reflective method is found and cannot be used because the flatbuffer version doesn't + // match + String requiredVersion = extractFlatBufferVersion(targetException.getMessage()) + .orElseThrow(() -> new UncheckedDeephavenException( + "FlatBuffers version mismatch, can't read expected version", targetException)); + Optional foundVersion = Arrays.stream(Constants.class.getDeclaredMethods()) + .map(Method::getName) + .map(BarrageUtil::extractFlatBufferVersion) + .filter(Optional::isPresent) + .map(Optional::get) + .findFirst(); + String dependentLibrary = clazz.getPackage().getSpecificationTitle(); + final String message; + if (foundVersion.isEmpty()) { + message = "Library '" + dependentLibrary + "' requires FlatBuffer " + requiredVersion + + ", cannot detect present version"; + } else { + message = "Library '" + dependentLibrary + "' requires FlatBuffer " + requiredVersion + ", found " + + foundVersion.get(); + } + if (ENFORCE_FLATBUFFER_VERSION_CHECK) { + throw new UncheckedDeephavenException(message); + } else { + log.warn().append(message).endl(); + } + } else { + throw new UncheckedDeephavenException("Cannot validate flatbuffer compatibility, unexpected exception", + targetException); + } + } catch (IllegalAccessException e) { + throw new UncheckedDeephavenException( + "Cannot validate flatbuffer compatibility, " + clazz + "'s ValidateVersion() isn't accessible!", e); + } catch (NoSuchMethodException e) { + // Caused when the type isn't actually a flatbuffer Table (or the codegen format has changed) + throw new UncheckedDeephavenException( + "Cannot validate flatbuffer compatibility, " + clazz + " is not a flatbuffer table!", e); + } + } + + private static Optional extractFlatBufferVersion(String method) { + Matcher matcher = Pattern.compile("FLATBUFFERS_([0-9]+)_([0-9]+)_([0-9]+)").matcher(method); + + 
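// flatbuffers marks each release with a Constants method named e.g. FLATBUFFERS_1_12_0(); groups 1-3 capture major/minor/patch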
if (matcher.find()) { + if (Integer.valueOf(matcher.group(1)) <= 2) { + // semver, third decimal doesn't matter + return Optional.of(matcher.group(1) + "." + matcher.group(2) + ".x"); + } + // "date" version, all three components should be shown + return Optional.of(matcher.group(1) + "." + matcher.group(2) + "." + matcher.group(3)); + } + return Optional.empty(); + } + /** * These are the types that get special encoding but are otherwise not primitives. TODO (core#58): add custom * barrage serialization/deserialization support From b6c8c74c9a2151d2f63518a55baf57dc4b0a69ee Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Thu, 6 Jun 2024 20:09:16 -0500 Subject: [PATCH 056/219] Tests should also use 11 --- web/client-api/client-api.gradle | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/web/client-api/client-api.gradle b/web/client-api/client-api.gradle index 73efd301637..2ca0d47c8ff 100644 --- a/web/client-api/client-api.gradle +++ b/web/client-api/client-api.gradle @@ -88,7 +88,8 @@ artifacts { def gwtUnitTest = tasks.register('gwtUnitTest', Test) { t -> t.systemProperties = [ - 'gwt.args': ['-runStyle HtmlUnit', + 'gwt.args': ['-sourceLevel auto', + '-runStyle HtmlUnit', '-ea', '-style PRETTY', "-war ${layout.buildDirectory.dir('unitTest-war').get().asFile.absolutePath}" @@ -158,7 +159,8 @@ def gwtIntegrationTest = tasks.register('gwtIntegrationTest', Test) { t -> t.finalizedBy(deephavenDocker.endTask, stopSelenium) doFirst { def webdriverUrl = "http://localhost:${seleniumPort}/" - t.systemProperty('gwt.args', ["-runStyle io.deephaven.web.junit.RunStyleRemoteWebDriver:${webdriverUrl}?firefox", + t.systemProperty('gwt.args', ['-sourceLevel auto', + "-runStyle io.deephaven.web.junit.RunStyleRemoteWebDriver:${webdriverUrl}?firefox", '-ea', '-style PRETTY', "-setProperty dh.server=http://${deephavenDocker.containerName.get()}:10000", From ab4b8d35db4656d04f152528b545b2d06a3c3d4a Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 7 Jun 2024 16:02:38 -0500 Subject: [PATCH 057/219] Enable BigDecimal/BigInteger support (with gwt 2.12's emul) --- .../chunk/ChunkInputStreamGenerator.java | 48 +- .../web/super/java/math/BigInteger.java | 1599 +++++++++++++++++ 2 files changed, 1623 insertions(+), 24 deletions(-) create mode 100644 web/client-api/src/main/resources/io/deephaven/web/super/java/math/BigInteger.java diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java index caac27f991c..70cea430308 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java @@ -263,30 +263,30 @@ static WritableChunk extractChunkFromInputStream( // options, (Class>) type, componentType, fieldNodeIter, bufferInfoIter, is, // outChunk, outOffset, totalRows); // } - // if (type == BigInteger.class) { - // return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - // is, - // fieldNodeIter, - // bufferInfoIter, - // BigInteger::new, - // outChunk, outOffset, totalRows); - // } - // if (type == BigDecimal.class) { - // return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - // is, - // fieldNodeIter, - // bufferInfoIter, - // (final byte[] buf, final int offset, final int length) -> { - // // read the int scale value as little endian, arrow's 
endianness. - // final byte b1 = buf[offset]; - // final byte b2 = buf[offset + 1]; - // final byte b3 = buf[offset + 2]; - // final byte b4 = buf[offset + 3]; - // final int scale = b4 << 24 | (b3 & 0xFF) << 16 | (b2 & 0xFF) << 8 | (b1 & 0xFF); - // return new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale); - // }, - // outChunk, outOffset, totalRows); - // } + if (type == BigInteger.class) { + return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + is, + fieldNodeIter, + bufferInfoIter, + BigInteger::new, + outChunk, outOffset, totalRows); + } + if (type == BigDecimal.class) { + return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + is, + fieldNodeIter, + bufferInfoIter, + (final byte[] buf, final int offset, final int length) -> { + // read the int scale value as little endian, arrow's endianness. + final byte b1 = buf[offset]; + final byte b2 = buf[offset + 1]; + final byte b3 = buf[offset + 2]; + final byte b4 = buf[offset + 3]; + final int scale = b4 << 24 | (b3 & 0xFF) << 16 | (b2 & 0xFF) << 8 | (b1 & 0xFF); + return new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale); + }, + outChunk, outOffset, totalRows); + } // if (type == Instant.class) { // return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( // Long.BYTES, options, io -> { diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/java/math/BigInteger.java b/web/client-api/src/main/resources/io/deephaven/web/super/java/math/BigInteger.java new file mode 100644 index 00000000000..500451f2f96 --- /dev/null +++ b/web/client-api/src/main/resources/io/deephaven/web/super/java/math/BigInteger.java @@ -0,0 +1,1599 @@ +/* + * Copyright 2009 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + * + * INCLUDES MODIFICATIONS BY RICHARD ZSCHECH AS WELL AS GOOGLE. 
+ */ +package java.math; + +import static javaemul.internal.Coercions.ensureInt; +import static javaemul.internal.InternalPreconditions.checkCriticalArgument; +import static javaemul.internal.InternalPreconditions.checkNotNull; + +import java.io.Serializable; +import java.util.Random; +import javaemul.internal.LongUtils; + +/** + * This class represents immutable integer numbers of arbitrary length. Large + * numbers are typically used in security applications and therefore BigIntegers + * offer dedicated functionality like the generation of large prime numbers or + * the computation of modular inverse. + *
+ * Since the class was modeled to offer all the functionality as the + * {@link Integer} class does, it provides even methods that operate bitwise on + * a two's complement representation of large integers. Note however that the + * implementations favors an internal representation where magnitude and sign + * are treated separately. Hence such operations are inefficient and should be + * discouraged. In simple words: Do NOT implement any bit fields based on + * BigInteger. + */ +public class BigInteger extends Number implements Comparable, + Serializable { + + /** + * The {@code BigInteger} constant 1. + */ + public static final BigInteger ONE = new BigInteger(1, 1); + + /* Fields used for the internal representation. */ + + /** + * The {@code BigInteger} constant 10. + */ + public static final BigInteger TEN = new BigInteger(1, 10); + + /** + * The {@code BigInteger} constant 0. + */ + public static final BigInteger ZERO = new BigInteger(0, 0); + + /** + * The {@code BigInteger} constant 0 used for comparison. + */ + static final int EQUALS = 0; + + /** + * The {@code BigInteger} constant 1 used for comparison. + */ + static final int GREATER = 1; + + /** + * The {@code BigInteger} constant -1 used for comparison. + */ + static final int LESS = -1; + + /** + * The {@code BigInteger} constant -1. + */ + static final BigInteger MINUS_ONE = new BigInteger(-1, 1); + + /** + * All the {@code BigInteger} numbers in the range [0,10] are cached. + */ + static final BigInteger[] SMALL_VALUES = { + ZERO, ONE, new BigInteger(1, 2), new BigInteger(1, 3), + new BigInteger(1, 4), new BigInteger(1, 5), new BigInteger(1, 6), + new BigInteger(1, 7), new BigInteger(1, 8), new BigInteger(1, 9), TEN}; + + static final BigInteger[] TWO_POWS; + + /** + * This is the serialVersionUID used by the sun implementation. + */ + private static final long serialVersionUID = -8287574255936472291L; + + static { + TWO_POWS = new BigInteger[32]; + for (int i = 0; i < TWO_POWS.length; i++) { + TWO_POWS[i] = BigInteger.valueOf(1L << i); + } + } + + /** + * Returns a random positive {@code BigInteger} instance in the range [0, + * 2^(bitLength)-1] which is probably prime. The probability that the returned + * {@code BigInteger} is prime is beyond (1-1/2^80). + *
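+ * For example, {@code BigInteger.probablePrime(64, new Random())} returns a
+ * probable prime of at most 64 bits.
+ *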
+ * Implementation Note: Currently {@code rnd} is ignored. + * + * @param bitLength length of the new {@code BigInteger} in bits. + * @param rnd random generator used to generate the new {@code BigInteger}. + * @return probably prime random {@code BigInteger} instance. + * @throws ArithmeticException if {@code bitLength < 2}. + */ + public static BigInteger probablePrime(int bitLength, Random rnd) { + return new BigInteger(bitLength, 100, rnd); + } + + public static BigInteger valueOf(long val) { + return val >= 0 ? BigInteger.fromBits(val) : BigInteger.fromBits(-val).negate(); + } + + private static BigInteger fromBits(long bits) { + int lowBits = (int) bits; + int highBits = LongUtils.getHighBits(bits); + if (highBits != 0) { + return new BigInteger(1, lowBits, highBits); + } + if (lowBits > 10 || lowBits < 0) { + return new BigInteger(1, lowBits); + } + return SMALL_VALUES[lowBits]; + } + + static BigInteger getPowerOfTwo(int exp) { + if (exp < TWO_POWS.length) { + return TWO_POWS[exp]; + } + int intCount = exp >> 5; + int bitN = exp & 31; + int resDigits[] = new int[intCount + 1]; + resDigits[intCount] = 1 << bitN; + return new BigInteger(1, intCount + 1, resDigits); + } + + /** + * @see BigInteger#BigInteger(String, int) + */ + private static void setFromString(BigInteger bi, String val, int radix) { + int sign; + int[] digits; + int numberLength; + int stringLength = val.length(); + int startChar; + int endChar = stringLength; + + if (val.charAt(0) == '-') { + sign = -1; + startChar = 1; + stringLength--; + } else { + sign = 1; + startChar = 0; + } + /* + * We use the following algorithm: split a string into portions of n + * characters and convert each portion to an integer according to the radix. + * Then convert an exp(radix, n) based number to binary using the + * multiplication method. See D. Knuth, The Art of Computer Programming, + * vol. 2. + */ + + int charsPerInt = Conversion.digitFitInInt[radix]; + int bigRadixDigitsLength = stringLength / charsPerInt; + int topChars = stringLength % charsPerInt; + + if (topChars != 0) { + bigRadixDigitsLength++; + } + digits = new int[bigRadixDigitsLength]; + // Get the maximal power of radix that fits in int + int bigRadix = Conversion.bigRadices[radix - 2]; + // Parse an input string and accumulate the BigInteger's magnitude + int digitIndex = 0; // index of digits array + int substrEnd = startChar + ((topChars == 0) ? charsPerInt : topChars); + int newDigit; + + for (int substrStart = startChar; substrStart < endChar; substrStart = substrEnd, substrEnd = substrStart + + charsPerInt) { + int bigRadixDigit = Integer.parseInt( + val.substring(substrStart, substrEnd), radix); + newDigit = Multiplication.multiplyByInt(digits, digitIndex, bigRadix); + newDigit += Elementary.inplaceAdd(digits, digitIndex, bigRadixDigit); + digits[digitIndex++] = newDigit; + } + numberLength = digitIndex; + bi.sign = sign; + bi.numberLength = numberLength; + bi.digits = digits; + bi.cutOffLeadingZeroes(); + } + + /** + * The magnitude of this big integer. This array is in little endian order and + * each "digit" is a 32-bit unsigned integer. For example: {@code 13} is + * represented as [ 13 ] {@code -13} is represented as [ 13 ] {@code 2^32 + + * 13} is represented as [ 13, 1 ] {@code 2^64 + 13} is represented as [ 13, + * 0, 1 ] {@code 2^31} is represented as [ Integer.MIN_VALUE ] The magnitude + * array may be longer than strictly necessary, which results in additional + * trailing zeros. + * + *
TODO(jat): consider changing to 24-bit integers for better performance + * in browsers. + */ + transient int digits[]; + + /** + * The length of this in measured in ints. Can be less than digits.length(). + */ + transient int numberLength; + + /** + * The sign of this. + */ + transient int sign; + + private transient int firstNonzeroDigit = -2; + + /** + * Cache for the hash code. + */ + private transient int hashCode = 0; + + /** + * Constructs a new {@code BigInteger} from the given two's complement + * representation. The most significant byte is the entry at index 0. The most + * significant bit of this entry determines the sign of the new {@code + * BigInteger} instance. The given array must not be empty. + * + * @param val two's complement representation of the new {@code BigInteger}. + * @throws NullPointerException if {@code val == null}. + * @throws NumberFormatException if the length of {@code val} is zero. + */ + public BigInteger(byte[] val) { + this(val, 0, val.length); + } + + /** + * Constructs a new {@code BigInteger} from the given two's complement + * representation. The most significant byte is the entry at index 0. The most + * significant bit of this entry determines the sign of the new {@code + * BigInteger} instance. The given array must not be empty. + * + * @param val two's complement representation of the new {@code BigInteger}. + * @param offset the start offset of the binary representation. + * @param length the number of bytes to use. + * @throws NullPointerException if {@code val == null}. + * @throws NumberFormatException if the length of {@code val} is zero. + */ + public BigInteger(byte[] val, int offset, int length) { + if (val.length == 0) { + // math.12=Zero length BigInteger + throw new NumberFormatException("Zero length BigInteger"); //$NON-NLS-1$ + } + if (length < 0 || offset < 0 || length > val.length - offset) { + throw new IndexOutOfBoundsException("Range check failed: offset=" + offset + ", length=" + + length + ", val.length=" + val.length); + } + if (length == 0) { + sign = 0; + numberLength = 1; + digits = new int[] {0}; + return; + } + if (val[offset] < 0) { + sign = -1; + putBytesNegativeToIntegers(val, offset, length); + } else { + sign = 1; + putBytesPositiveToIntegers(val, offset, length); + } + cutOffLeadingZeroes(); + } + + /** + * Constructs a new {@code BigInteger} instance with the given sign and the + * given magnitude. The sign is given as an integer (-1 for negative, 0 for + * zero, 1 for positive). The magnitude is specified as a byte array. The most + * significant byte is the entry at index 0. + * + * @param signum sign of the new {@code BigInteger} (-1 for negative, 0 for + * zero, 1 for positive). + * @param magnitude magnitude of the new {@code BigInteger} with the most + * significant byte first. + * @throws NullPointerException if {@code magnitude == null}. + * @throws NumberFormatException if the sign is not one of -1, 0, 1 or if the + * sign is zero and the magnitude contains non-zero entries. + */ + public BigInteger(int signum, byte[] magnitude) { + this(signum, magnitude, 0, magnitude.length); + } + + /** + * Constructs a new {@code BigInteger} instance with the given sign and the + * given magnitude. The sign is given as an integer (-1 for negative, 0 for + * zero, 1 for positive). The magnitude is specified as a byte array. The most + * significant byte is the entry at index 0. + * + * @param signum sign of the new {@code BigInteger} (-1 for negative, 0 for + * zero, 1 for positive). 
+ * @param magnitude magnitude of the new {@code BigInteger} with the most + * significant byte first. + * @param offset the start offset of the binary representation. + * @param length the number of bytes to use. + * @throws NullPointerException if {@code magnitude == null}. + * @throws NumberFormatException if the sign is not one of -1, 0, 1 or if the + * sign is zero and the magnitude contains non-zero entries. + */ + public BigInteger(int signum, byte[] magnitude, int offset, int length) { + checkNotNull(magnitude); + + if (length < 0 || offset < 0 || length > magnitude.length - offset) { + throw new IndexOutOfBoundsException("Range check failed: offset=" + offset + ", length=" + + length + ", val.length=" + magnitude.length); + } + + if ((signum < -1) || (signum > 1)) { + // math.13=Invalid signum value + throw new NumberFormatException("Invalid signum value"); //$NON-NLS-1$ + } + if (signum == 0) { + for (int index = offset; index < offset + length; index++) { + byte element = magnitude[index]; + if (element != 0) { + // math.14=signum-magnitude mismatch + throw new NumberFormatException("signum-magnitude mismatch"); //$NON-NLS-1$ + } + } + } + if (length == 0) { + sign = 0; + numberLength = 1; + digits = new int[] {0}; + } else { + sign = signum; + putBytesPositiveToIntegers(magnitude, offset, length); + cutOffLeadingZeroes(); + } + } + + /** + * Constructs a random {@code BigInteger} instance in the range [0, + * 2^(bitLength)-1] which is probably prime. The probability that the returned + * {@code BigInteger} is prime is beyond (1-1/2^certainty). + * + * @param bitLength length of the new {@code BigInteger} in bits. + * @param certainty tolerated primality uncertainty. + * @param rnd is an optional random generator to be used. + * @throws ArithmeticException if {@code bitLength} < 2. + */ + public BigInteger(int bitLength, int certainty, Random rnd) { + if (bitLength < 2) { + // math.1C=bitLength < 2 + throw new ArithmeticException("bitLength < 2"); //$NON-NLS-1$ + } + BigInteger me = Primality.consBigInteger(bitLength, certainty, rnd); + sign = me.sign; + numberLength = me.numberLength; + digits = me.digits; + } + + /** + * Constructs a random non-negative {@code BigInteger} instance in the range + * [0, 2^(numBits)-1]. + * + * @param numBits maximum length of the new {@code BigInteger} in bits. + * @param rnd is an optional random generator to be used. + * @throws IllegalArgumentException if {@code numBits} < 0. + */ + public BigInteger(int numBits, Random rnd) { + checkCriticalArgument(numBits >= 0, "numBits must be non-negative"); + + if (numBits == 0) { + sign = 0; + numberLength = 1; + digits = new int[] {0}; + } else { + sign = 1; + numberLength = (numBits + 31) >> 5; + digits = new int[numberLength]; + for (int i = 0; i < numberLength; i++) { + digits[i] = rnd.nextInt(); + } + // Using only the necessary bits + digits[numberLength - 1] >>>= (-numBits) & 31; + cutOffLeadingZeroes(); + } + } + + /** + * Constructs a new {@code BigInteger} instance from the string + * representation. The string representation consists of an optional minus + * sign followed by a non-empty sequence of decimal digits. + * + * @param val string representation of the new {@code BigInteger}. + * @throws NullPointerException if {@code val == null}. + * @throws NumberFormatException if {@code val} is not a valid representation + * of a {@code BigInteger}. 
+ */ + public BigInteger(String val) { + this(val, 10); + } + + /** + * Constructs a new {@code BigInteger} instance from the string + * representation. The string representation consists of an optional minus + * sign followed by a non-empty sequence of digits in the specified radix. For + * the conversion the method {@code Character.digit(char, radix)} is used. + * + * @param val string representation of the new {@code BigInteger}. + * @param radix the base to be used for the conversion. + * @throws NullPointerException if {@code val == null}. + * @throws NumberFormatException if {@code val} is not a valid representation + * of a {@code BigInteger} or if {@code radix < Character.MIN_RADIX} + * or {@code radix > Character.MAX_RADIX}. + */ + public BigInteger(String val, int radix) { + checkNotNull(val); + + if ((radix < Character.MIN_RADIX) || (radix > Character.MAX_RADIX)) { + // math.11=Radix out of range + throw new NumberFormatException("Radix out of range"); //$NON-NLS-1$ + } + if (val.isEmpty()) { + // math.12=Zero length BigInteger + throw new NumberFormatException("Zero length BigInteger"); //$NON-NLS-1$ + } + setFromString(this, val, radix); + } + + /** + * Constructs a number which array is of size 1. + * + * @param sign the sign of the number + * @param value the only one digit of array + */ + BigInteger(int sign, int bits) { + this(sign, 1, new int[] {bits}); + } + + BigInteger(int sign, int lowBits, int highBits) { + this(sign, 2, new int[] {lowBits, highBits}); + } + + /** + * Creates a new {@code BigInteger} with the given sign and magnitude. This constructor does not + * create a copy, so any changes to the reference will affect the new number. + * + * @param signum The sign of the number represented by {@code digits} + * @param digits The magnitude of the number + */ + BigInteger(int signum, int digits[]) { + if (digits.length == 0) { + sign = 0; + numberLength = 1; + this.digits = new int[] {0}; + } else { + sign = signum; + numberLength = digits.length; + this.digits = digits; + cutOffLeadingZeroes(); + } + } + + /** + * Constructs a number without to create new space. This construct should be used only if the + * three fields of representation are known. + * + * @param sign the sign of the number + * @param numberLength the length of the internal array + * @param digits a reference of some array created before + */ + BigInteger(int sign, int numberLength, int[] digits) { + this.sign = sign; + this.numberLength = numberLength; + this.digits = digits; + } + + /** + * Returns a (new) {@code BigInteger} whose value is the absolute value of + * {@code this}. + * + * @return {@code abs(this)}. + */ + public BigInteger abs() { + return sign < 0 ? negate() : this; + } + + /** + * Returns a new {@code BigInteger} whose value is {@code this + val}. + * + * @param val value to be added to {@code this}. + * @return {@code this + val}. + * @throws NullPointerException if {@code val == null}. + */ + public BigInteger add(BigInteger val) { + return Elementary.add(this, val); + } + + /** + * Returns a new {@code BigInteger} whose value is {@code this & val}. + *
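+ * For example, {@code BigInteger.valueOf(5).and(BigInteger.valueOf(3))} returns
+ * {@code 1} (binary {@code 101 & 011 = 001}).
+ *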
+ * Implementation Note: Usage of this method is not recommended as the + * current implementation is not efficient. + * + * @param val value to be and'ed with {@code this}. + * @return {@code this & val}. + * @throws NullPointerException if {@code val == null}. + */ + public BigInteger and(BigInteger val) { + return Logical.and(this, val); + } + + /** + * Returns a new {@code BigInteger} whose value is {@code this & ~val}. + * Evaluating {@code x.andNot(val)} returns the same result as {@code + * x.and(val.not())}. + *
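+ * For example, {@code BigInteger.valueOf(5).andNot(BigInteger.valueOf(3))}
+ * returns {@code 4} (binary {@code 101 & ~011 = 100}).
+ *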
+ * Implementation Note: Usage of this method is not recommended as the
+ * current implementation is not efficient.
+ *
+ * @param val value to be not'ed and then and'ed with {@code this}.
+ * @return {@code this & ~val}.
+ * @throws NullPointerException if {@code val == null}.
+ */
+ public BigInteger andNot(BigInteger val) {
+ return Logical.andNot(this, val);
+ }
+
+ /**
+ * Use {@code bitLength()} if you want to know the length of the binary value
+ * in bits.
+ *
+ * Returns the number of bits in the binary representation of {@code this} + * which differ from the sign bit. If {@code this} is positive the result is + * equivalent to the number of bits set in the binary representation of + * {@code this}. If {@code this} is negative the result is equivalent to the + * number of bits set in the binary representation of {@code -this-1}. + *
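+ * For example, {@code BigInteger.valueOf(10).bitCount()} returns {@code 2},
+ * since binary {@code 1010} has two bits set.
+ *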
+ * Implementation Note: Usage of this method is not recommended as the + * current implementation is not efficient. + * + * @return number of bits in the binary representation of {@code this} which + * differ from the sign bit + */ + public int bitCount() { + return BitLevel.bitCount(this); + } + + /** + * Returns the length of the value's two's complement representation without + * leading zeros for positive numbers / without leading ones for negative + * values. + *
+ * The two's complement representation of {@code this} will be at least + * {@code bitLength() + 1} bits long. + *
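+ * For example, {@code BigInteger.valueOf(10).bitLength()} returns {@code 4}
+ * (binary {@code 1010}), and {@code BigInteger.valueOf(-1).bitLength()} returns
+ * {@code 0}.
+ *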
+ * The value will fit into an {@code int} if {@code bitLength() < 32} or into + * a {@code long} if {@code bitLength() < 64}. + * + * @return the length of the minimal two's complement representation for + * {@code this} without the sign bit. + */ + public int bitLength() { + return BitLevel.bitLength(this); + } + + /** + * Converts value of this {@code BigInteger} to a {@code byte} if it fits it, + * otherwise {@code ArithmeticException} is thrown. + * + * @return this {@code BigInteger} converted to a {@code byte}. + * @throws ArithmeticException if the value of this {@code BigInteger} + * does not fit in a {@code byte}. + */ + public byte byteValueExact() { + if (numberLength <= 1 && bitLength() < Byte.SIZE) { + return byteValue(); + } + throw new ArithmeticException("out of byte range"); + } + + /** + * Returns a new {@code BigInteger} which has the same binary representation + * as {@code this} but with the bit at position n cleared. The result is + * equivalent to {@code this & ~(2^n)}. + *
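+ * For example, {@code BigInteger.valueOf(7).clearBit(1)} returns {@code 5}
+ * (binary {@code 111 -> 101}).
+ *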
+ * Implementation Note: Usage of this method is not recommended as the + * current implementation is not efficient. + * + * @param n position where the bit in {@code this} has to be cleared. + * @return {@code this & ~(2^n)}. + * @throws ArithmeticException if {@code n < 0}. + */ + public BigInteger clearBit(int n) { + if (testBit(n)) { + return BitLevel.flipBit(this, n); + } + return this; + } + + /** + * Compares this {@code BigInteger} with {@code val}. Returns one of the three + * values 1, 0, or -1. + * + * @param val value to be compared with {@code this}. + * @return {@code 1} if {@code this > val}, {@code -1} if {@code this < val} , + * {@code 0} if {@code this == val}. + * @throws NullPointerException if {@code val == null}. + */ + @Override + public int compareTo(BigInteger val) { + if (sign > val.sign) { + return GREATER; + } + if (sign < val.sign) { + return LESS; + } + if (numberLength > val.numberLength) { + return sign; + } + if (numberLength < val.numberLength) { + return -val.sign; + } + // Equal sign and equal numberLength + return (sign * Elementary.compareArrays(digits, val.digits, numberLength)); + } + + /** + * Returns a new {@code BigInteger} whose value is {@code this / divisor}. + * + * @param divisor value by which {@code this} is divided. + * @return {@code this / divisor}. + * @throws NullPointerException if {@code divisor == null}. + * @throws ArithmeticException if {@code divisor == 0}. + */ + public BigInteger divide(BigInteger divisor) { + if (divisor.sign == 0) { + // math.17=BigInteger divide by zero + throw new ArithmeticException("BigInteger divide by zero"); //$NON-NLS-1$ + } + int divisorSign = divisor.sign; + if (divisor.isOne()) { + return ((divisor.sign > 0) ? this : this.negate()); + } + int thisSign = sign; + int thisLen = numberLength; + int divisorLen = divisor.numberLength; + if (thisLen + divisorLen == 2) { + long val = (digits[0] & 0xFFFFFFFFL) / (divisor.digits[0] & 0xFFFFFFFFL); + if (thisSign != divisorSign) { + val = -val; + } + return valueOf(val); + } + int cmp = ((thisLen != divisorLen) ? ((thisLen > divisorLen) ? 1 : -1) + : Elementary.compareArrays(digits, divisor.digits, thisLen)); + if (cmp == EQUALS) { + return ((thisSign == divisorSign) ? ONE : MINUS_ONE); + } + if (cmp == LESS) { + return ZERO; + } + int resLength = thisLen - divisorLen + 1; + int resDigits[] = new int[resLength]; + int resSign = ((thisSign == divisorSign) ? 1 : -1); + if (divisorLen == 1) { + Division.divideArrayByInt(resDigits, digits, thisLen, divisor.digits[0]); + } else { + Division.divide(resDigits, resLength, digits, thisLen, divisor.digits, + divisorLen); + } + BigInteger result = new BigInteger(resSign, resLength, resDigits); + result.cutOffLeadingZeroes(); + return result; + } + + /** + * Returns a {@code BigInteger} array which contains {@code this / divisor} at + * index 0 and {@code this % divisor} at index 1. + * + * @param divisor value by which {@code this} is divided. + * @return {@code [this / divisor, this % divisor]}. + * @throws NullPointerException if {@code divisor == null}. + * @throws ArithmeticException if {@code divisor == 0}. 
+ * @see #divide + * @see #remainder + */ + public BigInteger[] divideAndRemainder(BigInteger divisor) { + int divisorSign = divisor.sign; + if (divisorSign == 0) { + // math.17=BigInteger divide by zero + throw new ArithmeticException("BigInteger divide by zero"); //$NON-NLS-1$ + } + int divisorLen = divisor.numberLength; + int[] divisorDigits = divisor.digits; + if (divisorLen == 1) { + return Division.divideAndRemainderByInteger(this, divisorDigits[0], + divisorSign); + } + // res[0] is a quotient and res[1] is a remainder: + int[] thisDigits = digits; + int thisLen = numberLength; + int cmp = (thisLen != divisorLen) ? ((thisLen > divisorLen) ? 1 : -1) + : Elementary.compareArrays(thisDigits, divisorDigits, thisLen); + if (cmp < 0) { + return new BigInteger[] {ZERO, this}; + } + int thisSign = sign; + int quotientLength = thisLen - divisorLen + 1; + int remainderLength = divisorLen; + int quotientSign = ((thisSign == divisorSign) ? 1 : -1); + int quotientDigits[] = new int[quotientLength]; + int remainderDigits[] = Division.divide(quotientDigits, quotientLength, + thisDigits, thisLen, divisorDigits, divisorLen); + BigInteger result0 = new BigInteger(quotientSign, quotientLength, + quotientDigits); + BigInteger result1 = new BigInteger(thisSign, remainderLength, + remainderDigits); + result0.cutOffLeadingZeroes(); + result1.cutOffLeadingZeroes(); + return new BigInteger[] {result0, result1}; + } + + /** + * Returns this {@code BigInteger} as an double value. If {@code this} is too + * big to be represented as an double, then {@code Double.POSITIVE_INFINITY} + * or {@code Double.NEGATIVE_INFINITY} is returned. Note, that not all + * integers x in the range [-Double.MAX_VALUE, Double.MAX_VALUE] can be + * represented as a double. The double representation has a mantissa of length + * 53. For example, 2^53+1 = 9007199254740993 is returned as double + * 9007199254740992.0. + * + * @return this {@code BigInteger} as a double value + */ + @Override + public double doubleValue() { + return Double.parseDouble(this.toString()); + } + + /** + * Returns {@code true} if {@code x} is a BigInteger instance and if this + * instance is equal to this {@code BigInteger}. + * + * @param x object to be compared with {@code this}. + * @return true if {@code x} is a BigInteger and {@code this == x}, {@code + * false} otherwise. + */ + @Override + public boolean equals(Object x) { + if (this == x) { + return true; + } + if (x instanceof BigInteger) { + BigInteger x1 = (BigInteger) x; + return sign == x1.sign && numberLength == x1.numberLength + && equalsArrays(x1.digits); + } + return false; + } + + /** + * Returns a new {@code BigInteger} which has the same binary representation + * as {@code this} but with the bit at position n flipped. The result is + * equivalent to {@code this ^ 2^n}. + *
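+ * For example, {@code BigInteger.valueOf(5).flipBit(1)} returns {@code 7}
+ * (binary {@code 101 -> 111}).
+ *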
+ * Implementation Note: Usage of this method is not recommended as the + * current implementation is not efficient. + * + * @param n position where the bit in {@code this} has to be flipped. + * @return {@code this ^ 2^n}. + * @throws ArithmeticException if {@code n < 0}. + */ + public BigInteger flipBit(int n) { + if (n < 0) { + // math.15=Negative bit address + throw new ArithmeticException("Negative bit address"); //$NON-NLS-1$ + } + return BitLevel.flipBit(this, n); + } + + /** + * Returns this {@code BigInteger} as an float value. If {@code this} is too + * big to be represented as an float, then {@code Float.POSITIVE_INFINITY} or + * {@code Float.NEGATIVE_INFINITY} is returned. Note, that not all integers x + * in the range [-Float.MAX_VALUE, Float.MAX_VALUE] can be represented as a + * float. The float representation has a mantissa of length 24. For example, + * 2^24+1 = 16777217 is returned as float 16777216.0. + * + * @return this {@code BigInteger} as a float value. + */ + @Override + public float floatValue() { + return Float.parseFloat(this.toString()); + } + + /** + * Returns a new {@code BigInteger} whose value is greatest common divisor of + * {@code this} and {@code val}. If {@code this==0} and {@code val==0} then + * zero is returned, otherwise the result is positive. + * + * @param val value with which the greatest common divisor is computed. + * @return {@code gcd(this, val)}. + * @throws NullPointerException if {@code val == null}. + */ + public BigInteger gcd(BigInteger val) { + BigInteger val1 = this.abs(); + BigInteger val2 = val.abs(); + // To avoid a possible division by zero + if (val1.signum() == 0) { + return val2; + } else if (val2.signum() == 0) { + return val1; + } + + // Optimization for small operands + // (op2.bitLength() < 64) and (op1.bitLength() < 64) + if (((val1.numberLength == 1) || ((val1.numberLength == 2) && (val1.digits[1] > 0))) + && (val2.numberLength == 1 || (val2.numberLength == 2 && val2.digits[1] > 0))) { + return BigInteger.valueOf(Division.gcdBinary(val1.longValue(), + val2.longValue())); + } + + return Division.gcdBinary(val1.copy(), val2.copy()); + } + + /** + * Returns the position of the lowest set bit in the two's complement + * representation of this {@code BigInteger}. If all bits are zero (this=0) + * then -1 is returned as result. + *
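+ * For example, {@code BigInteger.valueOf(12).getLowestSetBit()} returns
+ * {@code 2}, since binary {@code 1100} has its lowest set bit at position 2.
+ *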
+ * Implementation Note: Usage of this method is not recommended as the + * current implementation is not efficient. + * + * @return position of lowest bit if {@code this != 0}, {@code -1} otherwise + */ + public int getLowestSetBit() { + if (sign == 0) { + return -1; + } + // (sign != 0) implies that exists some non zero digit + int i = getFirstNonzeroDigit(); + return ((i << 5) + Integer.numberOfTrailingZeros(digits[i])); + } + + /** + * Returns a hash code for this {@code BigInteger}. + * + * @return hash code for {@code this}. + */ + @Override + public int hashCode() { + if (hashCode != 0) { + return hashCode; + } + for (int i = 0; i < digits.length; i++) { + hashCode = (hashCode * 33 + (digits[i] & 0xffffffff)); + } + hashCode = hashCode * sign; + return hashCode; + } + + /** + * Returns this {@code BigInteger} as an int value. If {@code this} is too big + * to be represented as an int, then {@code this} % 2^32 is returned. + * + * @return this {@code BigInteger} as an int value. + */ + @Override + public int intValue() { + int i = digits[0]; + // i is always positive except for Integer.MIN_VALUE because of int overflow + return sign > 0 ? i : ensureInt(-i); + } + + /** + * Converts value of this {@code BigInteger} to an {@code int} if it fits it, + * otherwise {@code ArithmeticException} is thrown. + * + * @return this {@code BigInteger} converted to an {@code int}. + * @throws ArithmeticException if the value of this {@code BigInteger} + * does not fit in an {@code int}. + */ + public int intValueExact() { + if (numberLength <= 1 && bitLength() < Integer.SIZE) { + return intValue(); + } + throw new ArithmeticException("out of int range"); + } + + /** + * Tests whether this {@code BigInteger} is probably prime. If {@code true} is + * returned, then this is prime with a probability beyond (1-1/2^certainty). + * If {@code false} is returned, then this is definitely composite. If the + * argument {@code certainty} <= 0, then this method returns true. + * + * @param certainty tolerated primality uncertainty. + * @return {@code true}, if {@code this} is probably prime, {@code false} + * otherwise. + */ + public boolean isProbablePrime(int certainty) { + return Primality.isProbablePrime(abs(), certainty); + } + + /** + * Returns this {@code BigInteger} as an long value. If {@code this} is too + * big to be represented as an long, then {@code this} % 2^64 is returned. + * + * @return this {@code BigInteger} as a long value. + */ + @Override + public long longValue() { + long value = + numberLength > 1 + ? LongUtils.fromBits(digits[0], digits[1]) + : LongUtils.fromBits(digits[0], 0); + return sign > 0 ? value : -value; + } + + /** + * Converts value of this {@code BigInteger} to a {@code long} if it fits it, + * otherwise {@code ArithmeticException} is thrown. + * + * @return this {@code BigInteger} converted to a {@code long}. + * @throws ArithmeticException if the value of this {@code BigInteger} + * does not fit in a {@code long}. + */ + public long longValueExact() { + if (numberLength <= 2 && bitLength() < Long.SIZE) { + return longValue(); + } + throw new ArithmeticException("out of long range"); + } + + /** + * Returns the maximum of this {@code BigInteger} and {@code val}. + * + * @param val value to be used to compute the maximum with {@code this} + * @return {@code max(this, val)} + * @throws NullPointerException if {@code val == null} + */ + public BigInteger max(BigInteger val) { + return ((this.compareTo(val) == GREATER) ? 
this : val); + } + + /** + * Returns the minimum of this {@code BigInteger} and {@code val}. + * + * @param val value to be used to compute the minimum with {@code this}. + * @return {@code min(this, val)}. + * @throws NullPointerException if {@code val == null}. + */ + public BigInteger min(BigInteger val) { + return ((this.compareTo(val) == LESS) ? this : val); + } + + /** + * Returns a new {@code BigInteger} whose value is {@code this mod m}. The + * modulus {@code m} must be positive. The result is guaranteed to be in the + * interval {@code [0, m)} (0 inclusive, m exclusive). The behavior of this + * function is not equivalent to the behavior of the % operator defined for + * the built-in {@code int}'s. + * + * @param m the modulus. + * @return {@code this mod m}. + * @throws NullPointerException if {@code m == null}. + * @throws ArithmeticException if {@code m < 0}. + */ + public BigInteger mod(BigInteger m) { + if (m.sign <= 0) { + // math.18=BigInteger: modulus not positive + throw new ArithmeticException("BigInteger: modulus not positive"); //$NON-NLS-1$ + } + BigInteger rem = remainder(m); + return ((rem.sign < 0) ? rem.add(m) : rem); + } + + // @Override + // public double doubleValue() { + // return Conversion.bigInteger2Double(this); + // } + + /** + * Returns a new {@code BigInteger} whose value is {@code 1/this mod m}. The + * modulus {@code m} must be positive. The result is guaranteed to be in the + * interval {@code [0, m)} (0 inclusive, m exclusive). If {@code this} is not + * relatively prime to m, then an exception is thrown. + * + * @param m the modulus. + * @return {@code 1/this mod m}. + * @throws NullPointerException if {@code m == null} + * @throws ArithmeticException if {@code m < 0 or} if {@code this} is not + * relatively prime to {@code m} + */ + public BigInteger modInverse(BigInteger m) { + if (m.sign <= 0) { + // math.18=BigInteger: modulus not positive + throw new ArithmeticException("BigInteger: modulus not positive"); //$NON-NLS-1$ + } + // If both are even, no inverse exists + if (!(testBit(0) || m.testBit(0))) { + // math.19=BigInteger not invertible. + throw new ArithmeticException("BigInteger not invertible."); //$NON-NLS-1$ + } + if (m.isOne()) { + return ZERO; + } + + // From now on: (m > 1) + BigInteger res = Division.modInverseMontgomery(abs().mod(m), m); + if (res.sign == 0) { + // math.19=BigInteger not invertible. + throw new ArithmeticException("BigInteger not invertible."); //$NON-NLS-1$ + } + + res = ((sign < 0) ? m.subtract(res) : res); + return res; + } + + /** + * Returns a new {@code BigInteger} whose value is {@code this^exponent mod m} + * . The modulus {@code m} must be positive. The result is guaranteed to be in + * the interval {@code [0, m)} (0 inclusive, m exclusive). If the exponent is + * negative, then {@code this.modInverse(m)^(-exponent) mod m)} is computed. + * The inverse of this only exists if {@code this} is relatively prime to m, + * otherwise an exception is thrown. + * + * @param exponent the exponent. + * @param m the modulus. + * @return {@code this^exponent mod val}. + * @throws NullPointerException if {@code m == null} or {@code exponent == + * null}. + * @throws ArithmeticException if {@code m < 0} or if {@code exponent<0} and + * this is not relatively prime to {@code m}. 
+ */
+ public BigInteger modPow(BigInteger exponent, BigInteger m) {
+ if (m.sign <= 0) {
+ // math.18=BigInteger: modulus not positive
+ throw new ArithmeticException("BigInteger: modulus not positive"); //$NON-NLS-1$
+ }
+ BigInteger base = this;
+
+ if (m.isOne() | (exponent.sign > 0 & base.sign == 0)) {
+ return BigInteger.ZERO;
+ }
+ if (base.sign == 0 && exponent.sign == 0) {
+ return BigInteger.ONE;
+ }
+ if (exponent.sign < 0) {
+ base = modInverse(m);
+ exponent = exponent.negate();
+ }
+ // From now on: (m > 0) and (exponent >= 0)
+ BigInteger res = (m.testBit(0)) ? Division.oddModPow(base.abs(), exponent,
+ m) : Division.evenModPow(base.abs(), exponent, m);
+ if ((base.sign < 0) && exponent.testBit(0)) {
+ // -b^e mod m == ((-1 mod m) * (b^e mod m)) mod m
+ res = m.subtract(BigInteger.ONE).multiply(res).mod(m);
+ }
+ // else exponent is even, so base^exp is positive
+ return res;
+ }
+
+ /**
+ * Returns a new {@code BigInteger} whose value is {@code this * val}.
+ *
+ * @param val value to be multiplied with {@code this}.
+ * @return {@code this * val}.
+ * @throws NullPointerException if {@code val == null}.
+ */
+ public BigInteger multiply(BigInteger val) {
+ // This lets us throw a NullPointerException when val == null
+ if (val.sign == 0 || sign == 0) {
+ return ZERO;
+ }
+ return Multiplication.multiply(this, val);
+ }
+
+ /**
+ * Returns a new {@code BigInteger} whose value is {@code -this}.
+ *
+ * @return {@code -this}.
+ */
+ public BigInteger negate() {
+ return sign == 0 ? this : new BigInteger(-sign, numberLength, digits);
+ }
+
+ /**
+ * Returns the smallest integer x > {@code this} which is probably prime as a
+ * {@code BigInteger} instance. The probability that the returned {@code
+ * BigInteger} is prime is beyond (1-1/2^80).
+ *
+ * @return smallest integer > {@code this} which is probably prime.
+ * @throws ArithmeticException if {@code this < 0}.
+ */
+ public BigInteger nextProbablePrime() {
+ if (sign < 0) {
+ // math.1A=start < 0: {0}
+ throw new ArithmeticException("start < 0: " + this); //$NON-NLS-1$
+ }
+ return Primality.nextProbablePrime(this);
+ }
+
+ /**
+ * Returns a new {@code BigInteger} whose value is {@code ~this}. The result
+ * of this operation is {@code -this-1}.
+ *
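+ * For example, {@code BigInteger.valueOf(5).not()} returns {@code -6}.
+ *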
+ * Implementation Note: Usage of this method is not recommended as the + * current implementation is not efficient. + * + * @return {@code ~this}. + */ + public BigInteger not() { + return Logical.not(this); + } + + /** + * Returns a new {@code BigInteger} whose value is {@code this | val}. + *
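+ * For example, {@code BigInteger.valueOf(5).or(BigInteger.valueOf(3))} returns
+ * {@code 7} (binary {@code 101 | 011 = 111}).
+ *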
+ * Implementation Note: Usage of this method is not recommended as the + * current implementation is not efficient. + * + * @param val value to be or'ed with {@code this}. + * @return {@code this | val}. + * @throws NullPointerException if {@code val == null}. + */ + public BigInteger or(BigInteger val) { + return Logical.or(this, val); + } + + /** + * Returns a new {@code BigInteger} whose value is {@code this ^ exp}. + * + * @param exp exponent to which {@code this} is raised. + * @return {@code this ^ exp}. + * @throws ArithmeticException if {@code exp < 0}. + */ + public BigInteger pow(int exp) { + if (exp < 0) { + // math.16=Negative exponent + throw new ArithmeticException("Negative exponent"); //$NON-NLS-1$ + } + if (exp == 0) { + return ONE; + } else if (exp == 1 || equals(ONE) || equals(ZERO)) { + return this; + } + + // if even take out 2^x factor which we can + // calculate by shifting. + if (!testBit(0)) { + int x = 1; + while (!testBit(x)) { + x++; + } + return getPowerOfTwo(x * exp).multiply(this.shiftRight(x).pow(exp)); + } + return Multiplication.pow(this, exp); + } + + /** + * Returns a new {@code BigInteger} whose value is {@code this % divisor}. + * Regarding signs this methods has the same behavior as the % operator on + * int's, i.e. the sign of the remainder is the same as the sign of this. + * + * @param divisor value by which {@code this} is divided. + * @return {@code this % divisor}. + * @throws NullPointerException if {@code divisor == null}. + * @throws ArithmeticException if {@code divisor == 0}. + */ + public BigInteger remainder(BigInteger divisor) { + if (divisor.sign == 0) { + // math.17=BigInteger divide by zero + throw new ArithmeticException("BigInteger divide by zero"); //$NON-NLS-1$ + } + int thisLen = numberLength; + int divisorLen = divisor.numberLength; + if (((thisLen != divisorLen) ? ((thisLen > divisorLen) ? 1 : -1) + : Elementary.compareArrays(digits, divisor.digits, thisLen)) == LESS) { + return this; + } + int resLength = divisorLen; + int resDigits[] = new int[resLength]; + if (resLength == 1) { + resDigits[0] = Division.remainderArrayByInt(digits, thisLen, + divisor.digits[0]); + } else { + int qLen = thisLen - divisorLen + 1; + resDigits = Division.divide(null, qLen, digits, thisLen, divisor.digits, + divisorLen); + } + BigInteger result = new BigInteger(sign, resLength, resDigits); + result.cutOffLeadingZeroes(); + return result; + } + + /** + * Returns a new {@code BigInteger} which has the same binary representation + * as {@code this} but with the bit at position n set. The result is + * equivalent to {@code this | 2^n}. + *
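+ * For example, {@code BigInteger.valueOf(5).setBit(1)} returns {@code 7}
+ * (binary {@code 101 -> 111}).
+ *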
+ * Implementation Note: Usage of this method is not recommended as the + * current implementation is not efficient. + * + * @param n position where the bit in {@code this} has to be set. + * @return {@code this | 2^n}. + * @throws ArithmeticException if {@code n < 0}. + */ + public BigInteger setBit(int n) { + if (!testBit(n)) { + return BitLevel.flipBit(this, n); + } + return this; + } + + /** + * Returns a new {@code BigInteger} whose value is {@code this << n}. The + * result is equivalent to {@code this * 2^n} if n >= 0. The shift distance + * may be negative which means that {@code this} is shifted right. The result + * then corresponds to {@code floor(this / 2^(-n))}. + *
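+ * For example, {@code BigInteger.valueOf(3).shiftLeft(2)} returns {@code 12},
+ * and {@code BigInteger.valueOf(-5).shiftLeft(-1)} returns {@code -3}
+ * ({@code floor(-5 / 2)}).
+ *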
+ * Implementation Note: Usage of this method on negative values is not + * recommended as the current implementation is not efficient. + * + * @param n shift distance. + * @return {@code this << n} if {@code n >= 0}; {@code this >> (-n)}. + * otherwise + */ + public BigInteger shiftLeft(int n) { + if ((n == 0) || (sign == 0)) { + return this; + } + return ((n > 0) ? BitLevel.shiftLeft(this, n) : BitLevel.shiftRight(this, + -n)); + } + + /** + * Returns a new {@code BigInteger} whose value is {@code this >> n}. For + * negative arguments, the result is also negative. The shift distance may be + * negative which means that {@code this} is shifted left. + *
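+ * For example, {@code BigInteger.valueOf(-5).shiftRight(1)} returns {@code -3}
+ * ({@code floor(-5 / 2)}).
+ *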
+ * Implementation Note: Usage of this method on negative values is not + * recommended as the current implementation is not efficient. + * + * @param n shift distance + * @return {@code this >> n} if {@code n >= 0}; {@code this << (-n)} otherwise + */ + public BigInteger shiftRight(int n) { + if ((n == 0) || (sign == 0)) { + return this; + } + return ((n > 0) ? BitLevel.shiftRight(this, n) : BitLevel.shiftLeft(this, + -n)); + } + + /** + * Converts value of this {@code BigInteger} to a {@code short} if it fits it, + * otherwise {@code ArithmeticException} is thrown. + * + * @return this {@code BigInteger} converted to a {@code short}. + * @throws ArithmeticException if the value of this {@code BigInteger} + * does not fit in a {@code short}. + */ + public short shortValueExact() { + if (numberLength <= 1 && bitLength() < Short.SIZE) { + return shortValue(); + } + throw new ArithmeticException("out of short range"); + } + + /** + * Returns the sign of this {@code BigInteger}. + * + * @return {@code -1} if {@code this < 0}, {@code 0} if {@code this == 0}, + * {@code 1} if {@code this > 0}. + */ + public int signum() { + return sign; + } + + /** + * Returns a new {@code BigInteger} whose value is {@code this - val}. + * + * @param val value to be subtracted from {@code this}. + * @return {@code this - val}. + * @throws NullPointerException if {@code val == null}. + */ + public BigInteger subtract(BigInteger val) { + return Elementary.subtract(this, val); + } + + /** + * Tests whether the bit at position n in {@code this} is set. The result is + * equivalent to {@code this & (2^n) != 0}. + *
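+ * For example, {@code BigInteger.valueOf(5).testBit(0)} returns {@code true}
+ * and {@code BigInteger.valueOf(5).testBit(1)} returns {@code false}.
+ *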
+ * Implementation Note: Usage of this method is not recommended as the + * current implementation is not efficient. + * + * @param n position where the bit in {@code this} has to be inspected. + * @return {@code this & (2^n) != 0}. + * @throws ArithmeticException if {@code n < 0}. + */ + public boolean testBit(int n) { + if (n == 0) { + return ((digits[0] & 1) != 0); + } + if (n < 0) { + // math.15=Negative bit address + throw new ArithmeticException("Negative bit address"); //$NON-NLS-1$ + } + int intCount = n >> 5; + if (intCount >= numberLength) { + return (sign < 0); + } + int digit = digits[intCount]; + n = (1 << (n & 31)); // int with 1 set to the needed position + if (sign < 0) { + int firstNonZeroDigit = getFirstNonzeroDigit(); + if (intCount < firstNonZeroDigit) { + return false; + } else if (firstNonZeroDigit == intCount) { + digit = -digit; + } else { + digit = ~digit; + } + } + return ((digit & n) != 0); + } + + /** + * Returns the two's complement representation of this BigInteger in a byte + * array. + * + * @return two's complement representation of {@code this}. + */ + public byte[] toByteArray() { + if (this.sign == 0) { + return new byte[] {0}; + } + BigInteger temp = this; + int bitLen = bitLength(); + int iThis = getFirstNonzeroDigit(); + int bytesLen = (bitLen >> 3) + 1; + /* + * Puts the little-endian int array representing the magnitude of this + * BigInteger into the big-endian byte array. + */ + byte[] bytes = new byte[bytesLen]; + int firstByteNumber = 0; + int highBytes; + int digitIndex = 0; + int bytesInInteger = 4; + int digit; + int hB; + + if (bytesLen - (numberLength << 2) == 1) { + bytes[0] = (byte) ((sign < 0) ? -1 : 0); + highBytes = 4; + firstByteNumber++; + } else { + hB = bytesLen & 3; + highBytes = (hB == 0) ? 4 : hB; + } + + digitIndex = iThis; + bytesLen -= iThis << 2; + + if (sign < 0) { + digit = -temp.digits[digitIndex]; + digitIndex++; + if (digitIndex == numberLength) { + bytesInInteger = highBytes; + } + for (int i = 0; i < bytesInInteger; i++, digit >>= 8) { + bytes[--bytesLen] = (byte) digit; + } + while (bytesLen > firstByteNumber) { + digit = ~temp.digits[digitIndex]; + digitIndex++; + if (digitIndex == numberLength) { + bytesInInteger = highBytes; + } + for (int i = 0; i < bytesInInteger; i++, digit >>= 8) { + bytes[--bytesLen] = (byte) digit; + } + } + } else { + while (bytesLen > firstByteNumber) { + digit = temp.digits[digitIndex]; + digitIndex++; + if (digitIndex == numberLength) { + bytesInInteger = highBytes; + } + for (int i = 0; i < bytesInInteger; i++, digit >>= 8) { + bytes[--bytesLen] = (byte) digit; + } + } + } + return bytes; + } + + /** + * Returns a string representation of this {@code BigInteger} in decimal form. + * + * @return a string representation of {@code this} in decimal form. + */ + @Override + public String toString() { + return Conversion.toDecimalScaledString(this, 0); + } + + /** + * Returns a string containing a string representation of this {@code + * BigInteger} with base radix. If {@code radix} is less than + * {@link Character#MIN_RADIX} or greater than {@link Character#MAX_RADIX} + * then a decimal representation is returned. The characters of the string + * representation are generated with method {@link Character#forDigit}. + * + * @param radix base to be used for the string representation. + * @return a string representation of this with radix 10. 
+ */ + public String toString(int radix) { + return Conversion.bigInteger2String(this, radix); + } + + /** + * Returns a new {@code BigInteger} whose value is {@code this ^ val}. + *
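+ * For example, {@code BigInteger.valueOf(5).xor(BigInteger.valueOf(3))} returns
+ * {@code 6} (binary {@code 101 ^ 011 = 110}).
+ *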
+ * Implementation Note: Usage of this method is not recommended as the + * current implementation is not efficient. + * + * @param val value to be xor'ed with {@code this} + * @return {@code this ^ val} + * @throws NullPointerException if {@code val == null} + */ + public BigInteger xor(BigInteger val) { + return Logical.xor(this, val); + } + + /* + * Returns a copy of the current instance to achieve immutability + */ + BigInteger copy() { + int[] copyDigits = new int[numberLength]; + System.arraycopy(digits, 0, copyDigits, 0, numberLength); + return new BigInteger(sign, numberLength, copyDigits); + } + + /* Private Methods */ + + /** + * Decreases {@code numberLength} if there are zero high elements. + */ + final void cutOffLeadingZeroes() { + while ((numberLength > 0) && (digits[--numberLength] == 0)) { + // Empty + } + if (digits[numberLength++] == 0) { + sign = 0; + } + } + + boolean equalsArrays(final int[] b) { + int i; + for (i = numberLength - 1; (i >= 0) && (digits[i] == b[i]); i--) { + // Empty + } + return i < 0; + } + + int getFirstNonzeroDigit() { + if (firstNonzeroDigit == -2) { + int i; + if (this.sign == 0) { + i = -1; + } else { + for (i = 0; digits[i] == 0; i++) { + // Empty + } + } + firstNonzeroDigit = i; + } + return firstNonzeroDigit; + } + + /** + * Tests if {@code this.abs()} is equals to {@code ONE}. + */ + boolean isOne() { + return ((numberLength == 1) && (digits[0] == 1)); + } + + BigInteger shiftLeftOneBit() { + return (sign == 0) ? this : BitLevel.shiftLeftOneBit(this); + } + + void unCache() { + firstNonzeroDigit = -2; + } + + /** + * Puts a big-endian byte array into a little-endian applying two complement. + */ + private void putBytesNegativeToIntegers(byte[] byteValues, int offset, int length) { + int bytesLen = length; + int highBytes = bytesLen & 3; + numberLength = (bytesLen >> 2) + ((highBytes == 0) ? 0 : 1); + digits = new int[numberLength]; + int i = 0; + // Setting the sign + digits[numberLength - 1] = -1; + // Put bytes to the int array starting from the end of the byte array + while (bytesLen > highBytes) { + digits[i] = (byteValues[--bytesLen + offset] & 0xFF) + | (byteValues[--bytesLen + offset] & 0xFF) << 8 + | (byteValues[--bytesLen + offset] & 0xFF) << 16 + | (byteValues[--bytesLen + offset] & 0xFF) << 24; + if (digits[i] != 0) { + digits[i] = -digits[i]; + firstNonzeroDigit = i; + i++; + while (bytesLen > highBytes) { + digits[i] = (byteValues[--bytesLen + offset] & 0xFF) + | (byteValues[--bytesLen + offset] & 0xFF) << 8 + | (byteValues[--bytesLen + offset] & 0xFF) << 16 + | (byteValues[--bytesLen + offset] & 0xFF) << 24; + digits[i] = ~digits[i]; + i++; + } + break; + } + i++; + } + if (highBytes != 0) { + // Put the first bytes in the highest element of the int array + if (firstNonzeroDigit != -2) { + for (int j = offset; j < bytesLen + offset; j++) { + digits[i] = (digits[i] << 8) | (byteValues[j] & 0xFF); + } + digits[i] = ~digits[i]; + } else { + for (int j = offset; j < bytesLen + offset; j++) { + digits[i] = (digits[i] << 8) | (byteValues[j] & 0xFF); + } + digits[i] = -digits[i]; + } + } + } + + /** + * Puts a big-endian byte array into a little-endian int array. + */ + private void putBytesPositiveToIntegers(byte[] byteValues, int offset, int length) { + int bytesLen = length; + int highBytes = bytesLen & 3; + numberLength = (bytesLen >> 2) + ((highBytes == 0) ? 
0 : 1); + digits = new int[numberLength]; + int i = 0; + // Put bytes to the int array starting from the end of the byte array + while (bytesLen > highBytes) { + digits[i++] = (byteValues[--bytesLen + offset] & 0xFF) + | (byteValues[--bytesLen + offset] & 0xFF) << 8 + | (byteValues[--bytesLen + offset] & 0xFF) << 16 + | (byteValues[--bytesLen + offset] & 0xFF) << 24; + } + // Put the first bytes in the highest element of the int array + for (int j = offset; j < bytesLen + offset; j++) { + digits[i] = (digits[i] << 8) | (byteValues[j] & 0xFF); + } + } +} From 6365ef422d41ade479742e00b5d674a80e3e08de Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 7 Jun 2024 20:49:48 -0500 Subject: [PATCH 058/219] Rework js interfaces to follow the new structure --- .../web/client/api/JsPartitionedTable.java | 8 +- .../io/deephaven/web/client/api/JsTable.java | 8 +- .../deephaven/web/client/api/TableData.java | 49 +- .../AbstractTableSubscription.java | 48 +- .../subscription/SubscriptionTableData.java | 696 ++---------------- .../TableViewportSubscription.java | 77 +- .../client/api/subscription/ViewportData.java | 173 +---- .../web/client/api/widget/plot/ChartData.java | 11 +- .../api/widget/plot/FigureSubscription.java | 7 +- 9 files changed, 90 insertions(+), 987 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/JsPartitionedTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/JsPartitionedTable.java index 38a184716a6..016522f310d 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/JsPartitionedTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/JsPartitionedTable.java @@ -19,7 +19,7 @@ import io.deephaven.web.client.api.barrage.def.ColumnDefinition; import io.deephaven.web.client.api.barrage.def.InitialTableDefinition; import io.deephaven.web.client.api.lifecycle.HasLifecycle; -import io.deephaven.web.client.api.subscription.SubscriptionTableData; +import io.deephaven.web.client.api.subscription.AbstractTableSubscription; import io.deephaven.web.client.api.subscription.TableSubscription; import io.deephaven.web.client.api.widget.JsWidget; import io.deephaven.web.client.fu.LazyPromise; @@ -155,11 +155,11 @@ private Promise subscribeToKeys() { private void handleKeys(Event update) { // noinspection unchecked - CustomEvent event = - (CustomEvent) update; + CustomEvent event = + (CustomEvent) update; // We're only interested in added rows, send an event indicating the new keys that are available - SubscriptionTableData.UpdateEventData eventData = event.detail; + AbstractTableSubscription.UpdateEventData eventData = event.detail; RangeSet added = eventData.getAdded().getRange(); added.indexIterator().forEachRemaining((long index) -> { // extract the key to use diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java index d7fc4df1aaf..4a1d2eabdee 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java @@ -47,13 +47,11 @@ import io.deephaven.web.client.api.state.StateCache; import io.deephaven.web.client.api.subscription.TableSubscription; import io.deephaven.web.client.api.subscription.TableViewportSubscription; -import io.deephaven.web.client.api.subscription.ViewportData; import io.deephaven.web.client.api.subscription.ViewportRow; import io.deephaven.web.client.api.tree.JsRollupConfig; import 
io.deephaven.web.client.api.tree.JsTreeTable; import io.deephaven.web.client.api.tree.JsTreeTableConfig; import io.deephaven.web.client.api.widget.JsWidget; -import io.deephaven.web.client.fu.JsData; import io.deephaven.web.client.fu.JsItr; import io.deephaven.web.client.fu.JsLog; import io.deephaven.web.client.fu.LazyPromise; @@ -147,15 +145,11 @@ public class JsTable extends HasLifecycle implements HasTableBinding, JoinableTa // change in some table data INTERNAL_EVENT_SIZELISTENER = "sizelistener-internal"; - // Amount of debounce to use when eating snapshot events. - public static final int DEBOUNCE_TIME = 20; public static final int MAX_BATCH_TIME = 600_000; private final WorkerConnection workerConnection; - private Map subscriptions = new HashMap<>(); - @Deprecated // TODO refactor this inside of the viewportSubscription type - private ViewportData currentViewportData; + private final Map subscriptions = new HashMap<>(); private ClientTableState lastVisibleState; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/TableData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/TableData.java index 8167f23b5cc..87ea4eaa16a 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/TableData.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/TableData.java @@ -8,6 +8,7 @@ import com.vertispan.tsdefs.annotations.TsUnion; import com.vertispan.tsdefs.annotations.TsUnionMember; import elemental2.core.JsArray; +import io.deephaven.web.client.api.subscription.AbstractTableSubscription; import jsinterop.annotations.JsMethod; import jsinterop.annotations.JsOverlay; import jsinterop.annotations.JsPackage; @@ -30,7 +31,7 @@ */ @TsName(namespace = "dh") public interface TableData { - public static final int NO_ROW_FORMAT_COLUMN = -1; + int NO_ROW_FORMAT_COLUMN = -1; /** * TS type union to allow either "int" or "LongWrapper" to be passed as an argument for various methods. @@ -61,27 +62,23 @@ default int asInt() { } } - @JsProperty - JsRangeSet getFullIndex(); - - @JsProperty - JsRangeSet getAdded(); - - @JsProperty - JsRangeSet getRemoved(); - - @JsProperty - JsRangeSet getModified(); - - // @JsProperty - // JsShiftData getShifts(); - @JsProperty JsArray getColumns(); + /** + * A lazily computed array of all rows in the entire table + * + * @return {@link AbstractTableSubscription.SubscriptionRow} array. + */ @JsProperty JsArray<@TsTypeRef(Row.class) ? extends Row> getRows(); + /** + * Reads a row object from the table, from which any subscribed column can be read. + * + * @param index the position or key to access + * @return the row at the given location + */ @JsMethod default Row get(RowPositionUnion index) { if (index.isLongWrapper()) { @@ -94,6 +91,13 @@ default Row get(RowPositionUnion index) { Row get(int index); + /** + * Reads a specific cell from the table, by row key and column. + * + * @param index the row in the table to get data from + * @param column the column to read + * @return the value in the table + */ @JsMethod default Any getData(RowPositionUnion index, Column column) { if (index.isLongWrapper()) { @@ -106,6 +110,13 @@ default Any getData(RowPositionUnion index, Column column) { Any getData(long index, Column column); + /** + * The server-specified Format to use for the cell at the given position. 
+ * + * @param index the row to read + * @param column the column to read + * @return a Format instance with any server-specified details + */ @JsMethod default Format getFormat(RowPositionUnion index, Column column) { if (index.isLongWrapper()) { @@ -118,12 +129,6 @@ default Format getFormat(RowPositionUnion index, Column column) { Format getFormat(long index, Column column); - /** - * The position of the first returned row, null if this data is not for a viewport. - */ - @JsProperty - Double getOffset(); - @TsName(namespace = "dh") interface Row { @JsProperty diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java index 80de0cd6f5f..48615a08e8d 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java @@ -192,9 +192,7 @@ public Format getFormat(Column column) { } - @TsInterface - @TsName(name = "SubscriptionTableData", namespace = "dh") - public class UpdateEventData implements TableData { + public class UpdateEventData implements SubscriptionTableData, ViewportData { private final JsRangeSet added; private final JsRangeSet removed; private final JsRangeSet modified; @@ -221,11 +219,6 @@ public Double getOffset() { return offset; } - /** - * A lazily computed array of all rows in the entire table - * - * @return {@link SubscriptionRow} array. - */ @Override public JsArray<@TsTypeRef(SubscriptionRow.class) ? extends SubscriptionRow> getRows() { if (allRows == null) { @@ -251,12 +244,6 @@ public Row get(int index) { return this.get((long) index); } - /** - * Reads a row object from the table, from which any subscribed column can be read - * - * @param index - * @return {@link SubscriptionRow} - */ @Override public SubscriptionRow get(long index) { return makeRow(index); @@ -267,25 +254,11 @@ public Any getData(int index, Column column) { return getData((long) index, column); } - /** - * a specific cell from the table, from the specified row and column - * - * @param index - * @param column - * @return Any - */ @Override - public Any getData(long index, Column column) { - return barrageSubscription.getData(index, column.getIndex()); + public Any getData(long key, Column column) { + return barrageSubscription.getData(key, column.getIndex()); } - /** - * the Format to use for a cell from the specified row and column - * - * @param index - * @param column - * @return {@link Format} - */ @Override public Format getFormat(int index, Column column) { return getFormat((long) index, column); @@ -317,31 +290,16 @@ public JsArray getColumns() { return columns; } - /** - * The ordered set of row indexes added since the last update - * - * @return dh.RangeSet - */ @Override public JsRangeSet getAdded() { return added; } - /** - * The ordered set of row indexes removed since the last update - * - * @return dh.RangeSet - */ @Override public JsRangeSet getRemoved() { return removed; } - /** - * The ordered set of row indexes updated since the last update - * - * @return dh.RangeSet - */ @Override public JsRangeSet getModified() { return modified; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/SubscriptionTableData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/SubscriptionTableData.java index fb20e30494d..402bb08bea9 
100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/SubscriptionTableData.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/SubscriptionTableData.java @@ -5,670 +5,56 @@ import com.vertispan.tsdefs.annotations.TsInterface; import com.vertispan.tsdefs.annotations.TsName; -import elemental2.core.JsArray; -import elemental2.dom.CustomEventInit; -import io.deephaven.web.client.api.*; -import io.deephaven.web.client.fu.JsSettings; -import io.deephaven.web.shared.data.*; -import io.deephaven.web.shared.data.columns.ColumnData; -import jsinterop.annotations.JsFunction; +import io.deephaven.web.client.api.JsRangeSet; +import io.deephaven.web.client.api.TableData; import jsinterop.annotations.JsProperty; -import jsinterop.base.Any; -import jsinterop.base.Js; -import jsinterop.base.JsArrayLike; -import javax.annotation.Nullable; -import java.math.BigDecimal; -import java.math.BigInteger; -import java.util.NavigableSet; -import java.util.PrimitiveIterator; -import java.util.TreeMap; -import static io.deephaven.web.client.api.subscription.ViewportData.NO_ROW_FORMAT_COLUMN; - -@Deprecated -public class SubscriptionTableData { - @JsFunction - private interface ArrayCopy { - @SuppressWarnings("unusable-by-js") - void copyTo(Object destArray, long destPos, Object srcArray, int srcPos); - } - - private final JsArray columns; - private final int rowStyleColumn; - private final HasEventHandling evented; - - // the actual rows present on the client, in their correct order - private RangeSet index; - - // mappings from the index to the position of a row in the data array - private TreeMap redirectedIndexes; - - // rows in the data columns that no longer contain data and can be reused - private RangeSet reusableDestinations; - - // array of data columns, cast each to a jsarray to read rows - private Object[] data; - - public SubscriptionTableData(JsArray columns, int rowStyleColumn, HasEventHandling evented) { - this.columns = columns; - this.rowStyleColumn = rowStyleColumn; - this.evented = evented; - } - - // TODO support this being called multiple times so we can keep viewports going without clearing the data - public TableData handleSnapshot(TableSnapshot snapshot) { - // when changing snapshots we should actually rewrite the columns, possibly emulate ViewportData more? 
- ColumnData[] dataColumns = snapshot.getDataColumns(); - data = new Object[dataColumns.length]; - reusableDestinations = RangeSet.empty(); - redirectedIndexes = new TreeMap<>(); - index = snapshot.getIncludedRows(); - - long includedRowCount = snapshot.getIncludedRows().size(); - RangeSet destination = freeRows(includedRowCount); - - for (int index = 0; index < dataColumns.length; index++) { - ColumnData dataColumn = dataColumns[index]; - if (dataColumn == null) { - // no data in this column, wasn't requested - continue; - } - - final int i = index; - Column column = columns.find((c, i1, i2) -> c.getIndex() == i); - - ArrayCopy arrayCopy = arrayCopyFuncForColumn(column); - - Object[] localCopy = new Object[(int) includedRowCount]; - data[index] = localCopy; - PrimitiveIterator.OfLong destIter = destination.indexIterator(); - PrimitiveIterator.OfLong indexIter = snapshot.getIncludedRows().indexIterator(); - int j = 0; - while (indexIter.hasNext()) { - assert destIter.hasNext(); - long dest = destIter.nextLong(); - redirectedIndexes.put(indexIter.nextLong(), dest); - arrayCopy.copyTo(localCopy, dest, dataColumn.getData(), j++); - } - assert !destIter.hasNext(); - } - - return notifyUpdates(index, RangeSet.empty(), RangeSet.empty()); - } +/** + * Event data, describing the indexes that were added/removed/updated, and providing access to Rows (and thus data in + * columns) either by index, or scanning the complete present index. + *

+ * This class supports two ways of reading the table - checking the changes made since the last update, and reading all + * data currently in the table. While it is more expensive to always iterate over every single row in the table, it may + * in some cases actually be cheaper than maintaining state separately and updating only the changes, though both + * options should be considered. + *
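+ * <p>
+ * For example, to visit only the rows added by this update (a sketch, not part of this patch; {@code update} and
+ * {@code col} are assumed names):
+ *
+ * <pre>{@code
+ * update.getAdded().getRange().rangeIterator().forEachRemaining(r -> {
+ *     for (long key = r.getFirst(); key <= r.getLast(); key++) {
+ *         Any value = update.getData(key, col); // read by key, not by position
+ *     }
+ * });
+ * }</pre>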

+ * The RangeSet objects allow iterating over the LongWrapper indexes in the table. Note that these "indexes" are not + * necessarily contiguous and may be negative, and represent some internal state on the server, allowing it to keep + * track of data efficiently. Those LongWrapper objects can be passed to the various methods on this instance to read + * specific rows or cells out of the table. + */ +@TsInterface +@TsName(name = "SubscriptionTableData", namespace = "dh") +public interface SubscriptionTableData extends TableData { + + + @JsProperty + JsRangeSet getFullIndex(); /** - * Helper to avoid appending many times when modifying indexes. The append() method should be called for each key - * _in order_ to ensure that RangeSet.addRange isn't called excessively. When no more items will be added, flush() - * must be called. + * The ordered set of row indexes added since the last update. + * + * @return the rangeset of rows added */ - private static class RangeSetAppendHelper { - private final RangeSet rangeSet; - - private long currentFirst = -1; - private long currentLast; - - public RangeSetAppendHelper(final RangeSet rangeSet) { - this.rangeSet = rangeSet; - } - - public void append(long key) { - assert key >= 0; - - if (currentFirst == -1) { - // first key to be added, move both first and last - currentFirst = key; - currentLast = key; - - return; - } - - if (key == currentLast + 1) { - // key appends to our current range - currentLast = key; - } else if (key == currentFirst - 1) { - // key appends to our current range - currentFirst = key; - } else { - // existing range doesn't match the new item, finish the old range and start a new one - rangeSet.addRange(new Range(currentFirst, currentLast)); - currentFirst = key; - currentLast = key; - } - } - - public void flush() { - if (currentFirst != -1) { - rangeSet.addRange(new Range(currentFirst, currentLast)); - currentFirst = -1; - } - } - } - - public TableData handleDelta(DeltaUpdates delta) { - // delete old data, track slots freed up. 
we do this by row since they might be non-contiguous or out of order - RangeSetAppendHelper reusableHelper = new RangeSetAppendHelper(reusableDestinations); - delta.getRemoved().indexIterator().forEachRemaining((long index) -> { - long dest = redirectedIndexes.remove(index); - reusableHelper.append(dest); - // TODO consider trimming the columns down too, and truncating the reusable slots at the end - }); - reusableHelper.flush(); - // clean up index by ranges, not by row - delta.getRemoved().rangeIterator().forEachRemaining(index::removeRange); - - // Shift moved rows in the redir index - boolean hasReverseShift = false; - final ShiftedRange[] shiftedRanges = delta.getShiftedRanges(); - RangeSetAppendHelper shifter = new RangeSetAppendHelper(index); - for (int i = shiftedRanges.length - 1; i >= 0; --i) { - final ShiftedRange shiftedRange = shiftedRanges[i]; - final long offset = shiftedRange.getDelta(); - if (offset < 0) { - hasReverseShift = true; - continue; - } - index.removeRange(shiftedRange.getRange()); - final NavigableSet toMove = redirectedIndexes.navigableKeySet() - .subSet(shiftedRange.getRange().getFirst(), true, shiftedRange.getRange().getLast(), true); - // iterate backward and move them forward - for (Long key : toMove.descendingSet()) { - long shiftedKey = key + offset; - Long oldValue = redirectedIndexes.put(shiftedKey, redirectedIndexes.remove(key)); - assert oldValue == null : shiftedKey + " already has a value, " + oldValue; - shifter.append(shiftedKey); - } - } - if (hasReverseShift) { - for (int i = 0; i < shiftedRanges.length; ++i) { - final ShiftedRange shiftedRange = shiftedRanges[i]; - final long offset = shiftedRange.getDelta(); - if (offset > 0) { - continue; - } - index.removeRange(shiftedRange.getRange()); - final NavigableSet toMove = redirectedIndexes.navigableKeySet() - .subSet(shiftedRange.getRange().getFirst(), true, shiftedRange.getRange().getLast(), true); - // iterate forward and move them backward - for (Long key : toMove) { - long shiftedKey = key + offset; - Long oldValue = redirectedIndexes.put(shiftedKey, redirectedIndexes.remove(key)); - assert oldValue == null : shiftedKey + " already has a value, " + oldValue; - shifter.append(shiftedKey); - } - } - } - shifter.flush(); - - // Find space for the rows we're about to add. 
We must not adjust the index until this is done, it is used - // to see where the end of the data is - RangeSet addedDestination = freeRows(delta.getAdded().size()); - // Within each column, append additions - DeltaUpdates.ColumnAdditions[] additions = delta.getSerializedAdditions(); - for (int i = 0; i < additions.length; i++) { - DeltaUpdates.ColumnAdditions addedColumn = delta.getSerializedAdditions()[i]; - Column column = columns.find((c, i1, i2) -> c.getIndex() == addedColumn.getColumnIndex()); - - ArrayCopy arrayCopy = arrayCopyFuncForColumn(column); - - PrimitiveIterator.OfLong addedIndexes = delta.getAdded().indexIterator(); - PrimitiveIterator.OfLong destIter = addedDestination.indexIterator(); - int j = 0; - while (addedIndexes.hasNext()) { - long origIndex = addedIndexes.nextLong(); - assert delta.getIncludedAdditions().contains(origIndex); - assert destIter.hasNext(); - long dest = destIter.nextLong(); - Long old = redirectedIndexes.put(origIndex, dest); - assert old == null || old == dest; - arrayCopy.copyTo(data[addedColumn.getColumnIndex()], dest, addedColumn.getValues().getData(), j++); - } - } - - // Update the index to reflect the added items - delta.getAdded().rangeIterator().forEachRemaining(index::addRange); - - // Within each column, apply modifications - DeltaUpdates.ColumnModifications[] modifications = delta.getSerializedModifications(); - RangeSet allModified = new RangeSet(); - for (int i = 0; i < modifications.length; ++i) { - final DeltaUpdates.ColumnModifications modifiedColumn = modifications[i]; - if (modifiedColumn == null) { - continue; - } - - modifiedColumn.getRowsIncluded().rangeIterator().forEachRemaining(allModified::addRange); - Column column = columns.find((c, i1, i2) -> c.getIndex() == modifiedColumn.getColumnIndex()); - - ArrayCopy arrayCopy = arrayCopyFuncForColumn(column); - - PrimitiveIterator.OfLong modifiedIndexes = modifiedColumn.getRowsIncluded().indexIterator(); - int j = 0; - while (modifiedIndexes.hasNext()) { - long origIndex = modifiedIndexes.nextLong(); - arrayCopy.copyTo(data[modifiedColumn.getColumnIndex()], redirectedIndexes.get(origIndex), - modifiedColumn.getValues().getData(), j++); - } - } - - // Check that the index sizes make sense - assert redirectedIndexes.size() == index.size(); - // Note that we can't do this assert, since we don't truncate arrays, we just leave nulls at the end - // assert Js.asArrayLike(data[0]).getLength() == redirectedIndexes.size(); - - return notifyUpdates(delta.getAdded(), delta.getRemoved(), allModified); - } - - private TableData notifyUpdates(RangeSet added, RangeSet removed, RangeSet modified) { - UpdateEventData detail = new UpdateEventData(added, removed, modified); - if (evented != null) { - CustomEventInit event = CustomEventInit.create(); - event.setDetail(detail); - evented.fireEvent(TableSubscription.EVENT_UPDATED, event); - } - return detail; - } - - private ArrayCopy arrayCopyFuncForColumn(@Nullable Column column) { - final String type = column != null ? 
column.getType() : ""; - switch (type) { - case "long": - return (destArray, destPos, srcArray, srcPos) -> { - final long value = Js.asArrayLike(srcArray).getAtAsAny(srcPos).asLong(); - if (value == QueryConstants.NULL_LONG) { - Js.asArrayLike(destArray).setAt((int) destPos, null); - } else { - Js.asArrayLike(destArray).setAt((int) destPos, LongWrapper.of(value)); - } - }; - case "java.time.Instant": - case "java.time.ZonedDateTime": - return (destArray, destPos, srcArray, srcPos) -> { - long value = Js.asArrayLike(srcArray).getAtAsAny(srcPos).asLong(); - if (value == QueryConstants.NULL_LONG) { - Js.asArrayLike(destArray).setAt((int) destPos, null); - } else { - Js.asArrayLike(destArray).setAt((int) destPos, new DateWrapper(value)); - } - }; - case "java.math.BigDecimal": - return (destArray, destPos, srcArray, srcPos) -> { - final BigDecimal value = Js.cast(Js.asArrayLike(srcArray).getAt(srcPos)); - if (value == null) { - Js.asArrayLike(destArray).setAt((int) destPos, null); - } else { - Js.asArrayLike(destArray).setAt((int) destPos, new BigDecimalWrapper(value)); - } - }; - case "java.math.BigInteger": - return (destArray, destPos, srcArray, srcPos) -> { - final BigInteger value = Js.cast(Js.asArrayLike(srcArray).getAt(srcPos)); - if (value == null) { - Js.asArrayLike(destArray).setAt((int) destPos, null); - } else { - Js.asArrayLike(destArray).setAt((int) destPos, new BigIntegerWrapper(value)); - } - }; - case "java.time.LocalDate": - return (destArray, destPos, srcArray, srcPos) -> { - final LocalDate value = Js.cast(Js.asArrayLike(srcArray).getAt(srcPos)); - if (value == null) { - Js.asArrayLike(destArray).setAt((int) destPos, null); - } else { - Js.asArrayLike(destArray).setAt((int) destPos, new LocalDateWrapper(value)); - } - }; - case "java.time.LocalTime": - return (destArray, destPos, srcArray, srcPos) -> { - final LocalTime value = Js.cast(Js.asArrayLike(srcArray).getAt(srcPos)); - if (value == null) { - Js.asArrayLike(destArray).setAt((int) destPos, null); - } else { - Js.asArrayLike(destArray).setAt((int) destPos, new LocalTimeWrapper(value)); - } - }; - case "java.lang.Boolean": - return (destArray, destPos, srcArray, srcPos) -> { - final Any value = Js.asArrayLike(srcArray).getAtAsAny(srcPos); - - if (value == null) { - Js.asArrayLike(destArray).setAt((int) destPos, null); - } else if (value.asBoolean()) { - Js.asArrayLike(destArray).setAt((int) destPos, true); - } else { - Js.asArrayLike(destArray).setAt((int) destPos, false); - } - }; - case "int": - return (destArray, destPos, srcArray, srcPos) -> { - final int value = Js.asArrayLike(srcArray).getAtAsAny(srcPos).asInt(); - if (value == QueryConstants.NULL_INT) { - Js.asArrayLike(destArray).setAt((int) destPos, null); - } else { - Js.asArrayLike(destArray).setAt((int) destPos, value); - } - }; - case "byte": - return (destArray, destPos, srcArray, srcPos) -> { - final byte value = Js.asArrayLike(srcArray).getAtAsAny(srcPos).asByte(); - if (value == QueryConstants.NULL_BYTE) { - Js.asArrayLike(destArray).setAt((int) destPos, null); - } else { - Js.asArrayLike(destArray).setAt((int) destPos, value); - } - }; - case "short": - return (destArray, destPos, srcArray, srcPos) -> { - final short value = Js.asArrayLike(srcArray).getAtAsAny(srcPos).asShort(); - if (value == QueryConstants.NULL_SHORT) { - Js.asArrayLike(destArray).setAt((int) destPos, null); - } else { - Js.asArrayLike(destArray).setAt((int) destPos, value); - } - }; - case "double": - return (destArray, destPos, srcArray, srcPos) -> { - final double value = 
Js.asArrayLike(srcArray).getAtAsAny(srcPos).asDouble(); - if (value == QueryConstants.NULL_DOUBLE) { - Js.asArrayLike(destArray).setAt((int) destPos, null); - } else { - Js.asArrayLike(destArray).setAt((int) destPos, value); - } - }; - case "float": - return (destArray, destPos, srcArray, srcPos) -> { - final float value = Js.asArrayLike(srcArray).getAtAsAny(srcPos).asFloat(); - if (value == QueryConstants.NULL_FLOAT) { - Js.asArrayLike(destArray).setAt((int) destPos, null); - } else { - Js.asArrayLike(destArray).setAt((int) destPos, value); - } - }; - case "char": - return (destArray, destPos, srcArray, srcPos) -> { - final char value = Js.asArrayLike(srcArray).getAtAsAny(srcPos).asChar(); - if (value == QueryConstants.NULL_CHAR) { - Js.asArrayLike(destArray).setAt((int) destPos, null); - } else { - Js.asArrayLike(destArray).setAt((int) destPos, value); - } - }; - default: - // exit so we can handle null also in the method's final return - } - return (destArray, destPos, srcArray, srcPos) -> { - // boring column or format data, just copy it - Js.asArrayLike(destArray).setAt((int) destPos, Js.asArrayLike(srcArray).getAt(srcPos)); - }; - } - - private RangeSet freeRows(long required) { - if (required == 0) { - return RangeSet.empty(); - } - long existingSlotsToReuse = reusableDestinations.size(); - if (existingSlotsToReuse > required) { - // only take some of the ranges from the reusable list - RangeSet reused = RangeSet.empty(); - long taken = 0; - RangeSet stillUnused = RangeSet.empty(); - // TODO this could be more efficient, iterating entire ranges until we only need a partial range - PrimitiveIterator.OfLong iterator = reusableDestinations.indexIterator(); - while (taken < required) { - assert iterator.hasNext(); - long value = iterator.nextLong(); - reused.addRange(new Range(value, value)); - taken++; - } - assert taken == required; - while (iterator.hasNext()) { - long value = iterator.nextLong(); - stillUnused.addRange(new Range(value, value)); - } - reusableDestinations = stillUnused; - assert required == reused.size(); - return reused; - } - // take all ranges from the reusable list (plus make more if needed) - RangeSet created = reusableDestinations; - if (existingSlotsToReuse < required) { - long nextIndex; - if (created.isEmpty()) { - if (index.isEmpty()) { - nextIndex = 0; - } else { - nextIndex = redirectedIndexes.size(); - } - } else if (index.isEmpty()) { - nextIndex = created.getLastRow() + 1; - } else { - nextIndex = Math.max(created.getLastRow(), index.getLastRow()) + 1; - } - created.addRange(new Range(nextIndex, nextIndex + required - existingSlotsToReuse - 1)); - } - - reusableDestinations = RangeSet.empty(); - assert required == created.size(); - return created; - } - - @TsInterface - @TsName(namespace = "dh") - public class SubscriptionRow implements TableData.Row { - private final long index; - public LongWrapper indexCached; - - public SubscriptionRow(long index) { - this.index = index; - } - - @Override - public LongWrapper getIndex() { - if (indexCached == null) { - indexCached = LongWrapper.of(index); - } - return indexCached; - } - - @Override - public Any get(Column column) { - int redirectedIndex = (int) (long) redirectedIndexes.get(this.index); - JsArrayLike columnData = Js.asArrayLike(data[column.getIndex()]); - return columnData.getAtAsAny(redirectedIndex); - } - - @Override - public Format getFormat(Column column) { - long cellColors = 0; - long rowColors = 0; - String numberFormat = null; - String formatString = null; - int redirectedIndex = (int) 
(long) redirectedIndexes.get(this.index); - if (column.getStyleColumnIndex() != null) { - JsArray colors = Js.uncheckedCast(data[column.getStyleColumnIndex()]); - cellColors = colors.getAtAsAny(redirectedIndex).asLong(); - } - if (rowStyleColumn != NO_ROW_FORMAT_COLUMN) { - JsArray rowStyle = Js.uncheckedCast(data[rowStyleColumn]); - rowColors = rowStyle.getAtAsAny(redirectedIndex).asLong(); - } - if (column.getFormatStringColumnIndex() != null) { - JsArray formatStrings = Js.uncheckedCast(data[column.getFormatStringColumnIndex()]); - numberFormat = formatStrings.getAtAsAny(redirectedIndex).asString(); - } - if (column.getFormatStringColumnIndex() != null) { - JsArray formatStrings = Js.uncheckedCast(data[column.getFormatStringColumnIndex()]); - formatString = formatStrings.getAtAsAny(redirectedIndex).asString(); - } - return new Format(cellColors, rowColors, numberFormat, formatString); - } - } - + @JsProperty + JsRangeSet getAdded(); /** - * Event data, describing the indexes that were added/removed/updated, and providing access to Rows (and thus data - * in columns) either by index, or scanning the complete present index. + * The ordered set of row indexes removed since the last update * - * This class supports two ways of reading the table - checking the changes made since the last update, and reading - * all data currently in the table. While it is more expensive to always iterate over every single row in the table, - * it may in some cases actually be cheaper than maintaining state separately and updating only the changes, though - * both options should be considered. - * - * The RangeSet objects allow iterating over the LongWrapper indexes in the table. Note that these "indexes" are not - * necessarily contiguous and may be negative, and represent some internal state on the server, allowing it to keep - * track of data efficiently. Those LongWrapper objects can be passed to the various methods on this instance to - * read specific rows or cells out of the table. + * @return the rangeset of removed rows */ - @TsInterface - @TsName(name = "SubscriptionTableData", namespace = "dh") - public class UpdateEventData implements TableData { - private JsRangeSet added; - private JsRangeSet removed; - private JsRangeSet modified; - - // cached copy in case it was requested, could be requested again - private JsArray allRows; + @JsProperty + JsRangeSet getRemoved(); - public UpdateEventData(RangeSet added, RangeSet removed, RangeSet modified) { - this.added = new JsRangeSet(added); - this.removed = new JsRangeSet(removed); - this.modified = new JsRangeSet(modified); - } - - /** - * A lazily computed array of all rows in the entire table - * - * @return {@link SubscriptionRow} array. 
- */ - @Override - public JsArray getRows() { - if (allRows == null) { - allRows = new JsArray<>(); - index.indexIterator().forEachRemaining((long index) -> { - allRows.push(new SubscriptionRow(index)); - }); - if (JsSettings.isDevMode()) { - assert allRows.length == index.size(); - } - } - return allRows; - } - - @Override - public Row get(int index) { - return this.get((long) index); - } - - /** - * Reads a row object from the table, from which any subscribed column can be read - * - * @param index - * @return {@link SubscriptionRow} - */ - @Override - public SubscriptionRow get(long index) { - return new SubscriptionRow(index); - } - - @Override - public Any getData(int index, Column column) { - return getData((long) index, column); - } - - /** - * a specific cell from the table, from the specified row and column - * - * @param index - * @param column - * @return Any - */ - @Override - public Any getData(long index, Column column) { - int redirectedIndex = (int) (long) redirectedIndexes.get(index); - JsArrayLike columnData = Js.asArrayLike(data[column.getIndex()]); - return columnData.getAtAsAny(redirectedIndex); - } - - /** - * the Format to use for a cell from the specified row and column - * - * @param index - * @param column - * @return {@link Format} - */ - @Override - public Format getFormat(int index, Column column) { - return getFormat((long) index, column); - } - - @Override - public Format getFormat(long index, Column column) { - long cellColors = 0; - long rowColors = 0; - String numberFormat = null; - String formatString = null; - int redirectedIndex = (int) (long) redirectedIndexes.get(index); - if (column.getStyleColumnIndex() != null) { - JsArray colors = Js.uncheckedCast(data[column.getStyleColumnIndex()]); - cellColors = colors.getAtAsAny(redirectedIndex).asLong(); - } - if (rowStyleColumn != NO_ROW_FORMAT_COLUMN) { - JsArray rowStyle = Js.uncheckedCast(data[rowStyleColumn]); - rowColors = rowStyle.getAtAsAny(redirectedIndex).asLong(); - } - if (column.getFormatStringColumnIndex() != null) { - JsArray formatStrings = Js.uncheckedCast(data[column.getFormatStringColumnIndex()]); - numberFormat = formatStrings.getAtAsAny(redirectedIndex).asString(); - } - if (column.getFormatStringColumnIndex() != null) { - JsArray formatStrings = Js.uncheckedCast(data[column.getFormatStringColumnIndex()]); - formatString = formatStrings.getAtAsAny(redirectedIndex).asString(); - } - return new Format(cellColors, rowColors, numberFormat, formatString); - } - - @Override - public JsArray getColumns() { - return columns; - } - - /** - * The ordered set of row indexes added since the last update - * - * @return dh.RangeSet - */ - @JsProperty - public JsRangeSet getAdded() { - return added; - } - - /** - * The ordered set of row indexes removed since the last update - * - * @return dh.RangeSet - */ - @JsProperty - public JsRangeSet getRemoved() { - return removed; - } - - /** - * The ordered set of row indexes updated since the last update - * - * @return dh.RangeSet - */ - @JsProperty - public JsRangeSet getModified() { - return modified; - } - - @JsProperty - public JsRangeSet getFullIndex() { - return new JsRangeSet(index); - } + /** + * The ordered set of row indexes updated since the last update. + * + * @return the rangeset of modified rows + */ + @JsProperty + JsRangeSet getModified(); - @Override - public Double getOffset() { - return null; - } - } + // @JsProperty + // JsShiftData getShifts(); } diff --git
a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java index 78f87c1415a..1245ee4d323 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java @@ -327,80 +327,9 @@ public double size() { public Promise snapshot(JsRangeSet rows, Column[] columns) { retainForExternalUse(); // TODO #1039 slice rows and drop columns - // final ClientTableState state = original.lastVisibleState(); - String[] columnTypes = Arrays.stream(state().getTableDef().getColumns()) - .map(ColumnDefinition::getType) - .toArray(String[]::new); - - final BitSet columnBitset = state().makeBitset(columns); - return Callbacks.promise(this, callback -> { - WorkerConnection connection = connection(); - BiDiStream stream = connection.streamFactory().create( - headers -> connection.flightServiceClient().doExchange(headers), - (first, headers) -> connection.browserFlightServiceClient().openDoExchange(first, headers), - (next, headers, c) -> connection.browserFlightServiceClient().nextDoExchange(next, headers, - c::apply), - new FlightData()); - - FlatBufferBuilder doGetRequest = new FlatBufferBuilder(1024); - int columnsOffset = BarrageSnapshotRequest.createColumnsVector(doGetRequest, - columnBitset.toByteArray()); - int viewportOffset = BarrageSnapshotRequest.createViewportVector(doGetRequest, serializeRanges( - Collections.singleton(rows.getRange()))); - int serializationOptionsOffset = BarrageSnapshotOptions - .createBarrageSnapshotOptions(doGetRequest, ColumnConversionMode.Stringify, true, 0, 0); - int tableTicketOffset = - BarrageSnapshotRequest.createTicketVector(doGetRequest, - TypedArrayHelper.wrap(state().getHandle().getTicket())); - BarrageSnapshotRequest.startBarrageSnapshotRequest(doGetRequest); - BarrageSnapshotRequest.addTicket(doGetRequest, tableTicketOffset); - BarrageSnapshotRequest.addColumns(doGetRequest, columnsOffset); - BarrageSnapshotRequest.addSnapshotOptions(doGetRequest, serializationOptionsOffset); - BarrageSnapshotRequest.addViewport(doGetRequest, viewportOffset); - doGetRequest.finish(BarrageSnapshotRequest.endBarrageSnapshotRequest(doGetRequest)); - - FlightData request = new FlightData(); - request.setAppMetadata( - WebBarrageUtils.wrapMessage(doGetRequest, BarrageMessageType.BarrageSnapshotRequest)); - stream.send(request); - stream.end(); - stream.onData(flightData -> { - - Message message = Message.getRootAsMessage(TypedArrayHelper.wrap(flightData.getDataHeader_asU8())); - if (message.headerType() == MessageHeader.Schema) { - // ignore for now, we'll handle this later - return; - } - assert message.headerType() == MessageHeader.RecordBatch; - RecordBatch header = (RecordBatch) message.header(new RecordBatch()); - Uint8Array appMetadataBytes = flightData.getAppMetadata_asU8(); - BarrageUpdateMetadata update = null; - if (appMetadataBytes.length != 0) { - BarrageMessageWrapper barrageMessageWrapper = - BarrageMessageWrapper - .getRootAsBarrageMessageWrapper(TypedArrayHelper.wrap(appMetadataBytes)); - - update = BarrageUpdateMetadata.getRootAsBarrageUpdateMetadata( - barrageMessageWrapper.msgPayloadAsByteBuffer()); - } - TableSnapshot snapshot = WebBarrageUtils.createSnapshot(header, - WebBarrageUtils.typedArrayToAlignedLittleEndianByteBuffer(flightData.getDataBody_asU8()), - update, true, 
columnTypes); - callback.onSuccess(snapshot); - }); - stream.onStatus(status -> { - if (!status.isOk()) { - callback.onFailure(status.getDetails()); - } - }); - }).then(defer()).then(snapshot -> { - SubscriptionTableData pretendSubscription = new SubscriptionTableData(Js.uncheckedCast(columns), - state().getRowFormatColumn() == null ? NO_ROW_FORMAT_COLUMN - : state().getRowFormatColumn().getIndex(), - null); - TableData data = pretendSubscription.handleSnapshot(snapshot); - return Promise.resolve(data); - }).then(defer()); + + + return null; } /** diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java index abda3f2bc49..15894dd6ce5 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java @@ -26,6 +26,8 @@ * Extends {@link TableData}, but only contains data in the current viewport. The only API change from TableData is that * ViewportData also contains the offset to this data, so that the actual row number may be determined. *

+ * For viewport subscriptions, it is not necessary to read rows by key; reading by position is sufficient. + *
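+ * <p>
+ * A sketch of positional reads (names {@code vp} and {@code col} are assumed, not taken from this patch):
+ *
+ * <pre>{@code
+ * double offset = vp.getOffset(); // table position of the first row in this viewport
+ * Any first = vp.getData(0, col); // positional read of that first row
+ * }</pre>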

* Do not assume that the first row in `rows` is the first visible row, because extra rows may be provided for easier * scrolling without going to the server. */ @@ -38,175 +40,4 @@ public interface ViewportData extends TableData { */ @JsProperty Double getOffset(); - - /** - * Clean the data at the provided index - */ - @JsFunction - interface DataCleaner { - void clean(JsArray data, int index); - } - - @Deprecated - public static final int NO_ROW_FORMAT_COLUMN = -1; - - public class MergeResults { - public Set added = new HashSet<>(); - public Set modified = new HashSet<>(); - public Set removed = new HashSet<>(); - } - - private static DataCleaner getDataCleanerForColumnType(String columnType) { - switch (columnType) { - case "int": - return (data, i) -> { - int value = data.getAtAsAny(i).asInt(); - if (value == QueryConstants.NULL_INT) { - data.setAt(i, null); - } - }; - case "byte": - return (data, i) -> { - byte value = data.getAtAsAny(i).asByte(); - if (value == QueryConstants.NULL_BYTE) { - data.setAt(i, null); - } - }; - case "short": - return (data, i) -> { - short value = data.getAtAsAny(i).asShort(); - if (value == QueryConstants.NULL_SHORT) { - data.setAt(i, null); - } - }; - case "double": - return (data, i) -> { - double value = data.getAtAsAny(i).asDouble(); - if (value == QueryConstants.NULL_DOUBLE) { - data.setAt(i, null); - } - }; - case "float": - return (data, i) -> { - float value = data.getAtAsAny(i).asFloat(); - if (value == QueryConstants.NULL_FLOAT) { - data.setAt(i, null); - } - }; - case "char": - return (data, i) -> { - char value = data.getAtAsAny(i).asChar(); - if (value == QueryConstants.NULL_CHAR) { - data.setAt(i, null); - } - }; - default: - return null; - } - } - - public static Object cleanData(Object dataColumn, Column column) { - if (dataColumn == null) { - return null; - } - if (column == null) { - return dataColumn; - } - - switch (column.getType()) { - case "long": { - JsArray values = Js.uncheckedCast(dataColumn); - LongWrapper[] cleanData = new LongWrapper[values.length]; - for (int i = 0; i < values.length; i++) { - long value = values.getAtAsAny(i).asLong(); - if (value == QueryConstants.NULL_LONG) { - cleanData[i] = null; - } else { - cleanData[i] = LongWrapper.of(value); - } - } - return cleanData; - } - case "java.time.Instant": - case "java.time.ZonedDateTime": { - JsArray values = Js.uncheckedCast(dataColumn); - DateWrapper[] cleanData = new DateWrapper[values.length]; - for (int i = 0; i < values.length; i++) { - long value = values.getAtAsAny(i).asLong(); - if (value == QueryConstants.NULL_LONG) { - cleanData[i] = null; - } else { - cleanData[i] = new DateWrapper(value); - } - } - return cleanData; - } - case "java.math.BigDecimal": { - final JsArray values = Js.uncheckedCast(dataColumn); - final BigDecimalWrapper[] cleanData = new BigDecimalWrapper[values.length]; - for (int i = 0; i < values.length; i++) { - final BigDecimal value = Js.cast(values.getAt(i)); - if (value == null) { - cleanData[i] = null; - } else { - cleanData[i] = new BigDecimalWrapper(value); - } - } - return cleanData; - } - case "java.math.BigInteger": { - final JsArray values = Js.uncheckedCast(dataColumn); - final BigIntegerWrapper[] cleanData = new BigIntegerWrapper[values.length]; - for (int i = 0; i < values.length; i++) { - final BigInteger value = Js.cast(values.getAt(i)); - if (value == null) { - cleanData[i] = null; - } else { - cleanData[i] = new BigIntegerWrapper(value); - } - } - return cleanData; - } - case "java.time.LocalDate": { - final JsArray values 
= Js.uncheckedCast(dataColumn); - final LocalDateWrapper[] cleanData = new LocalDateWrapper[values.length]; - for (int i = 0; i < values.length; i++) { - final LocalDate value = Js.cast(values.getAt(i)); - if (value == null) { - cleanData[i] = null; - } else { - cleanData[i] = new LocalDateWrapper(value); - } - } - return cleanData; - } - case "java.time.LocalTime": { - final JsArray values = Js.uncheckedCast(dataColumn); - final LocalTimeWrapper[] cleanData = new LocalTimeWrapper[values.length]; - for (int i = 0; i < values.length; i++) { - final LocalTime value = Js.cast(values.getAt(i)); - if (value == null) { - cleanData[i] = null; - } else { - cleanData[i] = new LocalTimeWrapper(value); - } - } - return cleanData; - } - default: - DataCleaner dataCleaner = getDataCleanerForColumnType(column.getType()); - if (dataCleaner != null) { - JsArray values = Js.uncheckedCast(dataColumn); - JsArray cleanData = Js.uncheckedCast(JsArray.from((JsArrayLike) values)); - - for (int i = 0; i < values.length; i++) { - dataCleaner.clean(cleanData, i); - } - - return cleanData; - } else { - return dataColumn; - } - } - } } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/ChartData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/ChartData.java index 8a30411fd33..5c7ef400cb1 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/ChartData.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/ChartData.java @@ -8,7 +8,7 @@ import io.deephaven.web.client.api.JsRangeSet; import io.deephaven.web.client.api.JsTable; import io.deephaven.web.client.api.TableData; -import io.deephaven.web.client.api.subscription.SubscriptionTableData.UpdateEventData; +import io.deephaven.web.client.api.subscription.AbstractTableSubscription; import io.deephaven.web.client.fu.JsSettings; import io.deephaven.web.shared.data.Range; import io.deephaven.web.shared.fu.JsFunction; @@ -35,7 +35,7 @@ public ChartData(JsTable table) { this.table = table; } - public void update(UpdateEventData tableData) { + public void update(AbstractTableSubscription.UpdateEventData tableData) { Iterator addedIterator = tableData.getAdded().getRange().rangeIterator(); Iterator removedIterator = tableData.getRemoved().getRange().rangeIterator(); Iterator modifiedIterator = tableData.getModified().getRange().rangeIterator(); @@ -138,7 +138,7 @@ public void update(UpdateEventData tableData) { } } - private void replaceDataRange(UpdateEventData tableData, Range range, int offset) { + private void replaceDataRange(AbstractTableSubscription.UpdateEventData tableData, Range range, int offset) { // we don't touch the indexes at all, only need to walk each column and replace values in this range for (Entry, JsArray>> columnMap : cachedData.entrySet()) { Column col = table.findColumn(columnMap.getKey()); @@ -160,7 +160,7 @@ private void replaceDataRange(UpdateEventData tableData, Range range, int offset } } - private void insertDataRange(UpdateEventData tableData, Range range, int offset) { + private void insertDataRange(AbstractTableSubscription.UpdateEventData tableData, Range range, int offset) { // splice in the new indexes batchSplice(offset, asArray(indexes), longs(range)); @@ -195,7 +195,8 @@ private Any[] batchSplice(int offset, JsArray existingData, Any[] dataToIns return Js.uncheckedCast(existingData); } - private Any[] values(UpdateEventData tableData, JsFunction mapFunc, Column col, Range insertedRange) { + private Any[] 
values(AbstractTableSubscription.UpdateEventData tableData, JsFunction mapFunc, Column col, + Range insertedRange) { JsArray result = new JsArray<>(); if (mapFunc == null) { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/FigureSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/FigureSubscription.java index 2b1ed636861..bb3d6e0817d 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/FigureSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/FigureSubscription.java @@ -3,13 +3,12 @@ // package io.deephaven.web.client.api.widget.plot; -import elemental2.dom.CustomEvent; import elemental2.dom.CustomEventInit; import elemental2.promise.Promise; import io.deephaven.web.client.api.DateWrapper; import io.deephaven.web.client.api.JsTable; import io.deephaven.web.client.api.LongWrapper; -import io.deephaven.web.client.api.subscription.SubscriptionTableData; +import io.deephaven.web.client.api.subscription.AbstractTableSubscription; import io.deephaven.web.client.api.subscription.TableSubscription; import io.deephaven.web.client.fu.JsLog; @@ -259,8 +258,8 @@ private Promise subscribe(final Promise tablePromise this.currentData = new ChartData(table); sub.addEventListener(TableSubscription.EVENT_UPDATED, e -> { // refire with specifics for the columns that we're watching here, after updating data arrays - SubscriptionTableData.UpdateEventData subscriptionUpdateData = - (SubscriptionTableData.UpdateEventData) ((CustomEvent) e).detail; + AbstractTableSubscription.UpdateEventData subscriptionUpdateData = + (AbstractTableSubscription.UpdateEventData) e.detail; currentData.update(subscriptionUpdateData); CustomEventInit event = CustomEventInit.create(); From 170483ec7e13f59b3ff8174e490d22bea39726eb Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 7 Jun 2024 20:50:23 -0500 Subject: [PATCH 059/219] Proposed assert change --- .../barrage/chunk/ByteChunkInputStreamGenerator.java | 4 +--- .../barrage/chunk/CharChunkInputStreamGenerator.java | 4 +--- .../barrage/chunk/DoubleChunkInputStreamGenerator.java | 4 +--- .../barrage/chunk/FloatChunkInputStreamGenerator.java | 4 +--- .../barrage/chunk/IntChunkInputStreamGenerator.java | 4 +--- .../barrage/chunk/LongChunkInputStreamGenerator.java | 4 +--- .../barrage/chunk/ShortChunkInputStreamGenerator.java | 4 +--- 7 files changed, 7 insertions(+), 21 deletions(-) diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java index 59efe7bfb33..194e9982d95 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java @@ -268,9 +268,7 @@ static WritableByteChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); diff --git 
a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java index 212ef4c33bd..14fd3f8f79b 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java @@ -264,9 +264,7 @@ static WritableCharChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java index e99ee922cac..a4108804f34 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java @@ -269,9 +269,7 @@ static WritableDoubleChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java index 0daee6eb829..82046928670 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java @@ -268,9 +268,7 @@ static WritableFloatChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java index 667c1921c76..4646cc5bdc6 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java @@ -269,9 +269,7 @@ static WritableIntChunk 
extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java index d8da1b1807d..c0a76261d61 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java @@ -269,9 +269,7 @@ static WritableLongChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java index 4bd912e646b..364b04d0d5f 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java @@ -268,9 +268,7 @@ static WritableShortChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); From 097fbfe418a62c87a64b451580d4aacc53b7eeee Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 14 Jun 2024 12:15:19 -0500 Subject: [PATCH 060/219] Remove unused BarrageStreamReader param --- .../extensions/barrage/util/BarrageStreamReader.java | 1 - .../io/deephaven/extensions/barrage/util/StreamReader.java | 2 -- .../java/io/deephaven/client/impl/BarrageSnapshotImpl.java | 7 +------ .../io/deephaven/client/impl/BarrageSubscriptionImpl.java | 2 +- 4 files changed, 2 insertions(+), 10 deletions(-) diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java index 834cda0ae00..db0130479ec 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java @@ -59,7 +59,6 @@ public BarrageStreamReader(final LongConsumer deserializeTmConsumer) { @Override public BarrageMessage 
safelyParseFrom(final StreamReaderOptions options, - final BitSet expectedColumns, final ChunkType[] columnChunkTypes, final Class[] columnTypes, final Class[] componentTypes, diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/StreamReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/StreamReader.java index 93b35af0863..be389e894b6 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/StreamReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/StreamReader.java @@ -18,7 +18,6 @@ public interface StreamReader { * Converts an InputStream to a BarrageMessage in the context of the provided parameters. * * @param options the options related to parsing this message - * @param expectedColumns the columns expected to appear in the stream, null implies all columns * @param columnChunkTypes the types to use for each column chunk * @param columnTypes the actual type for the column * @param componentTypes the actual component type for the column @@ -26,7 +25,6 @@ public interface StreamReader { * @return a BarrageMessage filled out by the stream's payload */ BarrageMessage safelyParseFrom(final StreamReaderOptions options, - BitSet expectedColumns, ChunkType[] columnChunkTypes, Class[] columnTypes, Class[] componentTypes, diff --git a/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java b/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java index 6310f2cab1f..9709676f7aa 100644 --- a/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java +++ b/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java @@ -63,8 +63,6 @@ public class BarrageSnapshotImpl extends ReferenceCountedLivenessNode implements private final BarrageTable resultTable; private final CompletableFuture
<BarrageTable>
future; - private volatile BitSet expectedColumns; - private volatile int connected = 1; private static final AtomicIntegerFieldUpdater CONNECTED_UPDATER = AtomicIntegerFieldUpdater.newUpdater(BarrageSnapshotImpl.class, "connected"); @@ -209,9 +207,6 @@ public Future
partialTable( alreadyUsed = true; } - // store this for streamreader parser - expectedColumns = columns; - // Send the snapshot request: observer.onNext(FlightData.newBuilder() .setAppMetadata(ByteStringAccess.wrap(makeRequestInternal(viewport, columns, reverseViewport, options))) @@ -355,7 +350,7 @@ public InputStream stream(final BarrageMessage value) { @Override public BarrageMessage parse(final InputStream stream) { - return streamReader.safelyParseFrom(options, expectedColumns, columnChunkTypes, columnTypes, componentTypes, + return streamReader.safelyParseFrom(options, columnChunkTypes, columnTypes, componentTypes, stream); } } diff --git a/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSubscriptionImpl.java b/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSubscriptionImpl.java index 28d844780c4..0ddf9b9e87b 100644 --- a/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSubscriptionImpl.java +++ b/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSubscriptionImpl.java @@ -429,7 +429,7 @@ public InputStream stream(final BarrageMessage value) { @Override public BarrageMessage parse(final InputStream stream) { - return streamReader.safelyParseFrom(options, null, columnChunkTypes, columnTypes, componentTypes, stream); + return streamReader.safelyParseFrom(options, columnChunkTypes, columnTypes, componentTypes, stream); } } From 487dad4411c5665731e69fdd30592c8d92baa40b Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 14 Jun 2024 13:57:38 -0500 Subject: [PATCH 061/219] Non-Instant tests pass --- .../web/client/api/JsPartitionedTable.java | 4 +- .../client/api/barrage/WebBarrageUtils.java | 6 +- .../barrage/data/WebBarrageSubscription.java | 41 ++---- .../AbstractTableSubscription.java | 118 +++++++++++++++--- .../api/subscription/TableSubscription.java | 8 ++ .../TableViewportSubscription.java | 91 +++++--------- .../web/client/api/tree/JsTreeTable.java | 12 +- .../web/client/state/ClientTableState.java | 4 +- 8 files changed, 161 insertions(+), 123 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/JsPartitionedTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/JsPartitionedTable.java index 016522f310d..c8cc13e99ab 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/JsPartitionedTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/JsPartitionedTable.java @@ -96,8 +96,8 @@ public Promise refetch() { keys = result; keyColumnTypes = new ArrayList<>(); - InitialTableDefinition tableDefinition = WebBarrageUtils.readTableDefinition( - WebBarrageUtils.readSchemaMessage(descriptor.getConstituentDefinitionSchema_asU8())); + InitialTableDefinition tableDefinition = + WebBarrageUtils.readTableDefinition(descriptor.getConstituentDefinitionSchema_asU8()); ColumnDefinition[] columnDefinitions = tableDefinition.getColumns(); Column[] columns = new Column[0]; for (int i = 0; i < columnDefinitions.length; i++) { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java index e0cacec2e51..d62317e6102 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java @@ -69,6 +69,10 @@ public static Uint8Array emptyMessage() { return bbToUint8ArrayView(builder.dataBuffer()); } + public static 
InitialTableDefinition readTableDefinition(Uint8Array flightSchemaMessage) { + return readTableDefinition(readSchemaMessage(flightSchemaMessage)); + } + public static InitialTableDefinition readTableDefinition(Schema schema) { ColumnDefinition[] cols = readColumnDefinitions(schema); @@ -82,7 +86,7 @@ public static InitialTableDefinition readTableDefinition(Schema schema) { .setColumns(cols); } - public static ColumnDefinition[] readColumnDefinitions(Schema schema) { + private static ColumnDefinition[] readColumnDefinitions(Schema schema) { ColumnDefinition[] cols = new ColumnDefinition[(int) schema.fieldsLength()]; for (int i = 0; i < schema.fieldsLength(); i++) { cols[i] = new ColumnDefinition(i, schema.fields(i)); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java index 29f4025c6b5..e45c262fd51 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java @@ -3,10 +3,8 @@ // package io.deephaven.web.client.api.barrage.data; -import com.google.flatbuffers.FlatBufferBuilder; import elemental2.core.JsArray; import elemental2.dom.DomGlobal; -import io.deephaven.barrage.flatbuf.BarrageSubscriptionRequest; import io.deephaven.chunk.ByteChunk; import io.deephaven.chunk.Chunk; import io.deephaven.chunk.DoubleChunk; @@ -15,7 +13,6 @@ import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.ShortChunk; import io.deephaven.chunk.attributes.Values; -import io.deephaven.web.client.api.barrage.CompressedRangeSetReader; import io.deephaven.web.client.api.barrage.WebBarrageMessage; import io.deephaven.web.client.api.barrage.def.InitialTableDefinition; import io.deephaven.web.client.fu.JsData; @@ -25,7 +22,6 @@ import io.deephaven.web.shared.data.ShiftedRange; import jsinterop.base.Any; import jsinterop.base.Js; -import org.jetbrains.annotations.Nullable; import java.util.Arrays; import java.util.BitSet; @@ -171,28 +167,6 @@ public T get(long position) { return new RedirectedImpl(cts, viewportChangedHandler, dataChangedHandler, dataSinks); } - public static FlatBufferBuilder subscriptionRequest(byte[] tableTicket, BitSet columns, @Nullable RangeSet viewport, - io.deephaven.extensions.barrage.BarrageSubscriptionOptions options, boolean isReverseViewport) { - FlatBufferBuilder sub = new FlatBufferBuilder(1024); - int colOffset = BarrageSubscriptionRequest.createColumnsVector(sub, columns.toByteArray()); - int viewportOffset = 0; - if (viewport != null) { - viewportOffset = - BarrageSubscriptionRequest.createViewportVector(sub, CompressedRangeSetReader.writeRange(viewport)); - } - int optionsOffset = options.appendTo(sub); - int tableTicketOffset = BarrageSubscriptionRequest.createTicketVector(sub, tableTicket); - BarrageSubscriptionRequest.startBarrageSubscriptionRequest(sub); - BarrageSubscriptionRequest.addColumns(sub, colOffset); - BarrageSubscriptionRequest.addViewport(sub, viewportOffset); - BarrageSubscriptionRequest.addSubscriptionOptions(sub, optionsOffset); - BarrageSubscriptionRequest.addTicket(sub, tableTicketOffset); - BarrageSubscriptionRequest.addReverseViewport(sub, isReverseViewport); - sub.finish(BarrageSubscriptionRequest.endBarrageSubscriptionRequest(sub)); - - return sub; - } - public interface ViewportChangedHandler { void onServerViewportChanged(RangeSet serverViewport, 
BitSet serverColumns, boolean serverReverseViewport); } @@ -215,13 +189,13 @@ default void ensureCapacity(long size) {} protected final RangeSet currentRowSet = RangeSet.empty(); protected long capacity = 0; - protected final WebDataSink[] destSources; + protected WebDataSink[] destSources; protected RangeSet serverViewport; protected BitSet serverColumns; protected boolean serverReverseViewport; - public WebBarrageSubscription(ClientTableState state, ViewportChangedHandler viewportChangedHandler, + protected WebBarrageSubscription(ClientTableState state, ViewportChangedHandler viewportChangedHandler, DataChangedHandler dataChangedHandler, WebDataSink[] dataSinks) { this.state = state; destSources = dataSinks; @@ -253,6 +227,14 @@ public RangeSet getServerViewport() { return serverViewport; } + /** + * Reads a value from the table subscription. + * + * @param key the row to read in key-space + * @param col the index of the column to read + * @return the value read from the table + * @param the expected type of the column to read + */ public abstract T getData(long key, int col); protected boolean isSubscribedColumn(int ii) { @@ -510,7 +492,6 @@ private RangeSet getFreeRows(long size) { Iterator iterator = freeset.rangeIterator(); int required = (int) size; while (required > 0 && iterator.hasNext()) { - assert iterator.hasNext(); Range next = iterator.next(); Range range = next.size() < required ? next : new Range(next.getFirst(), next.getFirst() + required - 1); @@ -560,7 +541,7 @@ private void freeRows(RangeSet removed) { /** * Helper to avoid appending many times when modifying indexes. The append() method should be called for each key - * _in order_ to ensure that addRange/removeRange isn't called excessively. When no more items will be added, + * in order to ensure that addRange/removeRange isn't called excessively. When no more items will be added, * flush() must be called. 
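+ * <p>
+ * Intended usage (a sketch; the single-argument constructor shown here is an assumption, not taken from this
+ * patch):
+ *
+ * <pre>{@code
+ * RangeSetBulkHelper helper = new RangeSetBulkHelper(rangeSet);
+ * keys.indexIterator().forEachRemaining((long key) -> helper.append(key));
+ * helper.flush(); // must be called to emit the final pending range
+ * }</pre>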
*/ private static class RangeSetBulkHelper { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java index 48615a08e8d..3410064079a 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java @@ -10,6 +10,7 @@ import elemental2.core.JsArray; import elemental2.dom.CustomEventInit; import io.deephaven.barrage.flatbuf.BarrageMessageType; +import io.deephaven.barrage.flatbuf.BarrageSubscriptionRequest; import io.deephaven.extensions.barrage.BarrageSubscriptionOptions; import io.deephaven.extensions.barrage.ColumnConversionMode; import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb.FlightData; @@ -20,6 +21,7 @@ import io.deephaven.web.client.api.LongWrapper; import io.deephaven.web.client.api.TableData; import io.deephaven.web.client.api.WorkerConnection; +import io.deephaven.web.client.api.barrage.CompressedRangeSetReader; import io.deephaven.web.client.api.barrage.WebBarrageMessage; import io.deephaven.web.client.api.barrage.WebBarrageStreamReader; import io.deephaven.web.client.api.barrage.WebBarrageUtils; @@ -33,10 +35,21 @@ import jsinterop.annotations.JsProperty; import jsinterop.base.Any; import jsinterop.base.Js; +import org.jetbrains.annotations.Nullable; import java.io.IOException; import java.util.BitSet; +/** + * Superclass of various subscription types, allowing specific implementations to customize behavior for their needs. + *
+ * <p>
+ * Instances are not ready to use right away: we must wait both for the provided state to resolve (so that we
+ * have the table schema, know what kind of subscription we will make, and know what column types will be
+ * resolved), and for the subscription itself to finish being set up, since until then we will not have
+ * received the size of the table. When closed, an instance cannot be reused.
+ * <p>
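+ * A rough sketch of the expected use from a caller's perspective (illustrative only; EVENT_UPDATED,
+ * addEventListener, and close() are real members of this type, but construction of a concrete subclass is
+ * elided):
+ * <pre>{@code
+ * AbstractTableSubscription sub = ...; // e.g. a TableSubscription or TableViewportSubscription
+ * sub.addEventListener(EVENT_UPDATED, e -> {
+ *     // data can be read here once the subscription has become active
+ * });
+ * // when no longer needed:
+ * sub.close();
+ * }</pre>
+ * <p>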
+ * This is also a base class for types exposed to JS.
+ */
 public abstract class AbstractTableSubscription extends HasEventHandling {
     /**
      * Indicates that some new data is available on the client, either an initial snapshot or a delta update. The
@@ -45,6 +58,17 @@ public abstract class AbstractTableSubscription extends HasEventHandling {
      */
     public static final String EVENT_UPDATED = "updated";
 
+    public enum Status {
+        /** Waiting for some prerequisite before the subscription can be used for the first time. */
+        STARTING,
+        /** Successfully created; no messages are outstanding, so the data is accurate. */
+        ACTIVE,
+        /** A change was requested while active; waiting for the update so the subscription is fully active again. */
+        PENDING_UPDATE,
+        /** Closed or otherwise stopped, cannot be used again. */
+        DONE;
+    }
+
     private final ClientTableState state;
     private final WorkerConnection connection;
     private final int rowStyleColumn;
@@ -52,9 +76,11 @@ public abstract class AbstractTableSubscription extends HasEventHandling {
     private BitSet columnBitSet;
     private BarrageSubscriptionOptions options;
 
-    private final BiDiStream<FlightData, FlightData> doExchange;
-    protected final WebBarrageSubscription barrageSubscription;
+    private BiDiStream<FlightData, FlightData> doExchange;
+    protected WebBarrageSubscription barrageSubscription;
 
+    protected Status status = Status.STARTING;
+
+    @Deprecated // remove this, use status instead
     private boolean subscriptionReady;
 
     public AbstractTableSubscription(ClientTableState state, WorkerConnection connection) {
@@ -64,24 +90,72 @@ public AbstractTableSubscription(ClientTableState state, WorkerConnection connection) {
         rowStyleColumn = state.getRowFormatColumn() == null ? TableData.NO_ROW_FORMAT_COLUMN
                 : state.getRowFormatColumn().getIndex();
 
-        doExchange =
-                connection.streamFactory().create(
-                        headers -> connection.flightServiceClient().doExchange(headers),
-                        (first, headers) -> connection.browserFlightServiceClient().openDoExchange(first, headers),
-                        (next, headers, c) -> connection.browserFlightServiceClient().nextDoExchange(next, headers,
-                                c::apply),
-                        new FlightData());
-        doExchange.onData(this::onFlightData);
-        // TODO handle stream ending, error
-        doExchange.onEnd(this::onStreamEnd);
-
-        // TODO going to need "started change" so we don't let data escape when still updating
-        barrageSubscription = WebBarrageSubscription.subscribe(state, this::onViewportChange, this::onDataChanged);
+        // Once the state is running, set up the actual subscription
+        state.onRunning(s -> {
+            if (status != Status.STARTING) {
+                // already closed
+                return;
+            }
+            // TODO going to need "started change" so we don't let data escape when still updating
+            WebBarrageSubscription.ViewportChangedHandler viewportChangedHandler = this::onViewportChange;
+            WebBarrageSubscription.DataChangedHandler dataChangedHandler = this::onDataChanged;
+
+            status = Status.ACTIVE;
+            this.barrageSubscription = WebBarrageSubscription.subscribe(state, viewportChangedHandler, dataChangedHandler);
+
+            doExchange =
+                    connection.streamFactory().create(
+                            headers -> connection.flightServiceClient().doExchange(headers),
+                            (first, headers) -> connection.browserFlightServiceClient().openDoExchange(first, headers),
+                            (next, headers, c) -> connection.browserFlightServiceClient().nextDoExchange(next, headers,
+                                    c::apply),
+                            new FlightData());
+
+            doExchange.onData(this::onFlightData);
+            // TODO handle stream ending, error
+            doExchange.onEnd(this::onStreamEnd);
+
+            sendFirstSubscriptionRequest();
+        }, () -> {
+            // TODO fail
+
+        });
+    }
+
+    public Status getStatus() {
+        return status;
+    }
+
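+    /**
+     * Builds the flatbuffer payload for a Barrage subscription request. A note on the construction order, since
+     * the FlatBuffers builder API requires it: child vectors (columns, viewport, options, ticket) must be created
+     * before startBarrageSubscriptionRequest() begins the enclosing table, their offsets are then attached, and
+     * finish() must be called before the buffer can be read.
+     */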
+    protected static FlatBufferBuilder subscriptionRequest(byte[] tableTicket, BitSet columns,
+            @Nullable RangeSet viewport, BarrageSubscriptionOptions options, boolean isReverseViewport) {
+        FlatBufferBuilder sub = new FlatBufferBuilder(1024);
+        int colOffset = BarrageSubscriptionRequest.createColumnsVector(sub, columns.toByteArray());
+        int viewportOffset = 0;
+        if (viewport != null) {
+            viewportOffset =
+                    BarrageSubscriptionRequest.createViewportVector(sub, CompressedRangeSetReader.writeRange(viewport));
+        }
+        int optionsOffset = options.appendTo(sub);
+        int tableTicketOffset = BarrageSubscriptionRequest.createTicketVector(sub, tableTicket);
+        BarrageSubscriptionRequest.startBarrageSubscriptionRequest(sub);
+        BarrageSubscriptionRequest.addColumns(sub, colOffset);
+        BarrageSubscriptionRequest.addViewport(sub, viewportOffset);
+        BarrageSubscriptionRequest.addSubscriptionOptions(sub, optionsOffset);
+        BarrageSubscriptionRequest.addTicket(sub, tableTicketOffset);
+        BarrageSubscriptionRequest.addReverseViewport(sub, isReverseViewport);
+        sub.finish(BarrageSubscriptionRequest.endBarrageSubscriptionRequest(sub));
+
+        return sub;
+    }
+
+    protected abstract void sendFirstSubscriptionRequest();
+
     protected void sendBarrageSubscriptionRequest(RangeSet viewport, JsArray<Column> columns, Double updateIntervalMs,
             boolean isReverseViewport) {
+        assert status == Status.ACTIVE || status == Status.PENDING_UPDATE : status;
         this.columns = columns;
         this.columnBitSet = makeColumnBitset(columns);
         // TODO validate that we can change updateinterval
@@ -90,9 +164,9 @@ protected void sendBarrageSubscriptionRequest(RangeSet viewport, JsArray<Column> columns, Double updateIntervalMs,
                 .maxMessageSize(WebBarrageSubscription.MAX_MESSAGE_SIZE)
                 .columnConversionMode(ColumnConversionMode.Stringify)
                 .minUpdateIntervalMs(updateIntervalMs == null ? 0 : (int) (double) updateIntervalMs)
-                .columnsAsList(false)
+                .columnsAsList(false) // TODO flip this to true
                 .build();
-        FlatBufferBuilder request = WebBarrageSubscription.subscriptionRequest(
+        FlatBufferBuilder request = subscriptionRequest(
                 Js.uncheckedCast(state.getHandle().getTicket()),
                 columnBitSet,
                 viewport,
@@ -121,7 +195,10 @@ protected boolean isSubscriptionReady() {
     }
 
     public double size() {
-        return barrageSubscription.getCurrentRowSet().size();
+        if (status == Status.ACTIVE) {
+            return barrageSubscription.getCurrentRowSet().size();
+        }
+        return state.getSize();
     }
 
     private void onDataChanged(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet totalMods, ShiftedRange[] shifted,
@@ -366,7 +443,10 @@ public JsArray<Column> getColumns() {
 
     /**
      * Stops the subscription on the server.
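      * <p>
      * With this change, close() is also safe to call before the exchange stream exists (doExchange may still be
      * null while waiting on the state to resolve), and it moves the subscription to Status.DONE so that it will
      * not be reused.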
     */
    public void close() {
-        doExchange.end();
-        doExchange.cancel();
+        if (doExchange != null) {
+            doExchange.end();
+            doExchange.cancel();
+        }
+        status = Status.DONE;
     }
 }
diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableSubscription.java
index 7ac6903f0ec..3533411e32f 100644
--- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableSubscription.java
+++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableSubscription.java
@@ -26,10 +26,18 @@
 @JsType(namespace = "dh")
 public final class TableSubscription extends AbstractTableSubscription {
 
+    private final JsArray<Column> columns;
+    private final Double updateIntervalMs;
+
     @JsIgnore
     public TableSubscription(JsArray<Column> columns, JsTable existingTable, Double updateIntervalMs) {
         super(existingTable.state(), existingTable.getConnection());
+        this.columns = columns;
+        this.updateIntervalMs = updateIntervalMs;
+    }
+
+    @Override
+    protected void sendFirstSubscriptionRequest() {
         changeSubscription(columns, updateIntervalMs);
     }
 
diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java
index 1245ee4d323..521121e21b5 100644
--- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java
+++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java
@@ -3,51 +3,24 @@
 //
 package io.deephaven.web.client.api.subscription;
 
-import com.google.flatbuffers.FlatBufferBuilder;
 import com.vertispan.tsdefs.annotations.TsInterface;
 import com.vertispan.tsdefs.annotations.TsName;
-import elemental2.core.Uint8Array;
 import elemental2.dom.CustomEvent;
 import elemental2.dom.CustomEventInit;
 import elemental2.dom.DomGlobal;
 import elemental2.promise.IThenable;
 import elemental2.promise.Promise;
-import io.deephaven.barrage.flatbuf.BarrageMessageType;
-import io.deephaven.barrage.flatbuf.BarrageMessageWrapper;
-import io.deephaven.barrage.flatbuf.BarrageSnapshotOptions;
-import io.deephaven.barrage.flatbuf.BarrageSnapshotRequest;
-import io.deephaven.barrage.flatbuf.BarrageUpdateMetadata;
-import io.deephaven.barrage.flatbuf.ColumnConversionMode;
-import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb.FlightData;
-import io.deephaven.web.client.api.Callbacks;
 import io.deephaven.web.client.api.Column;
 import io.deephaven.web.client.api.JsRangeSet;
 import io.deephaven.web.client.api.JsTable;
 import io.deephaven.web.client.api.TableData;
-import io.deephaven.web.client.api.WorkerConnection;
-import io.deephaven.web.client.api.barrage.WebBarrageUtils;
-import io.deephaven.web.client.api.barrage.def.ColumnDefinition;
-import io.deephaven.web.client.api.barrage.stream.BiDiStream;
-import io.deephaven.web.client.fu.JsLog;
 import io.deephaven.web.client.fu.LazyPromise;
 import io.deephaven.web.shared.data.RangeSet;
 import io.deephaven.web.shared.data.ShiftedRange;
-import io.deephaven.web.shared.data.TableSnapshot;
 import jsinterop.annotations.JsMethod;
 import jsinterop.annotations.JsNullable;
 import jsinterop.annotations.JsOptional;
 import jsinterop.base.Js;
-import org.apache.arrow.flatbuf.Message;
-import org.apache.arrow.flatbuf.MessageHeader;
-import org.apache.arrow.flatbuf.RecordBatch;
-import org.gwtproject.nio.TypedArrayHelper;
-
-import
java.util.Arrays; -import java.util.BitSet; -import java.util.Collections; - -import static io.deephaven.web.client.api.barrage.WebBarrageUtils.serializeRanges; -import static io.deephaven.web.client.api.subscription.ViewportData.NO_ROW_FORMAT_COLUMN; /** * Encapsulates event handling around table subscriptions by "cheating" and wrapping up a JsTable instance to do the @@ -84,28 +57,10 @@ public class TableViewportSubscription extends AbstractTableSubscription { // TODO move to superclass and check on viewport change private RangeSet serverViewport; - /** - * Describes the possible lifecycle of the viewport as far as anything external cares about it - */ - public enum Status { - /** - * Waiting for some prerequisite before we can begin, usually waiting to make sure the original table is ready - * to be subscribed to. Once the original table is ready, we will enter the ACTIVE state, even if the first - * update hasn't yet arrived. - */ - STARTING, - /** - * Successfully created, viewport is at least begun on the server, updates are subscribed and if changes happen - * on the server, we will be notified. - */ - ACTIVE, - /** - * Closed or otherwise dead, can not be used again. - */ - DONE - } - - private final double refresh; + private double firstRow; + private double lastRow; + private Column[] columns; + private double refresh; private final JsTable original; @@ -119,20 +74,25 @@ public enum Status { */ private boolean retained; - private Status status = Status.STARTING; private UpdateEventData viewportData; public TableViewportSubscription(double firstRow, double lastRow, Column[] columns, Double updateIntervalMs, JsTable existingTable) { super(existingTable.state(), existingTable.getConnection()); - - setInternalViewport(firstRow, lastRow, columns, updateIntervalMs, null); + this.firstRow = firstRow; + this.lastRow = lastRow; + this.columns = columns; refresh = updateIntervalMs == null ? 1000.0 : updateIntervalMs; this.original = existingTable; } + @Override + protected void sendFirstSubscriptionRequest() { + setInternalViewport(firstRow, lastRow, columns, refresh, null); + } + @Override protected void notifyUpdate(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet totalMods, ShiftedRange[] shifted) { // viewport subscriptions are sometimes required to notify of size change events @@ -233,6 +193,13 @@ public void setViewport(double firstRow, double lastRow, @JsOptional @JsNullable public void setInternalViewport(double firstRow, double lastRow, Column[] columns, Double updateIntervalMs, Boolean isReverseViewport) { + if (status == Status.STARTING) { + this.firstRow = firstRow; + this.lastRow = lastRow; + this.columns = columns; + this.refresh = updateIntervalMs == null ? 
1000.0 : updateIntervalMs; + return; + } if (updateIntervalMs != null && refresh != updateIntervalMs) { throw new IllegalArgumentException( "Can't change refreshIntervalMs on a later call to setViewport, it must be consistent or omitted"); @@ -253,9 +220,9 @@ public void setInternalViewport(double firstRow, double lastRow, Column[] column */ @JsMethod public void close() { - if (status == Status.DONE) { - JsLog.warn("TableViewportSubscription.close called on subscription that's already done."); - } +// if (status == Status.DONE) { +// JsLog.warn("TableViewportSubscription.close called on subscription that's already done."); +// } retained = false; // Instead of calling super.close(), we delegate to internalClose() @@ -270,13 +237,13 @@ public void internalClose() { // indicate that the base table shouldn't get events anymore, even if it is still retained elsewhere originalActive = false; - if (retained || status == Status.DONE) { - // the JsTable has indicated it is no longer interested in this viewport, but other calling - // code has retained it, keep it open for now. - return; - } - - status = Status.DONE; +// if (retained || status == Status.DONE) { +// // the JsTable has indicated it is no longer interested in this viewport, but other calling +// // code has retained it, keep it open for now. +// return; +// } +// +// status = Status.DONE; super.close(); } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java index 3a9ae037268..d6c91fc7253 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java @@ -50,14 +50,11 @@ import jsinterop.annotations.JsType; import jsinterop.base.Any; import jsinterop.base.Js; -import org.apache.arrow.flatbuf.Schema; import java.util.*; import java.util.function.Function; import java.util.stream.Collectors; -import static io.deephaven.web.client.api.barrage.WebBarrageUtils.serializeRanges; - /** * Behaves like a {@link JsTable} externally, but data, state, and viewports are managed by an entirely different * mechanism, and so reimplemented here. 
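Note on the following hunk: the tree table now hands the serialized Flight schema bytes straight to the
readTableDefinition(Uint8Array) overload added to WebBarrageUtils earlier in this patch, instead of first
materializing a Schema. A minimal sketch of the call-shape change (illustrative; names from this patch,
surrounding code elided):

    // before: two steps through the flatbuffer Schema
    Schema schema = WebBarrageUtils.readSchemaMessage(flightSchemaMessage);
    InitialTableDefinition tableDefinition = WebBarrageUtils.readTableDefinition(schema);
    // after: one call on the serialized bytes
    InitialTableDefinition tableDefinition = WebBarrageUtils.readTableDefinition(flightSchemaMessage);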
@@ -184,10 +181,9 @@ public JsTreeTable(WorkerConnection workerConnection, JsWidget widget) { HierarchicalTableDescriptor.deserializeBinary(widget.getDataAsU8()); Uint8Array flightSchemaMessage = treeDescriptor.getSnapshotSchema_asU8(); - Schema schema = WebBarrageUtils.readSchemaMessage(flightSchemaMessage); this.isRefreshing = !treeDescriptor.getIsStatic(); - this.tableDefinition = WebBarrageUtils.readTableDefinition(schema); + this.tableDefinition = WebBarrageUtils.readTableDefinition(flightSchemaMessage); Column[] columns = new Column[0]; Map> columnDefsByName = tableDefinition.getColumnsByName(); int rowFormatColumn = -1; @@ -580,6 +576,11 @@ public TreeSubscription(ClientTableState state, WorkerConnection connection) { super(state, connection); } + @Override + protected void sendFirstSubscriptionRequest() { + setViewport(firstRow, lastRow, Js.uncheckedCast(columns), (double) updateInterval); + } + @Override protected BitSet makeColumnBitset(JsArray columns) { BitSet requested = super.makeColumnBitset(columns); @@ -686,7 +687,6 @@ private void replaceSubscription(RebuildStep step) { state.applyTableCreationResponse(def); TreeSubscription subscription = new TreeSubscription(state, connection); - subscription.setViewport(firstRow, lastRow, Js.uncheckedCast(columns), (double) updateInterval); subscription.addEventListener(TreeSubscription.EVENT_UPDATED, (CustomEvent data) -> { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java b/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java index 2a6dd7c0593..b6b6312e01b 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java @@ -1065,9 +1065,7 @@ public void applyTableCreationResponse(ExportedTableCreationResponse def) { Uint8Array flightSchemaMessage = def.getSchemaHeader_asU8(); isStatic = def.getIsStatic(); - Schema schema = WebBarrageUtils.readSchemaMessage(flightSchemaMessage); - - setTableDef(WebBarrageUtils.readTableDefinition(schema)); + setTableDef(WebBarrageUtils.readTableDefinition(WebBarrageUtils.readSchemaMessage(flightSchemaMessage))); setResolution(ResolutionState.RUNNING); setSize(Long.parseLong(def.getSize())); From 177f50d2fc6630d6f82d3e8cfb54d228fb3bcb40 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 18 Jun 2024 15:21:25 -0500 Subject: [PATCH 062/219] Remove dead class --- .../util/BarrageChunkAppendingMarshaller.java | 206 ------------------ .../test/FlightMessageRoundTripTest.java | 48 ---- 2 files changed, 254 deletions(-) delete mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageChunkAppendingMarshaller.java diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageChunkAppendingMarshaller.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageChunkAppendingMarshaller.java deleted file mode 100644 index edbf60d72a1..00000000000 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageChunkAppendingMarshaller.java +++ /dev/null @@ -1,206 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.extensions.barrage.util; - -import com.google.common.io.LittleEndianDataInputStream; -import com.google.protobuf.CodedInputStream; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.attributes.Values; -import 
io.deephaven.extensions.barrage.BarrageSnapshotOptions; -import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator; -import io.deephaven.util.datastructures.LongSizedDataStructure; -import io.deephaven.chunk.ChunkType; -import io.deephaven.internal.log.LoggerFactory; -import io.deephaven.io.logger.Logger; -import io.grpc.MethodDescriptor; -import io.grpc.protobuf.ProtoUtils; -import org.apache.arrow.flatbuf.Message; -import org.apache.arrow.flatbuf.MessageHeader; -import org.apache.arrow.flatbuf.RecordBatch; -import org.apache.arrow.flight.impl.Flight; -import org.apache.arrow.flight.impl.FlightServiceGrpc; - -import java.io.InputStream; -import java.nio.ByteBuffer; -import java.util.Arrays; -import java.util.Iterator; -import java.util.PrimitiveIterator; - -/** - * This class is used to append the results of a DoGet directly into destination {@link WritableChunk}. - *
- * <p>
- * It will append the results of a DoGet into the destination chunks, and notify the listener of the number of rows
- * appended to the record batch in total. The user will typically want to wait for OnCompletion to be called before
- * assuming they have received all the data.
- */
-public class BarrageChunkAppendingMarshaller implements MethodDescriptor.Marshaller<Integer> {
-
-    /**
-     * Fetch the client side descriptor for a specific DoGet invocation.
-     *
-     * <p>
- * Instead of providing BarrageMessage as the response type, this custom marshaller will return the number of rows - * appended after each RecordBatch. This is informative yet hands-off process reading data into the chunks. - * - * @param columnChunkTypes the chunk types per column - * @param columnTypes the class type per column - * @param componentTypes the component class type per column - * @param destChunks the destination chunks - * @return the client side method descriptor - */ - public static MethodDescriptor getClientDoGetDescriptor( - final ChunkType[] columnChunkTypes, - final Class[] columnTypes, - final Class[] componentTypes, - final WritableChunk[] destChunks) { - final MethodDescriptor.Marshaller requestMarshaller = - ProtoUtils.marshaller(Flight.Ticket.getDefaultInstance()); - final MethodDescriptor descriptor = FlightServiceGrpc.getDoGetMethod(); - - return MethodDescriptor.newBuilder() - .setType(MethodDescriptor.MethodType.SERVER_STREAMING) - .setFullMethodName(descriptor.getFullMethodName()) - .setSampledToLocalTracing(false) - .setRequestMarshaller(requestMarshaller) - .setResponseMarshaller(new BarrageChunkAppendingMarshaller( - BARRAGE_OPTIONS, columnChunkTypes, columnTypes, componentTypes, destChunks)) - .setSchemaDescriptor(descriptor.getSchemaDescriptor()) - .build(); - } - - // DoGet does not get to set any options - private static final BarrageSnapshotOptions BARRAGE_OPTIONS = BarrageSnapshotOptions.builder().build(); - - private static final Logger log = LoggerFactory.getLogger(BarrageChunkAppendingMarshaller.class); - - private final BarrageSnapshotOptions options; - - private final ChunkType[] columnChunkTypes; - private final Class[] columnTypes; - private final Class[] componentTypes; - - private final WritableChunk[] destChunks; - private long numRowsRead = 0; - - public BarrageChunkAppendingMarshaller( - final BarrageSnapshotOptions options, - final ChunkType[] columnChunkTypes, - final Class[] columnTypes, - final Class[] componentTypes, - final WritableChunk[] destChunks) { - this.options = options; - this.columnChunkTypes = columnChunkTypes; - this.columnTypes = columnTypes; - this.componentTypes = componentTypes; - this.destChunks = destChunks; - } - - @Override - public InputStream stream(final Integer value) { - throw new UnsupportedOperationException( - "BarrageDataMarshaller unexpectedly used to directly convert BarrageMessage to InputStream"); - } - - @Override - public Integer parse(final InputStream stream) { - Message header = null; - try { - boolean bodyParsed = false; - - final CodedInputStream decoder = CodedInputStream.newInstance(stream); - - for (int tag = decoder.readTag(); tag != 0; tag = decoder.readTag()) { - if (tag == BarrageProtoUtil.DATA_HEADER_TAG) { - final int size = decoder.readRawVarint32(); - header = Message.getRootAsMessage(ByteBuffer.wrap(decoder.readRawBytes(size))); - continue; - } else if (tag != BarrageProtoUtil.BODY_TAG) { - decoder.skipField(tag); - continue; - } - - if (bodyParsed) { - // although not an error for protobuf, arrow payloads should consider it one - throw new IllegalStateException("Unexpected duplicate body tag"); - } - - if (header == null) { - throw new IllegalStateException("Missing metadata header; cannot decode body"); - } - - if (header.headerType() != org.apache.arrow.flatbuf.MessageHeader.RecordBatch) { - throw new IllegalStateException("Only know how to decode Schema/BarrageRecordBatch messages"); - } - - bodyParsed = true; - final int size = decoder.readRawVarint32(); - final RecordBatch 
batch = (RecordBatch) header.header(new RecordBatch()); - - // noinspection UnstableApiUsage - try (final LittleEndianDataInputStream ois = - new LittleEndianDataInputStream(new BarrageProtoUtil.ObjectInputStreamAdapter(decoder, size))) { - final Iterator fieldNodeIter = - new FlatBufferIteratorAdapter<>(batch.nodesLength(), - i -> new ChunkInputStreamGenerator.FieldNodeInfo(batch.nodes(i))); - - final long[] bufferInfo = new long[batch.buffersLength()]; - for (int bi = 0; bi < batch.buffersLength(); ++bi) { - int offset = LongSizedDataStructure.intSize("BufferInfo", batch.buffers(bi).offset()); - int length = LongSizedDataStructure.intSize("BufferInfo", batch.buffers(bi).length()); - if (bi < batch.buffersLength() - 1) { - final int nextOffset = - LongSizedDataStructure.intSize("BufferInfo", batch.buffers(bi + 1).offset()); - // our parsers handle overhanging buffers - length += Math.max(0, nextOffset - offset - length); - } - bufferInfo[bi] = length; - } - final PrimitiveIterator.OfLong bufferInfoIter = Arrays.stream(bufferInfo).iterator(); - - for (int ci = 0; ci < destChunks.length; ++ci) { - final WritableChunk dest = destChunks[ci]; - - final long remaining = dest.capacity() - dest.size(); - if (batch.length() > remaining) { - throw new BarrageMarshallingException(String.format("Received RecordBatch length (%d) " + - "exceeds the remaining capacity (%d) of the destination Chunk.", batch.length(), - remaining)); - } - - // Barrage should return the provided chunk since there was enough room to append the data - final WritableChunk retChunk = ChunkInputStreamGenerator.extractChunkFromInputStream( - options, columnChunkTypes[ci], columnTypes[ci], componentTypes[ci], fieldNodeIter, - bufferInfoIter, ois, dest, dest.size(), (int) batch.length()); - - if (retChunk != dest) { - throw new BarrageMarshallingException("Unexpected chunk returned from " + - "ChunkInputStreamGenerator.extractChunkFromInputStream"); - } - - // barrage does not alter the destination chunk size, so let's set it ourselves - dest.setSize(dest.size() + (int) batch.length()); - } - numRowsRead += batch.length(); - } - } - - if (header != null && header.headerType() == MessageHeader.Schema) { - // getting started, but no rows yet; schemas do not have body tags - return 0; - } - - if (!bodyParsed) { - throw new IllegalStateException("Missing body tag"); - } - - // we're appending directly to the chunk, but courteously let our user know how many rows were read - return (int) numRowsRead; - } catch (final Exception e) { - log.error().append("Unable to parse a received DoGet: ").append(e).endl(); - if (e instanceof BarrageMarshallingException) { - throw (BarrageMarshallingException) e; - } - throw new GrpcMarshallingException("Unable to parse DoGet", e); - } - } -} diff --git a/server/test-utils/src/main/java/io/deephaven/server/test/FlightMessageRoundTripTest.java b/server/test-utils/src/main/java/io/deephaven/server/test/FlightMessageRoundTripTest.java index 95646536910..bbc47c6f1d3 100644 --- a/server/test-utils/src/main/java/io/deephaven/server/test/FlightMessageRoundTripTest.java +++ b/server/test-utils/src/main/java/io/deephaven/server/test/FlightMessageRoundTripTest.java @@ -38,7 +38,6 @@ import io.deephaven.engine.util.TableDiff; import io.deephaven.engine.util.TableTools; import io.deephaven.extensions.barrage.BarrageSubscriptionOptions; -import io.deephaven.extensions.barrage.util.BarrageChunkAppendingMarshaller; import io.deephaven.extensions.barrage.util.BarrageUtil; import io.deephaven.io.logger.LogBuffer; 
import io.deephaven.io.logger.LogBufferGlobal; @@ -1065,53 +1064,6 @@ private void assertRoundTripDataEqual(Table deephavenTable, Consumer[] wireTypes = convertedSchema.computeWireTypes(); - final Class[] wireComponentTypes = convertedSchema.computeWireComponentTypes(); - - // noinspection unchecked - final WritableChunk[] destChunks = Arrays.stream(wireChunkTypes) - .map(chunkType -> chunkType.makeWritableChunk(size)).toArray(WritableChunk[]::new); - // zero out the chunks as the marshaller will append to them. - Arrays.stream(destChunks).forEach(dest -> dest.setSize(0)); - - final MethodDescriptor methodDescriptor = BarrageChunkAppendingMarshaller - .getClientDoGetDescriptor(wireChunkTypes, wireTypes, wireComponentTypes, destChunks); - - final Ticket ticket = new Ticket("s/test".getBytes(StandardCharsets.UTF_8)); - final Iterator msgIter = ClientCalls.blockingServerStreamingCall( - clientChannel, methodDescriptor, CallOptions.DEFAULT, - Flight.Ticket.newBuilder().setTicket(ByteString.copyFrom(ticket.getBytes())).build()); - - long totalRows = 0; - while (msgIter.hasNext()) { - totalRows += msgIter.next(); - } - Assert.eq(totalRows, "totalRows", size, "size"); - final LongChunk col_i = destChunks[0].asLongChunk(); - final ObjectChunk col_j = destChunks[1].asObjectChunk(); - Assert.eq(col_i.size(), "col_i.size()", size, "size"); - Assert.eq(col_j.size(), "col_j.size()", size, "size"); - for (int i = 0; i < size; ++i) { - Assert.eq(col_i.get(i), "col_i.get(i)", i, "i"); - Assert.equals(col_j.get(i), "col_j.get(i)", "str_" + i, "str_" + i); - } - } - @Test public void testColumnsAsListFeature() throws Exception { // bind the table in the session From 1685b80dc68f42dcc91be97c39802bd6de756dd1 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 18 Jun 2024 15:25:46 -0500 Subject: [PATCH 063/219] Make assertion provide more info --- .../barrage/chunk/ByteChunkInputStreamGenerator.java | 4 +--- .../barrage/chunk/CharChunkInputStreamGenerator.java | 4 +--- .../barrage/chunk/DoubleChunkInputStreamGenerator.java | 4 +--- .../barrage/chunk/FloatChunkInputStreamGenerator.java | 4 +--- .../barrage/chunk/IntChunkInputStreamGenerator.java | 4 +--- .../barrage/chunk/LongChunkInputStreamGenerator.java | 4 +--- .../barrage/chunk/ShortChunkInputStreamGenerator.java | 4 +--- 7 files changed, 7 insertions(+), 21 deletions(-) diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java index 59efe7bfb33..194e9982d95 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java @@ -268,9 +268,7 @@ static WritableByteChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java 
b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java index 212ef4c33bd..14fd3f8f79b 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java @@ -264,9 +264,7 @@ static WritableCharChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java index e99ee922cac..a4108804f34 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java @@ -269,9 +269,7 @@ static WritableDoubleChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java index 0daee6eb829..82046928670 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java @@ -268,9 +268,7 @@ static WritableFloatChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java index 667c1921c76..4646cc5bdc6 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java @@ -269,9 +269,7 @@ static WritableIntChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) 
nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java index d8da1b1807d..c0a76261d61 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java @@ -269,9 +269,7 @@ static WritableLongChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java index 4bd912e646b..364b04d0d5f 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java @@ -268,9 +268,7 @@ static WritableShortChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); From 2dc3afc437cd9ec854bec0dcb82f2a1b8f429c90 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 18 Jun 2024 15:44:05 -0500 Subject: [PATCH 064/219] Remove unused BitSet param --- .../extensions/barrage/util/BarrageStreamReader.java | 1 - .../deephaven/extensions/barrage/util/StreamReader.java | 2 -- .../io/deephaven/client/impl/BarrageSnapshotImpl.java | 8 +------- .../io/deephaven/client/impl/BarrageSubscriptionImpl.java | 2 +- 4 files changed, 2 insertions(+), 11 deletions(-) diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java index 3d749adee21..d535ffd0254 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java @@ -59,7 +59,6 @@ public BarrageStreamReader(final LongConsumer deserializeTmConsumer) { @Override public BarrageMessage safelyParseFrom(final StreamReaderOptions options, - final BitSet expectedColumns, final ChunkType[] columnChunkTypes, final Class[] columnTypes, 
final Class<?>[] componentTypes,
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/StreamReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/StreamReader.java
index 93b35af0863..be389e894b6 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/StreamReader.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/StreamReader.java
@@ -18,7 +18,6 @@ public interface StreamReader {
      * Converts an InputStream to a BarrageMessage in the context of the provided parameters.
      *
      * @param options the options related to parsing this message
-     * @param expectedColumns the columns expected to appear in the stream, null implies all columns
      * @param columnChunkTypes the types to use for each column chunk
      * @param columnTypes the actual type for the column
      * @param componentTypes the actual component type for the column
      * @return a BarrageMessage filled out by the stream's payload
      */
     BarrageMessage safelyParseFrom(final StreamReaderOptions options,
-            BitSet expectedColumns,
             ChunkType[] columnChunkTypes,
             Class<?>[] columnTypes,
             Class<?>[] componentTypes,
diff --git a/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java b/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java
index 6310f2cab1f..e0e0b1f7741 100644
--- a/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java
+++ b/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java
@@ -63,8 +63,6 @@ public class BarrageSnapshotImpl extends ReferenceCountedLivenessNode implements BarrageSnapshot {
     private final BarrageTable resultTable;
     private final CompletableFuture<Table> future;
 
-    private volatile BitSet expectedColumns;
-
     private volatile int connected = 1;
     private static final AtomicIntegerFieldUpdater<BarrageSnapshotImpl> CONNECTED_UPDATER =
             AtomicIntegerFieldUpdater.newUpdater(BarrageSnapshotImpl.class, "connected");

@@ -209,9 +207,6 @@ public Future<Table>
partialTable( alreadyUsed = true; } - // store this for streamreader parser - expectedColumns = columns; - // Send the snapshot request: observer.onNext(FlightData.newBuilder() .setAppMetadata(ByteStringAccess.wrap(makeRequestInternal(viewport, columns, reverseViewport, options))) @@ -355,8 +350,7 @@ public InputStream stream(final BarrageMessage value) { @Override public BarrageMessage parse(final InputStream stream) { - return streamReader.safelyParseFrom(options, expectedColumns, columnChunkTypes, columnTypes, componentTypes, - stream); + return streamReader.safelyParseFrom(options, columnChunkTypes, columnTypes, componentTypes, stream); } } diff --git a/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSubscriptionImpl.java b/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSubscriptionImpl.java index 28d844780c4..0ddf9b9e87b 100644 --- a/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSubscriptionImpl.java +++ b/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSubscriptionImpl.java @@ -429,7 +429,7 @@ public InputStream stream(final BarrageMessage value) { @Override public BarrageMessage parse(final InputStream stream) { - return streamReader.safelyParseFrom(options, null, columnChunkTypes, columnTypes, componentTypes, stream); + return streamReader.safelyParseFrom(options, columnChunkTypes, columnTypes, componentTypes, stream); } } From 0266d6c2fc8f4cde1fbc8a0ed11128cb4db9548c Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 25 Jun 2024 12:19:47 -0500 Subject: [PATCH 065/219] Move BYTES constant into each impl --- .../chunk/ByteChunkInputStreamGenerator.java | 16 +++++--------- .../chunk/CharChunkInputStreamGenerator.java | 16 +++++--------- .../chunk/ChunkInputStreamGenerator.java | 22 +++++++++---------- .../DoubleChunkInputStreamGenerator.java | 16 +++++--------- .../chunk/FloatChunkInputStreamGenerator.java | 16 +++++--------- .../chunk/IntChunkInputStreamGenerator.java | 16 +++++--------- .../chunk/LongChunkInputStreamGenerator.java | 16 +++++--------- .../chunk/ShortChunkInputStreamGenerator.java | 16 +++++--------- 8 files changed, 52 insertions(+), 82 deletions(-) diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java index 194e9982d95..c2cc7f3e453 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java @@ -176,7 +176,6 @@ public interface ByteConversion { } static WritableByteChunk extractChunkFromInputStream( - final int elementSize, final StreamReaderOptions options, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, @@ -185,12 +184,11 @@ static WritableByteChunk extractChunkFromInputStream( final int outOffset, final int totalRows) throws IOException { return extractChunkFromInputStreamWithConversion( - elementSize, options, ByteConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + options, ByteConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final int elementSize, final StreamReaderOptions options, final Function transform, final Iterator fieldNodeIter, @@ -201,7 +199,7 @@ static 
WritableObjectChunk extractChunkFromInputStreamWithTransfo final int totalRows) throws IOException { try (final WritableByteChunk inner = extractChunkFromInputStream( - elementSize, options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( outChunk, @@ -224,7 +222,6 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo } static WritableByteChunk extractChunkFromInputStreamWithConversion( - final int elementSize, final StreamReaderOptions options, final ByteConversion conversion, final Iterator fieldNodeIter, @@ -267,13 +264,13 @@ static WritableByteChunk extractChunkFromInputStreamWithConversion( } // consumed entire validity buffer by here - final long payloadRead = (long) nodeInfo.numElements * elementSize; + final long payloadRead = (long) nodeInfo.numElements * Byte.BYTES; Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); } else { - useValidityBuffer(elementSize, conversion, is, nodeInfo, chunk, outOffset, isValid); + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); } final long overhangPayload = payloadBuffer - payloadRead; @@ -318,7 +315,6 @@ private static void useDeephavenNulls( } private static void useValidityBuffer( - final int elementSize, final ByteConversion conversion, final DataInput is, final FieldNodeInfo nodeInfo, @@ -337,7 +333,7 @@ private static void useValidityBuffer( do { if ((validityWord & 1) == 1) { if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Byte.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); ei += pendingSkips; pendingSkips = 0; @@ -355,7 +351,7 @@ private static void useValidityBuffer( } if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Byte.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java index 14fd3f8f79b..878bc0a6cd6 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java @@ -172,7 +172,6 @@ public interface CharConversion { } static WritableCharChunk extractChunkFromInputStream( - final int elementSize, final StreamReaderOptions options, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, @@ -181,12 +180,11 @@ static WritableCharChunk extractChunkFromInputStream( final int outOffset, final int totalRows) throws IOException { return extractChunkFromInputStreamWithConversion( - elementSize, options, CharConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + options, CharConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final int elementSize, final StreamReaderOptions options, final Function transform, final Iterator fieldNodeIter, @@ -197,7 +195,7 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo final int totalRows) throws IOException { try (final WritableCharChunk inner = 
extractChunkFromInputStream( - elementSize, options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( outChunk, @@ -220,7 +218,6 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo } static WritableCharChunk extractChunkFromInputStreamWithConversion( - final int elementSize, final StreamReaderOptions options, final CharConversion conversion, final Iterator fieldNodeIter, @@ -263,13 +260,13 @@ static WritableCharChunk extractChunkFromInputStreamWithConversion( } // consumed entire validity buffer by here - final long payloadRead = (long) nodeInfo.numElements * elementSize; + final long payloadRead = (long) nodeInfo.numElements * Character.BYTES; Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); } else { - useValidityBuffer(elementSize, conversion, is, nodeInfo, chunk, outOffset, isValid); + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); } final long overhangPayload = payloadBuffer - payloadRead; @@ -314,7 +311,6 @@ private static void useDeephavenNulls( } private static void useValidityBuffer( - final int elementSize, final CharConversion conversion, final DataInput is, final FieldNodeInfo nodeInfo, @@ -333,7 +329,7 @@ private static void useValidityBuffer( do { if ((validityWord & 1) == 1) { if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Character.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); ei += pendingSkips; pendingSkips = 0; @@ -351,7 +347,7 @@ private static void useValidityBuffer( } if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Character.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java index 92c62dd1d00..4a3b01b48fb 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java @@ -213,36 +213,34 @@ static WritableChunk extractChunkFromInputStream( throw new UnsupportedOperationException("Booleans are reinterpreted as bytes"); case Char: return CharChunkInputStreamGenerator.extractChunkFromInputStream( - Character.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Byte: if (type == Boolean.class || type == boolean.class) { return BooleanChunkInputStreamGenerator.extractChunkFromInputStream( options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } return ByteChunkInputStreamGenerator.extractChunkFromInputStream( - Byte.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Short: return ShortChunkInputStreamGenerator.extractChunkFromInputStream( - Short.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Int: return 
IntChunkInputStreamGenerator.extractChunkFromInputStream( - Integer.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Long: if (factor == 1) { return LongChunkInputStreamGenerator.extractChunkFromInputStream( - Long.BYTES, options, - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithConversion( - Long.BYTES, options, - (long v) -> v == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : (v * factor), + options, (long v) -> v == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : (v * factor), fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Float: return FloatChunkInputStreamGenerator.extractChunkFromInputStream( - Float.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Double: return DoubleChunkInputStreamGenerator.extractChunkFromInputStream( - Double.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Object: if (type.isArray()) { if (componentType == byte.class) { @@ -347,7 +345,7 @@ static WritableChunk extractChunkFromInputStream( } if (type == LocalDate.class) { return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( - Long.BYTES, options, + options, value -> value == QueryConstants.NULL_LONG ? null : LocalDate.ofEpochDay(value / MS_PER_DAY), @@ -355,7 +353,7 @@ static WritableChunk extractChunkFromInputStream( } if (type == LocalTime.class) { return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( - Long.BYTES, options, + options, value -> value == QueryConstants.NULL_LONG ? 
null : LocalTime.ofNanoOfDay(value), fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java index a4108804f34..c5283a02364 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java @@ -177,7 +177,6 @@ public interface DoubleConversion { } static WritableDoubleChunk extractChunkFromInputStream( - final int elementSize, final StreamReaderOptions options, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, @@ -186,12 +185,11 @@ static WritableDoubleChunk extractChunkFromInputStream( final int outOffset, final int totalRows) throws IOException { return extractChunkFromInputStreamWithConversion( - elementSize, options, DoubleConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + options, DoubleConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final int elementSize, final StreamReaderOptions options, final Function transform, final Iterator fieldNodeIter, @@ -202,7 +200,7 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo final int totalRows) throws IOException { try (final WritableDoubleChunk inner = extractChunkFromInputStream( - elementSize, options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( outChunk, @@ -225,7 +223,6 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo } static WritableDoubleChunk extractChunkFromInputStreamWithConversion( - final int elementSize, final StreamReaderOptions options, final DoubleConversion conversion, final Iterator fieldNodeIter, @@ -268,13 +265,13 @@ static WritableDoubleChunk extractChunkFromInputStreamWithConversion( } // consumed entire validity buffer by here - final long payloadRead = (long) nodeInfo.numElements * elementSize; + final long payloadRead = (long) nodeInfo.numElements * Double.BYTES; Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); } else { - useValidityBuffer(elementSize, conversion, is, nodeInfo, chunk, outOffset, isValid); + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); } final long overhangPayload = payloadBuffer - payloadRead; @@ -319,7 +316,6 @@ private static void useDeephavenNulls( } private static void useValidityBuffer( - final int elementSize, final DoubleConversion conversion, final DataInput is, final FieldNodeInfo nodeInfo, @@ -338,7 +334,7 @@ private static void useValidityBuffer( do { if ((validityWord & 1) == 1) { if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Double.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); ei += pendingSkips; pendingSkips = 0; @@ -356,7 +352,7 @@ private static void useValidityBuffer( } if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Double.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); } } diff --git 
a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java index 82046928670..19b52593bff 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java @@ -176,7 +176,6 @@ public interface FloatConversion { } static WritableFloatChunk extractChunkFromInputStream( - final int elementSize, final StreamReaderOptions options, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, @@ -185,12 +184,11 @@ static WritableFloatChunk extractChunkFromInputStream( final int outOffset, final int totalRows) throws IOException { return extractChunkFromInputStreamWithConversion( - elementSize, options, FloatConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + options, FloatConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final int elementSize, final StreamReaderOptions options, final Function transform, final Iterator fieldNodeIter, @@ -201,7 +199,7 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo final int totalRows) throws IOException { try (final WritableFloatChunk inner = extractChunkFromInputStream( - elementSize, options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( outChunk, @@ -224,7 +222,6 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo } static WritableFloatChunk extractChunkFromInputStreamWithConversion( - final int elementSize, final StreamReaderOptions options, final FloatConversion conversion, final Iterator fieldNodeIter, @@ -267,13 +264,13 @@ static WritableFloatChunk extractChunkFromInputStreamWithConversion( } // consumed entire validity buffer by here - final long payloadRead = (long) nodeInfo.numElements * elementSize; + final long payloadRead = (long) nodeInfo.numElements * Float.BYTES; Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); } else { - useValidityBuffer(elementSize, conversion, is, nodeInfo, chunk, outOffset, isValid); + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); } final long overhangPayload = payloadBuffer - payloadRead; @@ -318,7 +315,6 @@ private static void useDeephavenNulls( } private static void useValidityBuffer( - final int elementSize, final FloatConversion conversion, final DataInput is, final FieldNodeInfo nodeInfo, @@ -337,7 +333,7 @@ private static void useValidityBuffer( do { if ((validityWord & 1) == 1) { if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Float.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); ei += pendingSkips; pendingSkips = 0; @@ -355,7 +351,7 @@ private static void useValidityBuffer( } if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Float.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java 
b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java index 4646cc5bdc6..91714f4dd43 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java @@ -177,7 +177,6 @@ public interface IntConversion { } static WritableIntChunk extractChunkFromInputStream( - final int elementSize, final StreamReaderOptions options, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, @@ -186,12 +185,11 @@ static WritableIntChunk extractChunkFromInputStream( final int outOffset, final int totalRows) throws IOException { return extractChunkFromInputStreamWithConversion( - elementSize, options, IntConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + options, IntConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final int elementSize, final StreamReaderOptions options, final Function transform, final Iterator fieldNodeIter, @@ -202,7 +200,7 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo final int totalRows) throws IOException { try (final WritableIntChunk inner = extractChunkFromInputStream( - elementSize, options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( outChunk, @@ -225,7 +223,6 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo } static WritableIntChunk extractChunkFromInputStreamWithConversion( - final int elementSize, final StreamReaderOptions options, final IntConversion conversion, final Iterator fieldNodeIter, @@ -268,13 +265,13 @@ static WritableIntChunk extractChunkFromInputStreamWithConversion( } // consumed entire validity buffer by here - final long payloadRead = (long) nodeInfo.numElements * elementSize; + final long payloadRead = (long) nodeInfo.numElements * Integer.BYTES; Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); } else { - useValidityBuffer(elementSize, conversion, is, nodeInfo, chunk, outOffset, isValid); + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); } final long overhangPayload = payloadBuffer - payloadRead; @@ -319,7 +316,6 @@ private static void useDeephavenNulls( } private static void useValidityBuffer( - final int elementSize, final IntConversion conversion, final DataInput is, final FieldNodeInfo nodeInfo, @@ -338,7 +334,7 @@ private static void useValidityBuffer( do { if ((validityWord & 1) == 1) { if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Integer.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); ei += pendingSkips; pendingSkips = 0; @@ -356,7 +352,7 @@ private static void useValidityBuffer( } if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Integer.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java index c0a76261d61..a28c4006d1d 100644 --- 
a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java @@ -177,7 +177,6 @@ public interface LongConversion { } static WritableLongChunk extractChunkFromInputStream( - final int elementSize, final StreamReaderOptions options, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, @@ -186,12 +185,11 @@ static WritableLongChunk extractChunkFromInputStream( final int outOffset, final int totalRows) throws IOException { return extractChunkFromInputStreamWithConversion( - elementSize, options, LongConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + options, LongConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final int elementSize, final StreamReaderOptions options, final Function transform, final Iterator fieldNodeIter, @@ -202,7 +200,7 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo final int totalRows) throws IOException { try (final WritableLongChunk inner = extractChunkFromInputStream( - elementSize, options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( outChunk, @@ -225,7 +223,6 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo } static WritableLongChunk extractChunkFromInputStreamWithConversion( - final int elementSize, final StreamReaderOptions options, final LongConversion conversion, final Iterator fieldNodeIter, @@ -268,13 +265,13 @@ static WritableLongChunk extractChunkFromInputStreamWithConversion( } // consumed entire validity buffer by here - final long payloadRead = (long) nodeInfo.numElements * elementSize; + final long payloadRead = (long) nodeInfo.numElements * Long.BYTES; Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); } else { - useValidityBuffer(elementSize, conversion, is, nodeInfo, chunk, outOffset, isValid); + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); } final long overhangPayload = payloadBuffer - payloadRead; @@ -319,7 +316,6 @@ private static void useDeephavenNulls( } private static void useValidityBuffer( - final int elementSize, final LongConversion conversion, final DataInput is, final FieldNodeInfo nodeInfo, @@ -338,7 +334,7 @@ private static void useValidityBuffer( do { if ((validityWord & 1) == 1) { if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Long.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); ei += pendingSkips; pendingSkips = 0; @@ -356,7 +352,7 @@ private static void useValidityBuffer( } if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Long.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java index 364b04d0d5f..68a2ecf86b1 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java +++ 
b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java @@ -176,7 +176,6 @@ public interface ShortConversion { } static WritableShortChunk extractChunkFromInputStream( - final int elementSize, final StreamReaderOptions options, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, @@ -185,12 +184,11 @@ static WritableShortChunk extractChunkFromInputStream( final int outOffset, final int totalRows) throws IOException { return extractChunkFromInputStreamWithConversion( - elementSize, options, ShortConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + options, ShortConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final int elementSize, final StreamReaderOptions options, final Function transform, final Iterator fieldNodeIter, @@ -201,7 +199,7 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo final int totalRows) throws IOException { try (final WritableShortChunk inner = extractChunkFromInputStream( - elementSize, options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( outChunk, @@ -224,7 +222,6 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo } static WritableShortChunk extractChunkFromInputStreamWithConversion( - final int elementSize, final StreamReaderOptions options, final ShortConversion conversion, final Iterator fieldNodeIter, @@ -267,13 +264,13 @@ static WritableShortChunk extractChunkFromInputStreamWithConversion( } // consumed entire validity buffer by here - final long payloadRead = (long) nodeInfo.numElements * elementSize; + final long payloadRead = (long) nodeInfo.numElements * Short.BYTES; Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); } else { - useValidityBuffer(elementSize, conversion, is, nodeInfo, chunk, outOffset, isValid); + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); } final long overhangPayload = payloadBuffer - payloadRead; @@ -318,7 +315,6 @@ private static void useDeephavenNulls( } private static void useValidityBuffer( - final int elementSize, final ShortConversion conversion, final DataInput is, final FieldNodeInfo nodeInfo, @@ -337,7 +333,7 @@ private static void useValidityBuffer( do { if ((validityWord & 1) == 1) { if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Short.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); ei += pendingSkips; pendingSkips = 0; @@ -355,7 +351,7 @@ private static void useValidityBuffer( } if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Short.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); } } From 56f28010ebae719e150a2361e0ef86c6b88a4ac7 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Mon, 24 Jun 2024 20:36:48 -0500 Subject: [PATCH 066/219] Commit #1: reading chunks; checkpoint to talk to Nate; next will try reading schema at beginning of stream --- .../chunk/ChunkInputStreamGenerator.java | 173 +-------------- .../barrage/chunk/ChunkReadingFactory.java | 116 ++++++++++ .../chunk/DefaultChunkReadingFactory.java | 207 ++++++++++++++++++ .../VarListChunkInputStreamGenerator.java | 20 +-
.../VectorChunkInputStreamGenerator.java | 23 +- .../barrage/util/ArrowToTableConverter.java | 10 +- .../barrage/util/BarrageStreamReader.java | 19 +- .../extensions/barrage/util/BarrageUtil.java | 4 + .../client/impl/BarrageSnapshotImpl.java | 3 +- 9 files changed, 385 insertions(+), 190 deletions(-) create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java index 4a3b01b48fb..98cf03908e8 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java @@ -10,7 +10,6 @@ import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.rowset.RowSet; -import io.deephaven.extensions.barrage.ColumnConversionMode; import io.deephaven.extensions.barrage.util.DefensiveDrainable; import io.deephaven.extensions.barrage.util.StreamReaderOptions; import io.deephaven.time.DateTimeUtils; @@ -19,7 +18,6 @@ import io.deephaven.chunk.Chunk; import io.deephaven.chunk.ChunkType; import io.deephaven.util.SafeCloseable; -import io.deephaven.util.type.TypeUtils; import io.deephaven.vector.Vector; import org.jetbrains.annotations.Nullable; @@ -31,7 +29,6 @@ import java.time.LocalDate; import java.time.LocalTime; import java.time.ZonedDateTime; -import java.util.Arrays; import java.util.Iterator; import java.util.PrimitiveIterator; @@ -187,6 +184,7 @@ static ChunkInputStreamGenerator makeInputStreamGenerator( } } + @Deprecated static WritableChunk extractChunkFromInputStream( final StreamReaderOptions options, final ChunkType chunkType, final Class type, final Class componentType, @@ -195,10 +193,10 @@ static WritableChunk extractChunkFromInputStream( final DataInput is, final WritableChunk outChunk, final int offset, final int totalRows) throws IOException { return extractChunkFromInputStream(options, 1, chunkType, type, componentType, fieldNodeIter, bufferInfoIter, - is, - outChunk, offset, totalRows); + is, outChunk, offset, totalRows); } + @Deprecated static WritableChunk extractChunkFromInputStream( final StreamReaderOptions options, final int factor, @@ -207,168 +205,9 @@ static WritableChunk extractChunkFromInputStream( final PrimitiveIterator.OfLong bufferInfoIter, final DataInput is, final WritableChunk outChunk, final int outOffset, final int totalRows) throws IOException { - // TODO (deephaven-core#5453): pass in ArrowType to enable ser/deser of single java class in multiple formats - switch (chunkType) { - case Boolean: - throw new UnsupportedOperationException("Booleans are reinterpreted as bytes"); - case Char: - return CharChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Byte: - if (type == Boolean.class || type == boolean.class) { - return BooleanChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - return ByteChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, 
outOffset, totalRows); - case Short: - return ShortChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Int: - return IntChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Long: - if (factor == 1) { - return LongChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithConversion( - options, (long v) -> v == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : (v * factor), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Float: - return FloatChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Double: - return DoubleChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Object: - if (type.isArray()) { - if (componentType == byte.class) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - is, - fieldNodeIter, - bufferInfoIter, - (buf, off, len) -> Arrays.copyOfRange(buf, off, off + len), - outChunk, outOffset, totalRows); - } else { - return VarListChunkInputStreamGenerator.extractChunkFromInputStream( - options, type, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - } - if (Vector.class.isAssignableFrom(type)) { - // noinspection unchecked - return VectorChunkInputStreamGenerator.extractChunkFromInputStream( - options, (Class>) type, componentType, fieldNodeIter, bufferInfoIter, is, - outChunk, outOffset, totalRows); - } - if (type == BigInteger.class) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - is, - fieldNodeIter, - bufferInfoIter, - BigInteger::new, - outChunk, outOffset, totalRows); - } - if (type == BigDecimal.class) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - is, - fieldNodeIter, - bufferInfoIter, - (final byte[] buf, final int offset, final int length) -> { - // read the int scale value as little endian, arrow's endianness. 
- final byte b1 = buf[offset]; - final byte b2 = buf[offset + 1]; - final byte b3 = buf[offset + 2]; - final byte b4 = buf[offset + 3]; - final int scale = b4 << 24 | (b3 & 0xFF) << 16 | (b2 & 0xFF) << 8 | (b1 & 0xFF); - return new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale); - }, - outChunk, outOffset, totalRows); - } - if (type == Instant.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Long.BYTES, options, io -> { - final long value = io.readLong(); - if (value == QueryConstants.NULL_LONG) { - return null; - } - return DateTimeUtils.epochNanosToInstant(value * factor); - }, - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == ZonedDateTime.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Long.BYTES, options, io -> { - final long value = io.readLong(); - if (value == QueryConstants.NULL_LONG) { - return null; - } - return DateTimeUtils.epochNanosToZonedDateTime( - value * factor, DateTimeUtils.timeZone()); - }, - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == Byte.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Byte.BYTES, options, io -> TypeUtils.box(io.readByte()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == Character.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Character.BYTES, options, io -> TypeUtils.box(io.readChar()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == Double.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Double.BYTES, options, io -> TypeUtils.box(io.readDouble()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == Float.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Float.BYTES, options, io -> TypeUtils.box(io.readFloat()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == Integer.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Integer.BYTES, options, io -> TypeUtils.box(io.readInt()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == Long.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Long.BYTES, options, io -> TypeUtils.box(io.readLong()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == Short.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Short.BYTES, options, io -> TypeUtils.box(io.readShort()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == LocalDate.class) { - return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( - options, - value -> value == QueryConstants.NULL_LONG - ? null - : LocalDate.ofEpochDay(value / MS_PER_DAY), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == LocalTime.class) { - return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( - options, - value -> value == QueryConstants.NULL_LONG ? 
null : LocalTime.ofNanoOfDay(value), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == String.class || - options.columnConversionMode().equals(ColumnConversionMode.Stringify)) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream(is, fieldNodeIter, - bufferInfoIter, - (buf, off, len) -> new String(buf, off, len, Charsets.UTF_8), outChunk, outOffset, - totalRows); - } - throw new UnsupportedOperationException( - "Do not yet support column conversion mode: " + options.columnConversionMode()); - default: - throw new UnsupportedOperationException(); - } + return DefaultChunkReadingFactory.INSTANCE.extractChunkFromInputStream(options, factor, + new ChunkReadingFactory.ChunkTypeInfo(chunkType, type, componentType, null), fieldNodeIter, + bufferInfoIter, is, outChunk, outOffset, totalRows); } /** diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java new file mode 100644 index 00000000000..d7cfb18db00 --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java @@ -0,0 +1,116 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.chunk.ChunkType; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import org.apache.arrow.flatbuf.Field; +import org.apache.arrow.flatbuf.Type; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; + +/** + * + */ +public interface ChunkReadingFactory { + /** + * + */ + class ChunkTypeInfo { + private final ChunkType chunkType; + private final Class type; + private final Class componentType; + private final Field arrowField; + + public ChunkTypeInfo(ChunkType chunkType, Class type, Class componentType, Field arrowField) { + this.chunkType = chunkType; + this.type = type; + this.componentType = componentType; + this.arrowField = arrowField; + } + + public ChunkType chunkType() { + return chunkType; + } + + public Class type() { + return type; + } + + public Class componentType() { + return componentType; + } + + public Field arrowField() { + return arrowField; + } + + public Field componentArrowField() { + if (arrowField.typeType() != Type.List) { + throw new IllegalStateException("Not a flight List"); + } + if (arrowField.childrenLength() != 1) { + throw new IllegalStateException("Incorrect number of child Fields"); + } + return arrowField.children(0); + } + } + + /** + * + * @param options + * @param factor + * @param typeInfo + * @param fieldNodeIter + * @param bufferInfoIter + * @param is + * @param outChunk + * @param outOffset + * @param totalRows + * @return + * @throws IOException + */ + WritableChunk extractChunkFromInputStream( + final StreamReaderOptions options, + final int factor, + final ChunkTypeInfo typeInfo, + final Iterator fieldNodeIter, + final PrimitiveIterator.OfLong bufferInfoIter, + final DataInput is, + final WritableChunk outChunk, + final int outOffset, + final int totalRows) throws IOException; + + /** + * + * @param options + * @param typeInfo + * @param fieldNodeIter + * @param bufferInfoIter + * @param is + * @param outChunk + * @param offset + * @param totalRows + * @return + * @throws IOException + */ + default 
WritableChunk extractChunkFromInputStream( + final StreamReaderOptions options, + final ChunkTypeInfo typeInfo, + final Iterator fieldNodeIter, + final PrimitiveIterator.OfLong bufferInfoIter, + final DataInput is, + final WritableChunk outChunk, + final int offset, + final int totalRows) throws IOException { + return extractChunkFromInputStream(options, 1, typeInfo, fieldNodeIter, bufferInfoIter, is, outChunk, offset, + totalRows); + } + +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java new file mode 100644 index 00000000000..e7a5bcd665c --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java @@ -0,0 +1,207 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.extensions.barrage.chunk; + +import com.google.common.base.Charsets; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.ColumnConversionMode; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.time.DateTimeUtils; +import io.deephaven.util.QueryConstants; +import io.deephaven.util.type.TypeUtils; +import io.deephaven.vector.Vector; + +import java.io.DataInput; +import java.io.IOException; +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalTime; +import java.time.ZonedDateTime; +import java.util.Arrays; +import java.util.Iterator; +import java.util.PrimitiveIterator; + +import static io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator.MS_PER_DAY; + +/** + * JVM implementation of ChunkReadingFactory, suitable for use in Java clients and servers. This default implementation + * may not round trip Flight types correctly, but will round trip Deephaven table definitions and table data. Neither of + * these is a required/expected property of a Flight/Barrage/Deephaven client.
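+ *
+ * <p>
+ * A minimal caller-side sketch (illustrative only, not part of this patch; {@code options}, {@code fieldNodeIter},
+ * {@code bufferInfoIter}, {@code is}, and {@code totalRows} are assumed to come from the surrounding Barrage
+ * message parsing, as in ArrowToTableConverter):
+ *
+ * <pre>{@code
+ * // Use the shared stateless instance rather than constructing a new factory.
+ * ChunkReadingFactory factory = DefaultChunkReadingFactory.INSTANCE;
+ * // Describe the column to read; a null Arrow Field is permitted for simple primitive columns.
+ * ChunkReadingFactory.ChunkTypeInfo info =
+ *         new ChunkReadingFactory.ChunkTypeInfo(ChunkType.Int, int.class, null, null);
+ * // Read one column's values from the stream into a freshly allocated chunk (outChunk == null).
+ * WritableChunk<Values> column = factory.extractChunkFromInputStream(
+ *         options, info, fieldNodeIter, bufferInfoIter, is, null, 0, totalRows);
+ * }</pre>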
+ */ +public final class DefaultChunkReadingFactory implements ChunkReadingFactory { + public static final ChunkReadingFactory INSTANCE = new DefaultChunkReadingFactory(); + + @Override + public WritableChunk extractChunkFromInputStream(StreamReaderOptions options, int factor, + ChunkTypeInfo typeInfo, Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + // TODO (deephaven-core#5453): pass in ArrowType to enable ser/deser of single java class in multiple formats + switch (typeInfo.chunkType()) { + case Boolean: + throw new UnsupportedOperationException("Booleans are reinterpreted as bytes"); + case Char: + return CharChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + case Byte: + if (typeInfo.type() == Boolean.class || typeInfo.type() == boolean.class) { + return BooleanChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + return ByteChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + case Short: + return ShortChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + case Int: + return IntChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + case Long: + if (factor == 1) { + return LongChunkInputStreamGenerator.extractChunkFromInputStream( + options, + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithConversion( + options, + (long v) -> v == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_LONG : (v * factor), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + case Float: + return FloatChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + case Double: + return DoubleChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + case Object: + if (typeInfo.type().isArray()) { + if (typeInfo.componentType() == byte.class) { + return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + is, + fieldNodeIter, + bufferInfoIter, + (buf, off, len) -> Arrays.copyOfRange(buf, off, off + len), + outChunk, outOffset, totalRows); + } else { + return VarListChunkInputStreamGenerator.extractChunkFromInputStream(options, typeInfo, + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows, this); + } + } + if (Vector.class.isAssignableFrom(typeInfo.type())) { + return VectorChunkInputStreamGenerator.extractChunkFromInputStream(options, + typeInfo, fieldNodeIter, bufferInfoIter, + is, outChunk, outOffset, totalRows, this); + } + if (typeInfo.type() == BigInteger.class) { + return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + is, + fieldNodeIter, + bufferInfoIter, + BigInteger::new, + outChunk, outOffset, totalRows); + } + if (typeInfo.type() == BigDecimal.class) { + return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + is, + fieldNodeIter, + bufferInfoIter, + (final byte[] buf, final int offset, final int length) -> { + // read the int scale value as little endian, arrow's endianness. + final byte b1 = buf[offset]; + final byte b2 = buf[offset + 1]; + final byte b3 = buf[offset + 2]; + final byte b4 = buf[offset + 3]; + final int scale = b4 << 24 | (b3 & 0xFF) << 16 | (b2 & 0xFF) << 8 | (b1 & 0xFF); + return new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale); + }, + outChunk, outOffset, totalRows); + } + if (typeInfo.type() == Instant.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Long.BYTES, options, io -> { + final long value = io.readLong(); + if (value == QueryConstants.NULL_LONG) { + return null; + } + return DateTimeUtils.epochNanosToInstant(value * factor); + }, + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == ZonedDateTime.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Long.BYTES, options, io -> { + final long value = io.readLong(); + if (value == QueryConstants.NULL_LONG) { + return null; + } + return DateTimeUtils.epochNanosToZonedDateTime( + value * factor, DateTimeUtils.timeZone()); + }, + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == Byte.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Byte.BYTES, options, io -> TypeUtils.box(io.readByte()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == Character.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Character.BYTES, options, io -> TypeUtils.box(io.readChar()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == Double.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Double.BYTES, options, io -> 
TypeUtils.box(io.readDouble()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == Float.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Float.BYTES, options, io -> TypeUtils.box(io.readFloat()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == Integer.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Integer.BYTES, options, io -> TypeUtils.box(io.readInt()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == Long.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Long.BYTES, options, io -> TypeUtils.box(io.readLong()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == Short.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Short.BYTES, options, io -> TypeUtils.box(io.readShort()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == LocalDate.class) { + return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( + options, + value -> value == QueryConstants.NULL_LONG + ? null + : LocalDate.ofEpochDay(value / MS_PER_DAY), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == LocalTime.class) { + return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( + options, + value -> value == QueryConstants.NULL_LONG ? null : LocalTime.ofNanoOfDay(value), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == String.class || + options.columnConversionMode().equals(ColumnConversionMode.Stringify)) { + return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream(is, fieldNodeIter, + bufferInfoIter, + (buf, off, len) -> new String(buf, off, len, Charsets.UTF_8), outChunk, outOffset, + totalRows); + } + throw new UnsupportedOperationException( + "Do not yet support column conversion mode: " + options.columnConversionMode()); + default: + throw new UnsupportedOperationException(); + } + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java index 0a109230ca6..e1075e7dcf3 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java @@ -235,19 +235,20 @@ public int drainTo(final OutputStream outputStream) throws IOException { static WritableObjectChunk extractChunkFromInputStream( final StreamReaderOptions options, - final Class type, + final ChunkReadingFactory.ChunkTypeInfo typeInfo, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, final DataInput is, final WritableChunk outChunk, final int outOffset, - final int totalRows) throws IOException { + final int totalRows, + ChunkReadingFactory chunkReadingFactory) throws IOException { final FieldNodeInfo nodeInfo = fieldNodeIter.next(); final long validityBuffer = bufferInfoIter.nextLong(); final long offsetsBuffer = bufferInfoIter.nextLong(); - final Class componentType = type.getComponentType(); + 
final Class componentType = typeInfo.type().getComponentType(); final Class innerComponentType = componentType != null ? componentType.getComponentType() : null; final ChunkType chunkType; @@ -259,8 +260,11 @@ static WritableObjectChunk extractChunkFromInputStream( } if (nodeInfo.numElements == 0) { - try (final WritableChunk ignored = ChunkInputStreamGenerator.extractChunkFromInputStream( - options, chunkType, componentType, innerComponentType, fieldNodeIter, + try (final WritableChunk ignored = chunkReadingFactory.extractChunkFromInputStream( + options, + new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, innerComponentType, + typeInfo.componentArrowField()), + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { return WritableObjectChunk.makeWritableChunk(nodeInfo.numElements); } @@ -299,8 +303,10 @@ static WritableObjectChunk extractChunkFromInputStream( } final ArrayExpansionKernel kernel = ArrayExpansionKernel.makeExpansionKernel(chunkType, componentType); - try (final WritableChunk inner = ChunkInputStreamGenerator.extractChunkFromInputStream( - options, chunkType, componentType, innerComponentType, + try (final WritableChunk inner = chunkReadingFactory.extractChunkFromInputStream( + options, + new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, innerComponentType, + typeInfo.componentArrowField()), fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { chunk = kernel.contract(inner, offsets, outChunk, outOffset, totalRows); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java index 35245b11631..b7bb8cee6a4 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java @@ -235,25 +235,29 @@ public int drainTo(final OutputStream outputStream) throws IOException { static WritableObjectChunk, Values> extractChunkFromInputStream( final StreamReaderOptions options, - final Class> type, - final Class inComponentType, + final ChunkReadingFactory.ChunkTypeInfo typeInfo, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, final DataInput is, final WritableChunk outChunk, final int outOffset, - final int totalRows) throws IOException { + final int totalRows, + ChunkReadingFactory chunkReadingFactory) throws IOException { final FieldNodeInfo nodeInfo = fieldNodeIter.next(); final long validityBuffer = bufferInfoIter.nextLong(); final long offsetsBuffer = bufferInfoIter.nextLong(); - final Class componentType = VectorExpansionKernel.getComponentType(type, inComponentType); + final Class componentType = + VectorExpansionKernel.getComponentType(typeInfo.type(), typeInfo.componentType()); final ChunkType chunkType = ChunkType.fromElementType(componentType); if (nodeInfo.numElements == 0) { - try (final WritableChunk ignored = ChunkInputStreamGenerator.extractChunkFromInputStream( - options, chunkType, componentType, componentType.getComponentType(), fieldNodeIter, bufferInfoIter, + try (final WritableChunk ignored = chunkReadingFactory.extractChunkFromInputStream( + options, + new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, componentType.getComponentType(), + typeInfo.componentArrowField()), + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { if (outChunk != null) { @@ -296,8 +300,11 @@ static 
WritableObjectChunk, Values> extractChunkFromInputStream( } final VectorExpansionKernel kernel = VectorExpansionKernel.makeExpansionKernel(chunkType, componentType); - try (final WritableChunk inner = ChunkInputStreamGenerator.extractChunkFromInputStream( - options, chunkType, componentType, componentType.getComponentType(), fieldNodeIter, bufferInfoIter, + try (final WritableChunk inner = chunkReadingFactory.extractChunkFromInputStream( + options, + new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, componentType.getComponentType(), + typeInfo.componentArrowField()), + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { chunk = kernel.contract(inner, offsets, outChunk, outOffset, totalRows); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java index 71f8a81b0fe..b0c62c652c1 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java @@ -13,6 +13,8 @@ import io.deephaven.engine.table.impl.util.BarrageMessage; import io.deephaven.extensions.barrage.BarrageSubscriptionOptions; import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator; +import io.deephaven.extensions.barrage.chunk.ChunkReadingFactory; +import io.deephaven.extensions.barrage.chunk.DefaultChunkReadingFactory; import io.deephaven.extensions.barrage.table.BarrageTable; import io.deephaven.io.streams.ByteBufferInputStream; import io.deephaven.proto.util.Exceptions; @@ -45,6 +47,7 @@ public class ArrowToTableConverter { private Class[] columnTypes; private Class[] componentTypes; protected BarrageSubscriptionOptions options = DEFAULT_SER_OPTIONS; + private Schema schema; private volatile boolean completed = false; @@ -136,6 +139,7 @@ public synchronized void onCompleted() throws InterruptedException { } protected void parseSchema(final Schema header) { + this.schema = header; // The Schema instance (especially originated from Python) can't be assumed to be valid after the return // of this method. Until https://github.com/jpy-consortium/jpy/issues/126 is resolved, we need to make a copy of // the header to use after the return of this method. @@ -194,8 +198,10 @@ protected BarrageMessage createBarrageMessage(BarrageProtoUtil.MessageInfo mi, i msg.addColumnData[ci].data = new ArrayList<>(); final int factor = (columnConversionFactors == null) ? 
1 : columnConversionFactors[ci]; try { - acd.data.add(ChunkInputStreamGenerator.extractChunkFromInputStream(options, factor, - columnChunkTypes[ci], columnTypes[ci], componentTypes[ci], fieldNodeIter, + acd.data.add(DefaultChunkReadingFactory.INSTANCE.extractChunkFromInputStream(options, factor, + new ChunkReadingFactory.ChunkTypeInfo(columnChunkTypes[ci], columnTypes[ci], componentTypes[ci], + schema.fields(ci)), + fieldNodeIter, bufferInfoIter, mi.inputStream, null, 0, 0)); } catch (final IOException unexpected) { throw new UncheckedDeephavenException(unexpected); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java index d535ffd0254..571082227db 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java @@ -19,6 +19,8 @@ import io.deephaven.engine.rowset.RowSetShiftData; import io.deephaven.engine.table.impl.util.*; import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator; +import io.deephaven.extensions.barrage.chunk.ChunkReadingFactory; +import io.deephaven.extensions.barrage.chunk.DefaultChunkReadingFactory; import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.ChunkType; import io.deephaven.internal.log.LoggerFactory; @@ -26,6 +28,7 @@ import org.apache.arrow.flatbuf.Message; import org.apache.arrow.flatbuf.MessageHeader; import org.apache.arrow.flatbuf.RecordBatch; +import org.apache.arrow.flatbuf.Schema; import java.io.IOException; import java.io.InputStream; @@ -51,8 +54,11 @@ public class BarrageStreamReader implements StreamReader { private long numModRowsRead = 0; private long numModRowsTotal = 0; + private Schema schema; private BarrageMessage msg = null; + private final ChunkReadingFactory chunkReadingFactory = DefaultChunkReadingFactory.INSTANCE; + public BarrageStreamReader(final LongConsumer deserializeTmConsumer) { this.deserializeTmConsumer = deserializeTmConsumer; } @@ -239,8 +245,10 @@ public BarrageMessage safelyParseFrom(final StreamReaderOptions options, // fill the chunk with data and assign back into the array acd.data.set(lastChunkIndex, - ChunkInputStreamGenerator.extractChunkFromInputStream(options, columnChunkTypes[ci], - columnTypes[ci], componentTypes[ci], fieldNodeIter, bufferInfoIter, ois, + chunkReadingFactory.extractChunkFromInputStream(options, + new ChunkReadingFactory.ChunkTypeInfo(columnChunkTypes[ci], + columnTypes[ci], componentTypes[ci], schema.fields(ci)), + fieldNodeIter, bufferInfoIter, ois, chunk, chunk.size(), (int) batch.length())); chunk.setSize(chunk.size() + (int) batch.length()); } @@ -270,8 +278,10 @@ public BarrageMessage safelyParseFrom(final StreamReaderOptions options, // fill the chunk with data and assign back into the array mcd.data.set(lastChunkIndex, - ChunkInputStreamGenerator.extractChunkFromInputStream(options, columnChunkTypes[ci], - columnTypes[ci], componentTypes[ci], fieldNodeIter, bufferInfoIter, ois, + chunkReadingFactory.extractChunkFromInputStream(options, + new ChunkReadingFactory.ChunkTypeInfo(columnChunkTypes[ci], + columnTypes[ci], componentTypes[ci], null), + fieldNodeIter, bufferInfoIter, ois, chunk, chunk.size(), numRowsToRead)); chunk.setSize(chunk.size() + numRowsToRead); } @@ -282,6 +292,7 @@ public BarrageMessage safelyParseFrom(final StreamReaderOptions 
options, if (header != null && header.headerType() == MessageHeader.Schema) { // there is no body and our clients do not want to see schema messages + this.schema = (Schema) header.header(new Schema()); return null; } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java index f8933e6aee0..ab412a1d830 100755 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java @@ -510,6 +510,10 @@ public Class[] computeWireComponentTypes() { return tableDef.getColumnStream() .map(ColumnDefinition::getComponentType).toArray(Class[]::new); } + + // public Field[] fields() { + // return + // } } private static void setConversionFactor( diff --git a/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java b/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java index e0e0b1f7741..8ff73e27d93 100644 --- a/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java +++ b/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java @@ -322,7 +322,7 @@ public MethodDescriptor getClientDoExchangeDescripto .build(); } - private class BarrageDataMarshaller implements MethodDescriptor.Marshaller { + private static class BarrageDataMarshaller implements MethodDescriptor.Marshaller { private final BarrageSnapshotOptions options; private final ChunkType[] columnChunkTypes; private final Class[] columnTypes; @@ -366,7 +366,6 @@ public void onError(@NotNull final Throwable t) { } } - /** * The Completable Future is used to encapsulate the concept that the table is filled with requested data. 
*/ From 4f67239a6110da3c98eb16616328bbea5c66b7ed Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 25 Jun 2024 06:59:52 -0500 Subject: [PATCH 067/219] Commit #2, mostly mechanical changes, splitting creation and reading --- .../chunk/ChunkInputStreamGenerator.java | 6 +- .../extensions/barrage/chunk/ChunkReader.java | 34 +++ .../barrage/chunk/ChunkReadingFactory.java | 36 +-- .../chunk/DefaultChunkReadingFactory.java | 256 ++++++++++-------- .../VarListChunkInputStreamGenerator.java | 28 +- .../VectorChunkInputStreamGenerator.java | 29 +- .../barrage/util/ArrowToTableConverter.java | 8 +- .../barrage/util/BarrageStreamReader.java | 31 ++- 8 files changed, 233 insertions(+), 195 deletions(-) create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReader.java diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java index 98cf03908e8..a2ae09fb1d0 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java @@ -197,7 +197,7 @@ static WritableChunk extractChunkFromInputStream( } @Deprecated - static WritableChunk extractChunkFromInputStream( + private static WritableChunk extractChunkFromInputStream( final StreamReaderOptions options, final int factor, final ChunkType chunkType, final Class type, final Class componentType, @@ -206,8 +206,8 @@ static WritableChunk extractChunkFromInputStream( final DataInput is, final WritableChunk outChunk, final int outOffset, final int totalRows) throws IOException { return DefaultChunkReadingFactory.INSTANCE.extractChunkFromInputStream(options, factor, - new ChunkReadingFactory.ChunkTypeInfo(chunkType, type, componentType, null), fieldNodeIter, - bufferInfoIter, is, outChunk, outOffset, totalRows); + new ChunkReadingFactory.ChunkTypeInfo(chunkType, type, componentType, null)) + .read(fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } /** diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReader.java new file mode 100644 index 00000000000..de90744fc0d --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReader.java @@ -0,0 +1,34 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.attributes.Values; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; + +/** + * Consumes Flight/Barrage streams and transforms them into WritableChunks. 
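+ *
+ * <p>
+ * A minimal sketch of the split between creation and reading introduced here (illustrative only; {@code options},
+ * {@code typeInfo}, the iterators, and {@code is} are assumed to come from the surrounding message parsing):
+ *
+ * <pre>{@code
+ * // Resolve a reader once for a column's type...
+ * ChunkReader reader = DefaultChunkReadingFactory.INSTANCE.extractChunkFromInputStream(options, typeInfo);
+ * // ...then invoke it to materialize the data, as the deprecated bridge in ChunkInputStreamGenerator now does.
+ * WritableChunk<Values> column = reader.read(fieldNodeIter, bufferInfoIter, is, null, 0, totalRows);
+ * }</pre>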
+ */ +public interface ChunkReader { + /** + * Reads the next column of data from the stream into a chunk. + * + * @param fieldNodeIter iterator over the field nodes (element and null counts) remaining in the message + * @param bufferInfoIter iterator over the buffer lengths remaining in the message + * @param is the stream to read the column payload from + * @param outChunk the chunk to fill, or null to allocate a new chunk + * @param outOffset the offset into outChunk at which to begin writing + * @param totalRows the total number of rows the resulting chunk must be able to hold + * @return a chunk (possibly outChunk itself) containing the values that were read + */ + WritableChunk read(final Iterator fieldNodeIter, + final PrimitiveIterator.OfLong bufferInfoIter, + final DataInput is, + final WritableChunk outChunk, + final int outOffset, + final int totalRows) throws IOException; +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java index d7cfb18db00..d624f5bc736 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java @@ -67,50 +67,24 @@ public Field componentArrowField() { * @param options * @param factor * @param typeInfo - * @param fieldNodeIter - * @param bufferInfoIter - * @param is - * @param outChunk - * @param outOffset - * @param totalRows * @return * @throws IOException */ - WritableChunk extractChunkFromInputStream( + ChunkReader extractChunkFromInputStream( final StreamReaderOptions options, final int factor, - final ChunkTypeInfo typeInfo, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException; + final ChunkTypeInfo typeInfo) throws IOException; /** * * @param options * @param typeInfo - * @param fieldNodeIter - * @param bufferInfoIter - * @param is - * @param outChunk - * @param offset - * @param totalRows * @return * @throws IOException */ - default WritableChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final ChunkTypeInfo typeInfo, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int offset, - final int totalRows) throws IOException { - return extractChunkFromInputStream(options, 1, typeInfo, fieldNodeIter, bufferInfoIter, is, outChunk, offset, - totalRows); + default ChunkReader extractChunkFromInputStream(final StreamReaderOptions options, final ChunkTypeInfo typeInfo) + throws IOException { + return extractChunkFromInputStream(options, 1, typeInfo); } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java index e7a5bcd665c..6a7ceead2d2 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java @@ -36,167 +36,201 @@ public final class DefaultChunkReadingFactory implements ChunkReadingFactory { public static final ChunkReadingFactory INSTANCE = new DefaultChunkReadingFactory(); @Override - public WritableChunk extractChunkFromInputStream(StreamReaderOptions options, int factor, - ChunkTypeInfo typeInfo, Iterator fieldNodeIter, - PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, - int totalRows) throws IOException { + public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int factor, + ChunkTypeInfo typeInfo) throws IOException { // TODO (deephaven-core#5453): pass in ArrowType to enable
ser/deser of single java class in multiple formats switch (typeInfo.chunkType()) { case Boolean: throw new UnsupportedOperationException("Booleans are reinterpreted as bytes"); case Char: - return CharChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> CharChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Byte: if (typeInfo.type() == Boolean.class || typeInfo.type() == boolean.class) { - return BooleanChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> BooleanChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } - return ByteChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> ByteChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Short: - return ShortChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> ShortChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Int: - return IntChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> IntChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Long: if (factor == 1) { - return LongChunkInputStreamGenerator.extractChunkFromInputStream( - options, - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithConversion( - options, - (long v) -> v == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : (v * factor), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> LongChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> LongChunkInputStreamGenerator.extractChunkFromInputStreamWithConversion( + options, + (long v) -> v == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_LONG : (v * factor), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Float: - return FloatChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> FloatChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Double: - return DoubleChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> DoubleChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Object: if (typeInfo.type().isArray()) { if (typeInfo.componentType() == byte.class) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - is, - fieldNodeIter, - bufferInfoIter, - (buf, off, len) -> Arrays.copyOfRange(buf, off, off + len), - outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + is, + fieldNodeIter, + bufferInfoIter, + (buf, off, len) -> Arrays.copyOfRange(buf, off, off + len), + outChunk, outOffset, totalRows); } else { - return VarListChunkInputStreamGenerator.extractChunkFromInputStream(options, typeInfo, - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows, this); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> VarListChunkInputStreamGenerator.extractChunkFromInputStream(options, + typeInfo, + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows, this); } } if (Vector.class.isAssignableFrom(typeInfo.type())) { - return VectorChunkInputStreamGenerator.extractChunkFromInputStream(options, - typeInfo, fieldNodeIter, bufferInfoIter, - is, outChunk, outOffset, totalRows, this); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> VectorChunkInputStreamGenerator.extractChunkFromInputStream(options, + typeInfo, fieldNodeIter, bufferInfoIter, + is, outChunk, outOffset, totalRows, this); } if (typeInfo.type() == BigInteger.class) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - is, - fieldNodeIter, - bufferInfoIter, - BigInteger::new, - outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + is, + fieldNodeIter, + bufferInfoIter, + BigInteger::new, + outChunk, outOffset, totalRows); } if (typeInfo.type() == BigDecimal.class) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - is, - fieldNodeIter, - bufferInfoIter, - (final byte[] buf, final int offset, final int length) -> { - // read the int scale value as little endian, arrow's endianness. 
-                                final byte b1 = buf[offset];
-                                final byte b2 = buf[offset + 1];
-                                final byte b3 = buf[offset + 2];
-                                final byte b4 = buf[offset + 3];
-                                final int scale = b4 << 24 | (b3 & 0xFF) << 16 | (b2 & 0xFF) << 8 | (b1 & 0xFF);
-                                return new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale);
-                            },
-                            outChunk, outOffset, totalRows);
+                    return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset,
+                            totalRows) -> VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream(
+                                    is,
+                                    fieldNodeIter,
+                                    bufferInfoIter,
+                                    (final byte[] buf, final int offset, final int length) -> {
+                                        // read the int scale value as little endian, arrow's endianness.
+                                        final byte b1 = buf[offset];
+                                        final byte b2 = buf[offset + 1];
+                                        final byte b3 = buf[offset + 2];
+                                        final byte b4 = buf[offset + 3];
+                                        final int scale = b4 << 24 | (b3 & 0xFF) << 16 | (b2 & 0xFF) << 8 | (b1 & 0xFF);
+                                        return new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale);
+                                    },
+                                    outChunk, outOffset, totalRows);
                 }
                 if (typeInfo.type() == Instant.class) {
-                    return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion(
-                            Long.BYTES, options, io -> {
-                                final long value = io.readLong();
-                                if (value == QueryConstants.NULL_LONG) {
-                                    return null;
-                                }
-                                return DateTimeUtils.epochNanosToInstant(value * factor);
-                            },
-                            fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
+                    return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset,
+                            totalRows) -> FixedWidthChunkInputStreamGenerator
+                                    .extractChunkFromInputStreamWithTypeConversion(
+                                            Long.BYTES, options, io -> {
+                                                final long value = io.readLong();
+                                                if (value == QueryConstants.NULL_LONG) {
+                                                    return null;
+                                                }
+                                                return DateTimeUtils.epochNanosToInstant(value * factor);
+                                            },
+                                            fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
                 }
                 if (typeInfo.type() == ZonedDateTime.class) {
-                    return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion(
-                            Long.BYTES, options, io -> {
-                                final long value = io.readLong();
-                                if (value == QueryConstants.NULL_LONG) {
-                                    return null;
-                                }
-                                return DateTimeUtils.epochNanosToZonedDateTime(
-                                        value * factor, DateTimeUtils.timeZone());
-                            },
-                            fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
+                    return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset,
+                            totalRows) -> FixedWidthChunkInputStreamGenerator
+                                    .extractChunkFromInputStreamWithTypeConversion(
+                                            Long.BYTES, options, io -> {
+                                                final long value = io.readLong();
+                                                if (value == QueryConstants.NULL_LONG) {
+                                                    return null;
+                                                }
+                                                return DateTimeUtils.epochNanosToZonedDateTime(
+                                                        value * factor, DateTimeUtils.timeZone());
+                                            },
+                                            fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
                 }
                 if (typeInfo.type() == Byte.class) {
-                    return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion(
-                            Byte.BYTES, options, io -> TypeUtils.box(io.readByte()),
-                            fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
+                    return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset,
+                            totalRows) -> FixedWidthChunkInputStreamGenerator
+                                    .extractChunkFromInputStreamWithTypeConversion(
+                                            Byte.BYTES, options, io -> TypeUtils.box(io.readByte()),
+                                            fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
                 }
                 if (typeInfo.type() == Character.class) {
-                    return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion(
-                            Character.BYTES, options, io -> TypeUtils.box(io.readChar()),
-                            fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
+                    return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset,
+                            totalRows) -> FixedWidthChunkInputStreamGenerator
+                                    .extractChunkFromInputStreamWithTypeConversion(
+                                            Character.BYTES, options, io -> TypeUtils.box(io.readChar()),
+                                            fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
                 }
                 if (typeInfo.type() == Double.class) {
-                    return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion(
-                            Double.BYTES, options, io -> TypeUtils.box(io.readDouble()),
-                            fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
+                    return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset,
+                            totalRows) -> FixedWidthChunkInputStreamGenerator
+                                    .extractChunkFromInputStreamWithTypeConversion(
+                                            Double.BYTES, options, io -> TypeUtils.box(io.readDouble()),
+                                            fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
                 }
                 if (typeInfo.type() == Float.class) {
-                    return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion(
-                            Float.BYTES, options, io -> TypeUtils.box(io.readFloat()),
-                            fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
+                    return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset,
+                            totalRows) -> FixedWidthChunkInputStreamGenerator
+                                    .extractChunkFromInputStreamWithTypeConversion(
+                                            Float.BYTES, options, io -> TypeUtils.box(io.readFloat()),
+                                            fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
                 }
                 if (typeInfo.type() == Integer.class) {
-                    return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion(
-                            Integer.BYTES, options, io -> TypeUtils.box(io.readInt()),
-                            fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
+                    return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset,
+                            totalRows) -> FixedWidthChunkInputStreamGenerator
+                                    .extractChunkFromInputStreamWithTypeConversion(
+                                            Integer.BYTES, options, io -> TypeUtils.box(io.readInt()),
+                                            fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
                 }
                 if (typeInfo.type() == Long.class) {
-                    return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion(
-                            Long.BYTES, options, io -> TypeUtils.box(io.readLong()),
-                            fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
+                    return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset,
+                            totalRows) -> FixedWidthChunkInputStreamGenerator
+                                    .extractChunkFromInputStreamWithTypeConversion(
+                                            Long.BYTES, options, io -> TypeUtils.box(io.readLong()),
+                                            fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
                 }
                 if (typeInfo.type() == Short.class) {
-                    return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion(
-                            Short.BYTES, options, io -> TypeUtils.box(io.readShort()),
-                            fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
+                    return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset,
+                            totalRows) -> FixedWidthChunkInputStreamGenerator
+                                    .extractChunkFromInputStreamWithTypeConversion(
+                                            Short.BYTES, options, io -> TypeUtils.box(io.readShort()),
+                                            fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
                 }
                 if (typeInfo.type() == LocalDate.class) {
-                    return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform(
-                            options,
-                            value -> value == QueryConstants.NULL_LONG
-                                    ? null
-                                    : LocalDate.ofEpochDay(value / MS_PER_DAY),
-                            fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
+                    return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset,
+                            totalRows) -> LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform(
+                                    options,
+                                    value -> value == QueryConstants.NULL_LONG
+                                            ? null
+                                            : LocalDate.ofEpochDay(value / MS_PER_DAY),
+                                    fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
                 }
                 if (typeInfo.type() == LocalTime.class) {
-                    return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform(
-                            options,
-                            value -> value == QueryConstants.NULL_LONG ? null : LocalTime.ofNanoOfDay(value),
-                            fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
+                    return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset,
+                            totalRows) -> LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform(
+                                    options,
+                                    value -> value == QueryConstants.NULL_LONG ? null : LocalTime.ofNanoOfDay(value),
+                                    fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
                 }
                 if (typeInfo.type() == String.class ||
                         options.columnConversionMode().equals(ColumnConversionMode.Stringify)) {
-                    return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream(is, fieldNodeIter,
-                            bufferInfoIter,
-                            (buf, off, len) -> new String(buf, off, len, Charsets.UTF_8), outChunk, outOffset,
-                            totalRows);
+                    return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset,
+                            totalRows) -> VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream(is,
+                                    fieldNodeIter,
+                                    bufferInfoIter,
+                                    (buf, off, len) -> new String(buf, off, len, Charsets.UTF_8), outChunk, outOffset,
+                                    totalRows);
                 }
                 throw new UnsupportedOperationException(
                         "Do not yet support column conversion mode: " + options.columnConversionMode());
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java
index e1075e7dcf3..a1de362f385 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java
@@ -244,10 +244,6 @@ static WritableObjectChunk<T, Values> extractChunkFromInputStream(
             final int totalRows,
             ChunkReadingFactory chunkReadingFactory) throws IOException {
 
-        final FieldNodeInfo nodeInfo = fieldNodeIter.next();
-        final long validityBuffer = bufferInfoIter.nextLong();
-        final long offsetsBuffer = bufferInfoIter.nextLong();
-
         final Class<?> componentType = typeInfo.type().getComponentType();
         final Class<?> innerComponentType = componentType != null ? componentType.getComponentType() : null;
 
@@ -259,13 +255,18 @@ static WritableObjectChunk<T, Values> extractChunkFromInputStream(
             chunkType = ChunkType.fromElementType(componentType);
         }
 
+        ChunkReader componentReader = chunkReadingFactory.extractChunkFromInputStream(
+                options,
+                new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, innerComponentType,
+                        typeInfo.componentArrowField()));
+
+        final FieldNodeInfo nodeInfo = fieldNodeIter.next();
+        final long validityBuffer = bufferInfoIter.nextLong();
+        final long offsetsBuffer = bufferInfoIter.nextLong();
+
         if (nodeInfo.numElements == 0) {
-            try (final WritableChunk<Values> ignored = chunkReadingFactory.extractChunkFromInputStream(
-                    options,
-                    new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, innerComponentType,
-                            typeInfo.componentArrowField()),
-                    fieldNodeIter,
-                    bufferInfoIter, is, null, 0, 0)) {
+            try (final WritableChunk<Values> ignored =
+                    componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) {
                 return WritableObjectChunk.makeWritableChunk(nodeInfo.numElements);
             }
         }
@@ -303,11 +304,8 @@ static WritableObjectChunk<T, Values> extractChunkFromInputStream(
         }
 
         final ArrayExpansionKernel kernel = ArrayExpansionKernel.makeExpansionKernel(chunkType, componentType);
-        try (final WritableChunk<Values> inner = chunkReadingFactory.extractChunkFromInputStream(
-                options,
-                new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, innerComponentType,
-                        typeInfo.componentArrowField()),
-                fieldNodeIter, bufferInfoIter, is, null, 0, 0)) {
+        try (final WritableChunk<Values> inner =
+                componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) {
             chunk = kernel.contract(inner, offsets, outChunk, outOffset, totalRows);
 
             long nextValid = 0;
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java
index b7bb8cee6a4..5ee91971c5f 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java
@@ -244,22 +244,22 @@ static WritableObjectChunk<Vector<?>, Values> extractChunkFromInputStream(
             final int totalRows,
             ChunkReadingFactory chunkReadingFactory) throws IOException {
 
+        final Class<?> componentType =
+                VectorExpansionKernel.getComponentType(typeInfo.type(), typeInfo.componentType());
+        final ChunkType chunkType = ChunkType.fromElementType(componentType);
+        ChunkReader componentReader = chunkReadingFactory.extractChunkFromInputStream(
+                options,
+                new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, componentType.getComponentType(),
+                        typeInfo.componentArrowField()));
+
         final FieldNodeInfo nodeInfo = fieldNodeIter.next();
         final long validityBuffer = bufferInfoIter.nextLong();
         final long offsetsBuffer = bufferInfoIter.nextLong();
 
-        final Class<?> componentType =
-                VectorExpansionKernel.getComponentType(typeInfo.type(), typeInfo.componentType());
-        final ChunkType chunkType = ChunkType.fromElementType(componentType);
 
         if (nodeInfo.numElements == 0) {
-            try (final WritableChunk<Values> ignored = chunkReadingFactory.extractChunkFromInputStream(
-                    options,
-                    new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, componentType.getComponentType(),
-                            typeInfo.componentArrowField()),
-                    fieldNodeIter, bufferInfoIter,
-                    is,
-                    null, 0, 0)) {
+            try (final WritableChunk<Values> ignored =
+                    componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) {
                 if (outChunk != null) {
                     return outChunk.asWritableObjectChunk();
                 }
@@ -300,13 +300,8 @@ static WritableObjectChunk<Vector<?>, Values> extractChunkFromInputStream(
         }
 
         final VectorExpansionKernel kernel = VectorExpansionKernel.makeExpansionKernel(chunkType, componentType);
-        try (final WritableChunk<Values> inner = chunkReadingFactory.extractChunkFromInputStream(
-                options,
-                new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, componentType.getComponentType(),
-                        typeInfo.componentArrowField()),
-                fieldNodeIter, bufferInfoIter,
-                is,
-                null, 0, 0)) {
+        try (final WritableChunk<Values> inner =
+                componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) {
             chunk = kernel.contract(inner, offsets, outChunk, outOffset, totalRows);
 
             long nextValid = 0;
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java
index b0c62c652c1..f7f6dafbf27 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java
@@ -13,6 +13,7 @@
 import io.deephaven.engine.table.impl.util.BarrageMessage;
 import io.deephaven.extensions.barrage.BarrageSubscriptionOptions;
 import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator;
+import io.deephaven.extensions.barrage.chunk.ChunkReader;
 import io.deephaven.extensions.barrage.chunk.ChunkReadingFactory;
 import io.deephaven.extensions.barrage.chunk.DefaultChunkReadingFactory;
 import io.deephaven.extensions.barrage.table.BarrageTable;
@@ -198,11 +199,10 @@ protected BarrageMessage createBarrageMessage(BarrageProtoUtil.MessageInfo mi, i
             msg.addColumnData[ci].data = new ArrayList<>();
             final int factor = (columnConversionFactors == null) ? 1 : columnConversionFactors[ci];
             try {
-                acd.data.add(DefaultChunkReadingFactory.INSTANCE.extractChunkFromInputStream(options, factor,
+                ChunkReader reader = DefaultChunkReadingFactory.INSTANCE.extractChunkFromInputStream(options, factor,
                         new ChunkReadingFactory.ChunkTypeInfo(columnChunkTypes[ci], columnTypes[ci], componentTypes[ci],
-                                schema.fields(ci)),
-                        fieldNodeIter,
-                        bufferInfoIter, mi.inputStream, null, 0, 0));
+                                schema.fields(ci)));
+                acd.data.add(reader.read(fieldNodeIter, bufferInfoIter, mi.inputStream, null, 0, 0));
             } catch (final IOException unexpected) {
                 throw new UncheckedDeephavenException(unexpected);
             }
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java
index 571082227db..134dfd17c3b 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java
@@ -19,12 +19,14 @@
 import io.deephaven.engine.rowset.RowSetShiftData;
 import io.deephaven.engine.table.impl.util.*;
 import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator;
+import io.deephaven.extensions.barrage.chunk.ChunkReader;
 import io.deephaven.extensions.barrage.chunk.ChunkReadingFactory;
 import io.deephaven.extensions.barrage.chunk.DefaultChunkReadingFactory;
 import io.deephaven.util.datastructures.LongSizedDataStructure;
 import io.deephaven.chunk.ChunkType;
 import io.deephaven.internal.log.LoggerFactory;
 import io.deephaven.io.logger.Logger;
+import org.apache.arrow.flatbuf.Field;
 import org.apache.arrow.flatbuf.Message;
 import org.apache.arrow.flatbuf.MessageHeader;
 import org.apache.arrow.flatbuf.RecordBatch;
@@ -37,6 +39,7 @@
 import java.util.Arrays;
 import java.util.BitSet;
 import java.util.Iterator;
+import java.util.List;
 import java.util.PrimitiveIterator;
 import java.util.function.LongConsumer;
 
@@ -54,10 +57,10 @@ public class BarrageStreamReader implements StreamReader {
     private long numModRowsRead = 0;
     private long numModRowsTotal = 0;
 
-    private Schema schema;
     private BarrageMessage msg = null;
 
     private final ChunkReadingFactory chunkReadingFactory = DefaultChunkReadingFactory.INSTANCE;
+    private final List<ChunkReader> readers = new ArrayList<>();
 
     public BarrageStreamReader(final LongConsumer deserializeTmConsumer) {
         this.deserializeTmConsumer = deserializeTmConsumer;
@@ -244,12 +247,8 @@ public BarrageMessage safelyParseFrom(final StreamReaderOptions options,
                     }
 
                     // fill the chunk with data and assign back into the array
-                    acd.data.set(lastChunkIndex,
-                            chunkReadingFactory.extractChunkFromInputStream(options,
-                                    new ChunkReadingFactory.ChunkTypeInfo(columnChunkTypes[ci],
-                                            columnTypes[ci], componentTypes[ci], schema.fields(ci)),
-                                    fieldNodeIter, bufferInfoIter, ois,
-                                    chunk, chunk.size(), (int) batch.length()));
+                    acd.data.set(lastChunkIndex, readers.get(ci).read(fieldNodeIter, bufferInfoIter, ois, chunk,
+                            chunk.size(), (int) batch.length()));
                     chunk.setSize(chunk.size() + (int) batch.length());
                 }
                 numAddRowsRead += batch.length();
@@ -277,12 +276,8 @@ public BarrageMessage safelyParseFrom(final StreamReaderOptions options,
                     }
 
                     // fill the chunk with data and assign back into the array
-                    mcd.data.set(lastChunkIndex,
-                            chunkReadingFactory.extractChunkFromInputStream(options,
-                                    new ChunkReadingFactory.ChunkTypeInfo(columnChunkTypes[ci],
-                                            columnTypes[ci], componentTypes[ci], null),
-                                    fieldNodeIter, bufferInfoIter, ois,
-                                    chunk, chunk.size(), numRowsToRead));
+                    mcd.data.set(lastChunkIndex, readers.get(ci).read(fieldNodeIter, bufferInfoIter, ois, chunk,
+                            chunk.size(), numRowsToRead));
                     chunk.setSize(chunk.size() + numRowsToRead);
                 }
                 numModRowsRead += batch.length();
@@ -292,7 +287,15 @@ public BarrageMessage safelyParseFrom(final StreamReaderOptions options,
 
             if (header != null && header.headerType() == MessageHeader.Schema) {
                 // there is no body and our clients do not want to see schema messages
-                this.schema = (Schema) header.header(new Schema());
+                Schema schema = new Schema();
+                header.header(schema);
+                for (int i = 0; i < schema.fieldsLength(); i++) {
+                    Field field = schema.fields(i);
+                    ChunkReader chunkReader = chunkReadingFactory.extractChunkFromInputStream(options,
+                            new ChunkReadingFactory.ChunkTypeInfo(columnChunkTypes[i], columnTypes[i],
+                                    componentTypes[i], field));
+                    readers.add(chunkReader);
+                }
                 return null;
             }
 

From bc2927f7c22457b83242261b60a672c70f21bcfe Mon Sep 17 00:00:00 2001
From: Colin Alworth
Date: Tue, 25 Jun 2024 08:06:43 -0500
Subject: [PATCH 068/219] Commit #3, create vector/array chunk readers to do type lookups once

---
 .../barrage/chunk/ChunkReadingFactory.java    |  11 +-
 .../chunk/DefaultChunkReadingFactory.java     |  18 +--
 .../VarListChunkInputStreamGenerator.java     |  90 --------------
 .../barrage/chunk/VarListChunkReader.java     | 114 ++++++++++++++++++
 .../VectorChunkInputStreamGenerator.java      |  87 -------------
 .../barrage/chunk/VectorChunkReader.java      | 110 +++++++++++++++++
 6 files changed, 230 insertions(+), 200 deletions(-)
 create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkReader.java
 create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkReader.java

diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java
index d624f5bc736..d3f8ba84a95 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java
@@ -68,22 +68,17 @@ public Field componentArrowField() {
      * @param factor
      * @param typeInfo
      * @return
-     * @throws IOException
      */
-    ChunkReader extractChunkFromInputStream(
-            final StreamReaderOptions options,
-            final int factor,
-            final ChunkTypeInfo typeInfo) throws IOException;
+    ChunkReader extractChunkFromInputStream(final StreamReaderOptions options, final int factor,
+            final ChunkTypeInfo typeInfo);
 
     /**
      *
     * @param options
      * @param typeInfo
      * @return
-     * @throws IOException
      */
-    default ChunkReader extractChunkFromInputStream(final StreamReaderOptions options, final ChunkTypeInfo typeInfo)
-            throws IOException {
+    default ChunkReader extractChunkFromInputStream(final StreamReaderOptions options, final ChunkTypeInfo typeInfo) {
         return extractChunkFromInputStream(options, 1, typeInfo);
     }
 
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java
index 6a7ceead2d2..df41a1ae7ca 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java
@@ -4,8 +4,6 @@
 package io.deephaven.extensions.barrage.chunk;
 
 import com.google.common.base.Charsets;
-import io.deephaven.chunk.WritableChunk;
-import io.deephaven.chunk.attributes.Values;
 import io.deephaven.extensions.barrage.ColumnConversionMode;
 import io.deephaven.extensions.barrage.util.StreamReaderOptions;
 import io.deephaven.time.DateTimeUtils;
@@ -13,8 +11,6 @@
 import io.deephaven.util.type.TypeUtils;
 import io.deephaven.vector.Vector;
 
-import java.io.DataInput;
-import java.io.IOException;
 import java.math.BigDecimal;
 import java.math.BigInteger;
 import java.time.Instant;
@@ -22,8 +18,6 @@
 import java.time.LocalTime;
 import java.time.ZonedDateTime;
 import java.util.Arrays;
-import java.util.Iterator;
-import java.util.PrimitiveIterator;
 
 import static io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator.MS_PER_DAY;
 
@@ -37,7 +31,7 @@ public final class DefaultChunkReadingFactory implements ChunkReadingFactory {
 
     @Override
     public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int factor,
-            ChunkTypeInfo typeInfo) throws IOException {
+            ChunkTypeInfo typeInfo) {
         // TODO (deephaven-core#5453): pass in ArrowType to enable ser/deser of single java class in multiple formats
         switch (typeInfo.chunkType()) {
             case Boolean:
@@ -93,17 +87,11 @@ public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int
                                     (buf, off, len) -> Arrays.copyOfRange(buf, off, off + len),
                                     outChunk, outOffset, totalRows);
                 } else {
-                    return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset,
-                            totalRows) -> VarListChunkInputStreamGenerator.extractChunkFromInputStream(options,
-                                    typeInfo,
-                                    fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows, this);
+                    return new VarListChunkReader<>(options, typeInfo, this);
                 }
             }
             if (Vector.class.isAssignableFrom(typeInfo.type())) {
-                return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset,
-                        totalRows) -> VectorChunkInputStreamGenerator.extractChunkFromInputStream(options,
-                                typeInfo, fieldNodeIter, bufferInfoIter,
-                                is, outChunk, outOffset, totalRows, this);
+                return new VectorChunkReader(options, typeInfo, this);
             }
             if (typeInfo.type() == BigInteger.class) {
                 return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset,
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java
index a1de362f385..470b0b87291 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java
@@ -233,95 +233,5 @@ public int drainTo(final OutputStream outputStream) throws IOException {
         }
     }
 
-    static <T> WritableObjectChunk<T, Values> extractChunkFromInputStream(
-            final StreamReaderOptions options,
-            final ChunkReadingFactory.ChunkTypeInfo typeInfo,
-            final Iterator<FieldNodeInfo> fieldNodeIter,
-            final PrimitiveIterator.OfLong bufferInfoIter,
-            final DataInput is,
-            final WritableChunk<Values> outChunk,
-            final int outOffset,
-            final int totalRows,
-            ChunkReadingFactory chunkReadingFactory) throws IOException {
-
-        final Class<?> componentType = typeInfo.type().getComponentType();
-        final Class<?> innerComponentType = componentType != null ? componentType.getComponentType() : null;
-
-        final ChunkType chunkType;
-        if (componentType == boolean.class || componentType == Boolean.class) {
-            // Note: Internally booleans are passed around as bytes, but the wire format is packed bits.
-            chunkType = ChunkType.Byte;
-        } else {
-            chunkType = ChunkType.fromElementType(componentType);
-        }
-
-        ChunkReader componentReader = chunkReadingFactory.extractChunkFromInputStream(
-                options,
-                new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, innerComponentType,
-                        typeInfo.componentArrowField()));
-
-        final FieldNodeInfo nodeInfo = fieldNodeIter.next();
-        final long validityBuffer = bufferInfoIter.nextLong();
-        final long offsetsBuffer = bufferInfoIter.nextLong();
-
-        if (nodeInfo.numElements == 0) {
-            try (final WritableChunk<Values> ignored =
-                    componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) {
-                return WritableObjectChunk.makeWritableChunk(nodeInfo.numElements);
-            }
-        }
-
-        final WritableObjectChunk<T, Values> chunk;
-        final int numValidityLongs = (nodeInfo.numElements + 63) / 64;
-        try (final WritableLongChunk<Values> isValid = WritableLongChunk.makeWritableChunk(numValidityLongs);
-                final WritableIntChunk<ChunkPositions> offsets =
-                        WritableIntChunk.makeWritableChunk(nodeInfo.numElements + 1)) {
-            // Read validity buffer:
-            int jj = 0;
-            for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) {
-                isValid.set(jj, is.readLong());
-            }
-            final long valBufRead = jj * 8L;
-            if (valBufRead < validityBuffer) {
-                is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead));
-            }
-            // we support short validity buffers
-            for (; jj < numValidityLongs; ++jj) {
-                isValid.set(jj, -1); // -1 is bit-wise representation of all ones
-            }
-            // consumed entire validity buffer by here
-
-            // Read offsets:
-            final long offBufRead = (nodeInfo.numElements + 1L) * Integer.BYTES;
-            if (offsetsBuffer < offBufRead) {
-                throw new IllegalStateException("offset buffer is too short for the expected number of elements");
-            }
-            for (int i = 0; i < nodeInfo.numElements + 1; ++i) {
-                offsets.set(i, is.readInt());
-            }
-            if (offBufRead < offsetsBuffer) {
-                is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, offsetsBuffer - offBufRead));
-            }
-
-            final ArrayExpansionKernel kernel = ArrayExpansionKernel.makeExpansionKernel(chunkType, componentType);
-            try (final WritableChunk<Values> inner =
-                    componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) {
-                chunk = kernel.contract(inner, offsets, outChunk, outOffset, totalRows);
-
-                long nextValid = 0;
-                for (int ii = 0; ii < nodeInfo.numElements; ++ii) {
-                    if ((ii % 64) == 0) {
-                        nextValid = isValid.get(ii / 64);
-                    }
-                    if ((nextValid & 0x1) == 0x0) {
-                        chunk.set(outOffset + ii, null);
-                    }
-                    nextValid >>= 1;
-                }
-            }
-        }
-
-        return chunk;
-    }
 }
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkReader.java
new file mode 100644
index 00000000000..71c294d6387
--- /dev/null
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkReader.java
@@ -0,0 +1,114 @@
+//
+// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
+//
+package io.deephaven.extensions.barrage.chunk;
+
+import io.deephaven.chunk.ChunkType;
+import io.deephaven.chunk.WritableChunk;
+import io.deephaven.chunk.WritableIntChunk;
+import io.deephaven.chunk.WritableLongChunk;
+import io.deephaven.chunk.WritableObjectChunk;
+import io.deephaven.chunk.attributes.ChunkPositions;
+import io.deephaven.chunk.attributes.Values;
+import io.deephaven.extensions.barrage.chunk.array.ArrayExpansionKernel;
+import io.deephaven.extensions.barrage.util.StreamReaderOptions;
+import io.deephaven.util.datastructures.LongSizedDataStructure;
+
+import java.io.DataInput;
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.PrimitiveIterator;
+
+public class VarListChunkReader<T> implements ChunkReader {
+    private static final String DEBUG_NAME = "VarListChunkReader";
+
+    private final ArrayExpansionKernel kernel;
+    private final ChunkReader componentReader;
+
+    public VarListChunkReader(final StreamReaderOptions options, final ChunkReadingFactory.ChunkTypeInfo typeInfo,
+            ChunkReadingFactory chunkReadingFactory) {
+        final Class<?> componentType = typeInfo.type().getComponentType();
+        final Class<?> innerComponentType = componentType != null ? componentType.getComponentType() : null;
+
+        final ChunkType chunkType;
+        if (componentType == boolean.class || componentType == Boolean.class) {
+            // Note: Internally booleans are passed around as bytes, but the wire format is packed bits.
+            chunkType = ChunkType.Byte;
+        } else {
+            chunkType = ChunkType.fromElementType(componentType);
+        }
+        kernel = ArrayExpansionKernel.makeExpansionKernel(chunkType, componentType);
+
+        componentReader = chunkReadingFactory.extractChunkFromInputStream(
+                options,
+                new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, innerComponentType,
+                        typeInfo.componentArrowField()));
+    }
+
+    @Override
+    public WritableObjectChunk<T, Values> read(Iterator<ChunkInputStreamGenerator.FieldNodeInfo> fieldNodeIter,
+            PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk<Values> outChunk, int outOffset,
+            int totalRows) throws IOException {
+        final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next();
+        final long validityBuffer = bufferInfoIter.nextLong();
+        final long offsetsBuffer = bufferInfoIter.nextLong();
+
+        if (nodeInfo.numElements == 0) {
+            try (final WritableChunk<Values> ignored =
+                    componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) {
+                return WritableObjectChunk.makeWritableChunk(nodeInfo.numElements);
+            }
+        }
+
+        final WritableObjectChunk<T, Values> chunk;
+        final int numValidityLongs = (nodeInfo.numElements + 63) / 64;
+        try (final WritableLongChunk<Values> isValid = WritableLongChunk.makeWritableChunk(numValidityLongs);
+                final WritableIntChunk<ChunkPositions> offsets =
+                        WritableIntChunk.makeWritableChunk(nodeInfo.numElements + 1)) {
+            // Read validity buffer:
+            int jj = 0;
+            for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) {
+                isValid.set(jj, is.readLong());
+            }
+            final long valBufRead = jj * 8L;
+            if (valBufRead < validityBuffer) {
+                is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead));
+            }
+            // we support short validity buffers
+            for (; jj < numValidityLongs; ++jj) {
+                isValid.set(jj, -1); // -1 is bit-wise representation of all ones
+            }
+            // consumed entire validity buffer by here
+
+            // Read offsets:
+            final long offBufRead = (nodeInfo.numElements + 1L) * Integer.BYTES;
+            if (offsetsBuffer < offBufRead) {
+                throw new IllegalStateException("offset buffer is too short for the expected number of elements");
+            }
+            for (int i = 0; i < nodeInfo.numElements + 1; ++i) {
+                offsets.set(i, is.readInt());
+            }
+            if (offBufRead < offsetsBuffer) {
+                is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, offsetsBuffer - offBufRead));
+            }
+
+            try (final WritableChunk<Values> inner =
+                    componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) {
+                chunk = kernel.contract(inner, offsets, outChunk, outOffset, totalRows);
+
+                long nextValid = 0;
+                for (int ii = 0; ii < nodeInfo.numElements; ++ii) {
+                    if ((ii % 64) == 0) {
+                        nextValid = isValid.get(ii / 64);
+                    }
+                    if ((nextValid & 0x1) == 0x0) {
+                        chunk.set(outOffset + ii, null);
+                    }
+                    nextValid >>= 1;
+                }
+            }
+        }
+
+        return chunk;
+    }
+}
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java
index 5ee91971c5f..620ae84541f 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java
@@ -232,91 +232,4 @@ public int drainTo(final OutputStream outputStream) throws IOException {
         return LongSizedDataStructure.intSize(DEBUG_NAME, bytesWritten);
         }
     }
-
-    static WritableObjectChunk<Vector<?>, Values> extractChunkFromInputStream(
-            final StreamReaderOptions options,
-            final ChunkReadingFactory.ChunkTypeInfo typeInfo,
-            final Iterator<FieldNodeInfo> fieldNodeIter,
-            final PrimitiveIterator.OfLong bufferInfoIter,
-            final DataInput is,
-            final WritableChunk<Values> outChunk,
-            final int outOffset,
-            final int totalRows,
-            ChunkReadingFactory chunkReadingFactory) throws IOException {
-
-        final Class<?> componentType =
-                VectorExpansionKernel.getComponentType(typeInfo.type(), typeInfo.componentType());
-        final ChunkType chunkType = ChunkType.fromElementType(componentType);
-        ChunkReader componentReader = chunkReadingFactory.extractChunkFromInputStream(
-                options,
-                new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, componentType.getComponentType(),
-                        typeInfo.componentArrowField()));
-
-        final FieldNodeInfo nodeInfo = fieldNodeIter.next();
-        final long validityBuffer = bufferInfoIter.nextLong();
-        final long offsetsBuffer = bufferInfoIter.nextLong();
-
-
-        if (nodeInfo.numElements == 0) {
-            try (final WritableChunk<Values> ignored =
-                    componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) {
-                if (outChunk != null) {
-                    return outChunk.asWritableObjectChunk();
-                }
-                return WritableObjectChunk.makeWritableChunk(totalRows);
-            }
-        }
-
-        final WritableObjectChunk<Vector<?>, Values> chunk;
-        final int numValidityLongs = (nodeInfo.numElements + 63) / 64;
-        try (final WritableLongChunk<Values> isValid = WritableLongChunk.makeWritableChunk(numValidityLongs);
-                final WritableIntChunk<ChunkPositions> offsets =
-                        WritableIntChunk.makeWritableChunk(nodeInfo.numElements + 1)) {
-            // Read validity buffer:
-            int jj = 0;
-            for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) {
-                isValid.set(jj, is.readLong());
-            }
-            final long valBufRead = jj * 8L;
-            if (valBufRead < validityBuffer) {
-                is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead));
-            }
-            // we support short validity buffers
-            for (; jj < numValidityLongs; ++jj) {
-                isValid.set(jj, -1); // -1 is bit-wise representation of all ones
-            }
-            // consumed entire validity buffer by here
-
-            // Read offsets:
-            final long offBufRead = (nodeInfo.numElements + 1L) * Integer.BYTES;
-            if (offsetsBuffer < offBufRead) {
-                throw new IllegalStateException("offset buffer is too short for the expected number of elements");
-            }
-            for (int i = 0; i < nodeInfo.numElements + 1; ++i) {
-                offsets.set(i, is.readInt());
-            }
-            if (offBufRead < offsetsBuffer) {
-                is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, offsetsBuffer - offBufRead));
-            }
-
-            final VectorExpansionKernel kernel = VectorExpansionKernel.makeExpansionKernel(chunkType, componentType);
-            try (final WritableChunk<Values> inner =
-                    componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) {
-                chunk = kernel.contract(inner, offsets, outChunk, outOffset, totalRows);
-
-                long nextValid = 0;
-                for (int ii = 0; ii < nodeInfo.numElements; ++ii) {
-                    if ((ii % 64) == 0) {
-                        nextValid = isValid.get(ii / 64);
-                    }
-                    if ((nextValid & 0x1) == 0x0) {
-                        chunk.set(outOffset + ii, null);
-                    }
-                    nextValid >>= 1;
-                }
-            }
-        }
-
-        return chunk;
-    }
 }
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkReader.java
new file mode 100644
index 00000000000..5ed1f9b36e5
--- /dev/null
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkReader.java
@@ -0,0 +1,110 @@
+//
+// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
+//
+package io.deephaven.extensions.barrage.chunk;
+
+import io.deephaven.chunk.ChunkType;
+import io.deephaven.chunk.WritableChunk;
+import io.deephaven.chunk.WritableIntChunk;
+import io.deephaven.chunk.WritableLongChunk;
+import io.deephaven.chunk.WritableObjectChunk;
+import io.deephaven.chunk.attributes.ChunkPositions;
+import io.deephaven.chunk.attributes.Values;
+import io.deephaven.extensions.barrage.chunk.vector.VectorExpansionKernel;
+import io.deephaven.extensions.barrage.util.StreamReaderOptions;
+import io.deephaven.util.datastructures.LongSizedDataStructure;
+import io.deephaven.vector.Vector;
+
+import java.io.DataInput;
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.PrimitiveIterator;
+
+class VectorChunkReader implements ChunkReader {
+    private static final String DEBUG_NAME = "VectorChunkReader";
+    private final ChunkReader componentReader;
+    private final VectorExpansionKernel kernel;
+
+    public VectorChunkReader(final StreamReaderOptions options, final ChunkReadingFactory.ChunkTypeInfo typeInfo,
+            ChunkReadingFactory chunkReadingFactory) {
+
+        final Class<?> componentType =
+                VectorExpansionKernel.getComponentType(typeInfo.type(), typeInfo.componentType());
+        final ChunkType chunkType = ChunkType.fromElementType(componentType);
+        componentReader = chunkReadingFactory.extractChunkFromInputStream(
+                options,
+                new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, componentType.getComponentType(),
+                        typeInfo.componentArrowField()));
+        kernel = VectorExpansionKernel.makeExpansionKernel(chunkType, componentType);
+    }
+
+    @Override
+    public WritableObjectChunk<Vector<?>, Values> read(Iterator<ChunkInputStreamGenerator.FieldNodeInfo> fieldNodeIter,
+            PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk<Values> outChunk, int outOffset,
+            int totalRows) throws IOException {
+        final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next();
+        final long validityBuffer = bufferInfoIter.nextLong();
+        final long offsetsBuffer = bufferInfoIter.nextLong();
+
+        if (nodeInfo.numElements == 0) {
+            try (final WritableChunk<Values> ignored =
+                    componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) {
+                if (outChunk != null) {
+                    return outChunk.asWritableObjectChunk();
+                }
+                return WritableObjectChunk.makeWritableChunk(totalRows);
+            }
+        }
+
+        final WritableObjectChunk<Vector<?>, Values> chunk;
+        final int numValidityLongs = (nodeInfo.numElements + 63) / 64;
+        try (final WritableLongChunk<Values> isValid = WritableLongChunk.makeWritableChunk(numValidityLongs);
+                final WritableIntChunk<ChunkPositions> offsets =
+                        WritableIntChunk.makeWritableChunk(nodeInfo.numElements + 1)) {
+            // Read validity buffer:
+            int jj = 0;
+            for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) {
+                isValid.set(jj, is.readLong());
+            }
+            final long valBufRead = jj * 8L;
+            if (valBufRead < validityBuffer) {
+                is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead));
+            }
+            // we support short validity buffers
+            for (; jj < numValidityLongs; ++jj) {
+                isValid.set(jj, -1); // -1 is bit-wise representation of all ones
+            }
+            // consumed entire validity buffer by here
+
+            // Read offsets:
+            final long offBufRead = (nodeInfo.numElements + 1L) * Integer.BYTES;
+            if (offsetsBuffer < offBufRead) {
+                throw new IllegalStateException("offset buffer is too short for the expected number of elements");
+            }
+            for (int i = 0; i < nodeInfo.numElements + 1; ++i) {
+                offsets.set(i, is.readInt());
+            }
+            if (offBufRead < offsetsBuffer) {
+                is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, offsetsBuffer - offBufRead));
+            }
+
+            try (final WritableChunk<Values> inner =
+                    componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) {
+                chunk = kernel.contract(inner, offsets, outChunk, outOffset, totalRows);
+
+                long nextValid = 0;
+                for (int ii = 0; ii < nodeInfo.numElements; ++ii) {
+                    if ((ii % 64) == 0) {
+                        nextValid = isValid.get(ii / 64);
+                    }
+                    if ((nextValid & 0x1) == 0x0) {
+                        chunk.set(outOffset + ii, null);
+                    }
+                    nextValid >>= 1;
+                }
+            }
+        }
+
+        return chunk;
+    }
+}

From 26957913325ff080f1d7e82cba8b48b2854fa0d7 Mon Sep 17 00:00:00 2001
From: Colin Alworth
Date: Tue, 25 Jun 2024 13:44:40 -0500
Subject: [PATCH 069/219] Commit #4, replicate new chunk readers for primitives

---
 .../chunk/ByteChunkInputStreamGenerator.java  | 197 -----------------
 .../barrage/chunk/ByteChunkReader.java        | 204 ++++++++++++++++++
 .../chunk/CharChunkInputStreamGenerator.java  | 197 -----------------
 .../barrage/chunk/CharChunkReader.java        | 200 +++++++++++++++++
 .../chunk/DefaultChunkReadingFactory.java     |  51 ++---
 .../DoubleChunkInputStreamGenerator.java      | 197 -----------------
 .../barrage/chunk/DoubleChunkReader.java      | 204 ++++++++++++++++++
 .../chunk/FloatChunkInputStreamGenerator.java | 197 -----------------
 .../barrage/chunk/FloatChunkReader.java       | 204 ++++++++++++++++++
 .../chunk/IntChunkInputStreamGenerator.java   | 197 -----------------
 .../barrage/chunk/IntChunkReader.java         | 204 ++++++++++++++++++
 .../chunk/LongChunkInputStreamGenerator.java  | 197 -----------------
 .../barrage/chunk/LongChunkReader.java        | 204 ++++++++++++++++++
 .../chunk/ShortChunkInputStreamGenerator.java | 197 -----------------
 .../barrage/chunk/ShortChunkReader.java       | 204 ++++++++++++++++++
 15 files changed, 1437 insertions(+), 1417 deletions(-)
 create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkReader.java
 create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkReader.java
 create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java
 create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkReader.java
 create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkReader.java
 create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkReader.java
 create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkReader.java

a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java index c2cc7f3e453..d334e031bed 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java @@ -7,10 +7,7 @@ // @formatter:off package io.deephaven.extensions.barrage.chunk; -import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.primitive.function.ToByteFunction; @@ -21,17 +18,11 @@ import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.ByteChunk; import io.deephaven.chunk.WritableByteChunk; -import io.deephaven.chunk.WritableLongChunk; import io.deephaven.util.type.TypeUtils; import org.jetbrains.annotations.Nullable; -import java.io.DataInput; import java.io.IOException; import java.io.OutputStream; -import java.util.Iterator; -import java.util.PrimitiveIterator; -import java.util.function.Function; -import java.util.function.IntFunction; import static io.deephaven.util.QueryConstants.*; @@ -167,192 +158,4 @@ public int drainTo(final OutputStream outputStream) throws IOException { return LongSizedDataStructure.intSize("ByteChunkInputStreamGenerator", bytesWritten); } } - - @FunctionalInterface - public interface ByteConversion { - byte apply(byte in); - - ByteConversion IDENTITY = (byte a) -> a; - } - - static WritableByteChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - return extractChunkFromInputStreamWithConversion( - options, ByteConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows); - } - - static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final StreamReaderOptions options, - final Function transform, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - try (final WritableByteChunk inner = extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - - final WritableObjectChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, inner.size()), - WritableObjectChunk::makeWritableChunk, - WritableChunk::asWritableObjectChunk); - - if (outChunk == null) { - // if we're not given an output chunk then we better be writing at the front of the new one - Assert.eqZero(outOffset, "outOffset"); - } - - for (int ii = 0; ii < inner.size(); ++ii) { - byte value = inner.get(ii); - chunk.set(outOffset + ii, transform.apply(value)); - } - - return chunk; - } - } - - static WritableByteChunk extractChunkFromInputStreamWithConversion( - final StreamReaderOptions options, - final ByteConversion conversion, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) 
throws IOException { - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long payloadBuffer = bufferInfoIter.nextLong(); - - final WritableByteChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, nodeInfo.numElements), - WritableByteChunk::makeWritableChunk, - WritableChunk::asWritableByteChunk); - - if (nodeInfo.numElements == 0) { - return chunk; - } - - final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { - if (options.useDeephavenNulls() && validityBuffer != 0) { - throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); - } - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity buffer by here - - final long payloadRead = (long) nodeInfo.numElements * Byte.BYTES; - Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); - - if (options.useDeephavenNulls()) { - useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); - } else { - useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); - } - - final long overhangPayload = payloadBuffer - payloadRead; - if (overhangPayload > 0) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); - } - } - - return chunk; - } - - private static > T castOrCreateChunk( - final WritableChunk outChunk, - final int numRows, - final IntFunction chunkFactory, - final Function, T> castFunction) { - if (outChunk != null) { - return castFunction.apply(outChunk); - } - final T newChunk = chunkFactory.apply(numRows); - newChunk.setSize(numRows); - return newChunk; - } - - private static void useDeephavenNulls( - final ByteConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableByteChunk chunk, - final int offset) throws IOException { - if (conversion == ByteConversion.IDENTITY) { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - chunk.set(offset + ii, is.readByte()); - } - } else { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - final byte in = is.readByte(); - final byte out = in == NULL_BYTE ? 
in : conversion.apply(in); - chunk.set(offset + ii, out); - } - } - } - - private static void useValidityBuffer( - final ByteConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableByteChunk chunk, - final int offset, - final WritableLongChunk isValid) throws IOException { - final int numElements = nodeInfo.numElements; - final int numValidityWords = (numElements + 63) / 64; - - int ei = 0; - int pendingSkips = 0; - - for (int vi = 0; vi < numValidityWords; ++vi) { - int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); - long validityWord = isValid.get(vi); - do { - if ((validityWord & 1) == 1) { - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Byte.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - ei += pendingSkips; - pendingSkips = 0; - } - chunk.set(offset + ei++, conversion.apply(is.readByte())); - validityWord >>= 1; - bitsLeftInThisWord--; - } else { - final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); - pendingSkips += skips; - validityWord >>= skips; - bitsLeftInThisWord -= skips; - } - } while (bitsLeftInThisWord > 0); - } - - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Byte.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - } - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkReader.java new file mode 100644 index 00000000000..29bee0fea05 --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkReader.java @@ -0,0 +1,204 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +// ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY +// ****** Edit CharChunkReader and run "./gradlew replicateBarrageUtils" to regenerate +// +// @formatter:off +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.base.verify.Assert; +import io.deephaven.chunk.WritableByteChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; +import java.util.function.Function; +import java.util.function.IntFunction; + +import static io.deephaven.util.QueryConstants.NULL_BYTE; + +public class ByteChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "ByteChunkReader"; + private final StreamReaderOptions options; + private final ByteConversion conversion; + + @FunctionalInterface + public interface ByteConversion { + byte apply(byte in); + + ByteConversion IDENTITY = (byte a) -> a; + } + + public ByteChunkReader(StreamReaderOptions options) { + this(options, ByteConversion.IDENTITY); + } + + public ByteChunkReader(StreamReaderOptions options, ByteConversion conversion) { + this.options = options; + this.conversion = conversion; + } + + public ChunkReader transform(Function transform) { + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { + try (final WritableByteChunk inner = ByteChunkReader.this.read( + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + + final WritableObjectChunk chunk = castOrCreateChunk( + outChunk, + 
Math.max(totalRows, inner.size()), + WritableObjectChunk::makeWritableChunk, + WritableChunk::asWritableObjectChunk); + + if (outChunk == null) { + // if we're not given an output chunk then we better be writing at the front of the new one + Assert.eqZero(outOffset, "outOffset"); + } + + for (int ii = 0; ii < inner.size(); ++ii) { + byte value = inner.get(ii); + chunk.set(outOffset + ii, transform.apply(value)); + } + + return chunk; + } + }; + } + + @Override + public WritableByteChunk read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long payloadBuffer = bufferInfoIter.nextLong(); + + final WritableByteChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, nodeInfo.numElements), + WritableByteChunk::makeWritableChunk, + WritableChunk::asWritableByteChunk); + + if (nodeInfo.numElements == 0) { + return chunk; + } + + final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { + if (options.useDeephavenNulls() && validityBuffer != 0) { + throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); + } + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + final long payloadRead = (long) nodeInfo.numElements * Byte.BYTES; + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); + + if (options.useDeephavenNulls()) { + useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); + } else { + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); + } + + final long overhangPayload = payloadBuffer - payloadRead; + if (overhangPayload > 0) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); + } + } + + return chunk; + } + + private static > T castOrCreateChunk( + final WritableChunk outChunk, + final int numRows, + final IntFunction chunkFactory, + final Function, T> castFunction) { + if (outChunk != null) { + return castFunction.apply(outChunk); + } + final T newChunk = chunkFactory.apply(numRows); + newChunk.setSize(numRows); + return newChunk; + } + + private static void useDeephavenNulls( + final ByteConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableByteChunk chunk, + final int offset) throws IOException { + if (conversion == ByteConversion.IDENTITY) { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + chunk.set(offset + ii, is.readByte()); + } + } else { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + final byte in = is.readByte(); + final byte out = in == NULL_BYTE ? 
in : conversion.apply(in); + chunk.set(offset + ii, out); + } + } + } + + private static void useValidityBuffer( + final ByteConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableByteChunk chunk, + final int offset, + final WritableLongChunk isValid) throws IOException { + final int numElements = nodeInfo.numElements; + final int numValidityWords = (numElements + 63) / 64; + + int ei = 0; + int pendingSkips = 0; + + for (int vi = 0; vi < numValidityWords; ++vi) { + int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); + long validityWord = isValid.get(vi); + do { + if ((validityWord & 1) == 1) { + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Byte.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + ei += pendingSkips; + pendingSkips = 0; + } + chunk.set(offset + ei++, conversion.apply(is.readByte())); + validityWord >>= 1; + bitsLeftInThisWord--; + } else { + final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); + pendingSkips += skips; + validityWord >>= skips; + bitsLeftInThisWord -= skips; + } + } while (bitsLeftInThisWord > 0); + } + + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Byte.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + } + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java index 878bc0a6cd6..83b1f2f72f1 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java @@ -3,10 +3,7 @@ // package io.deephaven.extensions.barrage.chunk; -import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.primitive.function.ToCharFunction; @@ -17,17 +14,11 @@ import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.CharChunk; import io.deephaven.chunk.WritableCharChunk; -import io.deephaven.chunk.WritableLongChunk; import io.deephaven.util.type.TypeUtils; import org.jetbrains.annotations.Nullable; -import java.io.DataInput; import java.io.IOException; import java.io.OutputStream; -import java.util.Iterator; -import java.util.PrimitiveIterator; -import java.util.function.Function; -import java.util.function.IntFunction; import static io.deephaven.util.QueryConstants.*; @@ -163,192 +154,4 @@ public int drainTo(final OutputStream outputStream) throws IOException { return LongSizedDataStructure.intSize("CharChunkInputStreamGenerator", bytesWritten); } } - - @FunctionalInterface - public interface CharConversion { - char apply(char in); - - CharConversion IDENTITY = (char a) -> a; - } - - static WritableCharChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - return extractChunkFromInputStreamWithConversion( - options, CharConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows); - } - - 
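
The useValidityBuffer loop shared by the old generators and the new readers decodes
the Arrow validity bitmap in runs rather than bit by bit: Long.numberOfTrailingZeros
batches consecutive null slots into pendingSkips, so the payload bytes for a whole
run of nulls are skipped with a single call. A minimal standalone sketch of the same
idea, using plain arrays in place of DataInput and Deephaven chunks (all names here
are hypothetical, not part of this patch):

    import java.util.Arrays;

    final class ValiditySkipRunSketch {
        // Decode numElements bytes from payload, writing nullValue wherever the
        // validity bit is 0. Validity is packed least-significant-bit first.
        static byte[] decode(long[] validity, byte[] payload, int numElements, byte nullValue) {
            final byte[] out = new byte[numElements];
            int ei = 0;           // next output slot
            int pi = 0;           // next payload byte
            int pendingSkips = 0; // run of null slots not yet materialized
            final int words = (numElements + 63) / 64;
            for (int vi = 0; vi < words; ++vi) {
                int bitsLeft = Math.min(64, numElements - vi * 64);
                long word = validity[vi];
                do {
                    if ((word & 1) == 1) {
                        if (pendingSkips > 0) {
                            pi += pendingSkips; // null slots still occupy payload space
                            Arrays.fill(out, ei, ei + pendingSkips, nullValue);
                            ei += pendingSkips;
                            pendingSkips = 0;
                        }
                        out[ei++] = payload[pi++];
                        word >>= 1;
                        bitsLeft--;
                    } else {
                        // numberOfTrailingZeros(0) is 64, so min() also caps the final word
                        final int skips = Math.min(Long.numberOfTrailingZeros(word), bitsLeft);
                        pendingSkips += skips;
                        word >>= skips;
                        bitsLeft -= skips;
                    }
                } while (bitsLeft > 0);
            }
            Arrays.fill(out, ei, ei + pendingSkips, nullValue); // trailing run of nulls
            return out;
        }
    }
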
static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final StreamReaderOptions options, - final Function transform, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - try (final WritableCharChunk inner = extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - - final WritableObjectChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, inner.size()), - WritableObjectChunk::makeWritableChunk, - WritableChunk::asWritableObjectChunk); - - if (outChunk == null) { - // if we're not given an output chunk then we better be writing at the front of the new one - Assert.eqZero(outOffset, "outOffset"); - } - - for (int ii = 0; ii < inner.size(); ++ii) { - char value = inner.get(ii); - chunk.set(outOffset + ii, transform.apply(value)); - } - - return chunk; - } - } - - static WritableCharChunk extractChunkFromInputStreamWithConversion( - final StreamReaderOptions options, - final CharConversion conversion, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long payloadBuffer = bufferInfoIter.nextLong(); - - final WritableCharChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, nodeInfo.numElements), - WritableCharChunk::makeWritableChunk, - WritableChunk::asWritableCharChunk); - - if (nodeInfo.numElements == 0) { - return chunk; - } - - final int numValidityLongs = options.useDeephavenNulls() ? 
0 : (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { - if (options.useDeephavenNulls() && validityBuffer != 0) { - throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); - } - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity buffer by here - - final long payloadRead = (long) nodeInfo.numElements * Character.BYTES; - Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); - - if (options.useDeephavenNulls()) { - useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); - } else { - useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); - } - - final long overhangPayload = payloadBuffer - payloadRead; - if (overhangPayload > 0) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); - } - } - - return chunk; - } - - private static > T castOrCreateChunk( - final WritableChunk outChunk, - final int numRows, - final IntFunction chunkFactory, - final Function, T> castFunction) { - if (outChunk != null) { - return castFunction.apply(outChunk); - } - final T newChunk = chunkFactory.apply(numRows); - newChunk.setSize(numRows); - return newChunk; - } - - private static void useDeephavenNulls( - final CharConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableCharChunk chunk, - final int offset) throws IOException { - if (conversion == CharConversion.IDENTITY) { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - chunk.set(offset + ii, is.readChar()); - } - } else { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - final char in = is.readChar(); - final char out = in == NULL_CHAR ? 
in : conversion.apply(in); - chunk.set(offset + ii, out); - } - } - } - - private static void useValidityBuffer( - final CharConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableCharChunk chunk, - final int offset, - final WritableLongChunk isValid) throws IOException { - final int numElements = nodeInfo.numElements; - final int numValidityWords = (numElements + 63) / 64; - - int ei = 0; - int pendingSkips = 0; - - for (int vi = 0; vi < numValidityWords; ++vi) { - int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); - long validityWord = isValid.get(vi); - do { - if ((validityWord & 1) == 1) { - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Character.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - ei += pendingSkips; - pendingSkips = 0; - } - chunk.set(offset + ei++, conversion.apply(is.readChar())); - validityWord >>= 1; - bitsLeftInThisWord--; - } else { - final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); - pendingSkips += skips; - validityWord >>= skips; - bitsLeftInThisWord -= skips; - } - } while (bitsLeftInThisWord > 0); - } - - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Character.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - } - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkReader.java new file mode 100644 index 00000000000..b6fce96ffbf --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkReader.java @@ -0,0 +1,200 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.base.verify.Assert; +import io.deephaven.chunk.WritableCharChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; +import java.util.function.Function; +import java.util.function.IntFunction; + +import static io.deephaven.util.QueryConstants.NULL_CHAR; + +public class CharChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "CharChunkReader"; + private final StreamReaderOptions options; + private final CharConversion conversion; + + @FunctionalInterface + public interface CharConversion { + char apply(char in); + + CharConversion IDENTITY = (char a) -> a; + } + + public CharChunkReader(StreamReaderOptions options) { + this(options, CharConversion.IDENTITY); + } + + public CharChunkReader(StreamReaderOptions options, CharConversion conversion) { + this.options = options; + this.conversion = conversion; + } + + public ChunkReader transform(Function transform) { + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { + try (final WritableCharChunk inner = CharChunkReader.this.read( + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + + final WritableObjectChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, inner.size()), + WritableObjectChunk::makeWritableChunk, + WritableChunk::asWritableObjectChunk); + + if (outChunk == null) { + // if we're not given 
an output chunk then we better be writing at the front of the new one + Assert.eqZero(outOffset, "outOffset"); + } + + for (int ii = 0; ii < inner.size(); ++ii) { + char value = inner.get(ii); + chunk.set(outOffset + ii, transform.apply(value)); + } + + return chunk; + } + }; + } + + @Override + public WritableCharChunk read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long payloadBuffer = bufferInfoIter.nextLong(); + + final WritableCharChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, nodeInfo.numElements), + WritableCharChunk::makeWritableChunk, + WritableChunk::asWritableCharChunk); + + if (nodeInfo.numElements == 0) { + return chunk; + } + + final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { + if (options.useDeephavenNulls() && validityBuffer != 0) { + throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); + } + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + final long payloadRead = (long) nodeInfo.numElements * Character.BYTES; + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); + + if (options.useDeephavenNulls()) { + useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); + } else { + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); + } + + final long overhangPayload = payloadBuffer - payloadRead; + if (overhangPayload > 0) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); + } + } + + return chunk; + } + + private static > T castOrCreateChunk( + final WritableChunk outChunk, + final int numRows, + final IntFunction chunkFactory, + final Function, T> castFunction) { + if (outChunk != null) { + return castFunction.apply(outChunk); + } + final T newChunk = chunkFactory.apply(numRows); + newChunk.setSize(numRows); + return newChunk; + } + + private static void useDeephavenNulls( + final CharConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableCharChunk chunk, + final int offset) throws IOException { + if (conversion == CharConversion.IDENTITY) { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + chunk.set(offset + ii, is.readChar()); + } + } else { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + final char in = is.readChar(); + final char out = in == NULL_CHAR ? 
in : conversion.apply(in); + chunk.set(offset + ii, out); + } + } + } + + private static void useValidityBuffer( + final CharConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableCharChunk chunk, + final int offset, + final WritableLongChunk isValid) throws IOException { + final int numElements = nodeInfo.numElements; + final int numValidityWords = (numElements + 63) / 64; + + int ei = 0; + int pendingSkips = 0; + + for (int vi = 0; vi < numValidityWords; ++vi) { + int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); + long validityWord = isValid.get(vi); + do { + if ((validityWord & 1) == 1) { + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Character.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + ei += pendingSkips; + pendingSkips = 0; + } + chunk.set(offset + ei++, conversion.apply(is.readChar())); + validityWord >>= 1; + bitsLeftInThisWord--; + } else { + final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); + pendingSkips += skips; + validityWord >>= skips; + bitsLeftInThisWord -= skips; + } + } while (bitsLeftInThisWord > 0); + } + + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Character.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + } + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java index df41a1ae7ca..bbf5b398fe7 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java @@ -37,45 +37,28 @@ public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int case Boolean: throw new UnsupportedOperationException("Booleans are reinterpreted as bytes"); case Char: - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> CharChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new CharChunkReader(options); case Byte: if (typeInfo.type() == Boolean.class || typeInfo.type() == boolean.class) { return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> BooleanChunkInputStreamGenerator.extractChunkFromInputStream( options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> ByteChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new ByteChunkReader(options); case Short: - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> ShortChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new ShortChunkReader(options); case Int: - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> IntChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new IntChunkReader(options); case Long: if (factor == 1) { - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> LongChunkInputStreamGenerator.extractChunkFromInputStream( - options, 
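
The per-type static extractChunkFromInputStreamWithTransform helpers are replaced by
an instance-level transform(Function) on each new reader: the reader first decodes
the primitive chunk, then maps every slot (including null sentinels, which the
function must handle itself) into an object chunk. A hedged usage sketch; the
boxed-Character lambda and the String target type are illustrative, not part of this
patch:

    // Decode a char column, then box each slot into a String cell; NULL_CHAR must be
    // mapped explicitly because transform() sees sentinel values as-is.
    ChunkReader stringReader = new CharChunkReader(options)
            .transform(c -> c == QueryConstants.NULL_CHAR ? null : String.valueOf(c));
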
fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new LongChunkReader(options); } - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> LongChunkInputStreamGenerator.extractChunkFromInputStreamWithConversion( - options, - (long v) -> v == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : (v * factor), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new LongChunkReader(options, + (long v) -> v == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : (v * factor)); case Float: - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> FloatChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new FloatChunkReader(options); case Double: - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> DoubleChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new DoubleChunkReader(options); case Object: if (typeInfo.type().isArray()) { if (typeInfo.componentType() == byte.class) { @@ -196,20 +179,12 @@ public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } if (typeInfo.type() == LocalDate.class) { - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( - options, - value -> value == QueryConstants.NULL_LONG - ? null - : LocalDate.ofEpochDay(value / MS_PER_DAY), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new LongChunkReader(options).transform(value -> value == QueryConstants.NULL_LONG ? null + : LocalDate.ofEpochDay(value / MS_PER_DAY)); } if (typeInfo.type() == LocalTime.class) { - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( - options, - value -> value == QueryConstants.NULL_LONG ? null : LocalTime.ofNanoOfDay(value), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new LongChunkReader(options).transform( + value -> value == QueryConstants.NULL_LONG ? 
null : LocalTime.ofNanoOfDay(value)); } if (typeInfo.type() == String.class || options.columnConversionMode().equals(ColumnConversionMode.Stringify)) { diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java index c5283a02364..a0046b67edb 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java @@ -9,10 +9,7 @@ import java.util.function.ToDoubleFunction; -import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.rowset.RowSet; @@ -22,17 +19,11 @@ import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.DoubleChunk; import io.deephaven.chunk.WritableDoubleChunk; -import io.deephaven.chunk.WritableLongChunk; import io.deephaven.util.type.TypeUtils; import org.jetbrains.annotations.Nullable; -import java.io.DataInput; import java.io.IOException; import java.io.OutputStream; -import java.util.Iterator; -import java.util.PrimitiveIterator; -import java.util.function.Function; -import java.util.function.IntFunction; import static io.deephaven.util.QueryConstants.*; @@ -168,192 +159,4 @@ public int drainTo(final OutputStream outputStream) throws IOException { return LongSizedDataStructure.intSize("DoubleChunkInputStreamGenerator", bytesWritten); } } - - @FunctionalInterface - public interface DoubleConversion { - double apply(double in); - - DoubleConversion IDENTITY = (double a) -> a; - } - - static WritableDoubleChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - return extractChunkFromInputStreamWithConversion( - options, DoubleConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows); - } - - static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final StreamReaderOptions options, - final Function transform, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - try (final WritableDoubleChunk inner = extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - - final WritableObjectChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, inner.size()), - WritableObjectChunk::makeWritableChunk, - WritableChunk::asWritableObjectChunk); - - if (outChunk == null) { - // if we're not given an output chunk then we better be writing at the front of the new one - Assert.eqZero(outOffset, "outOffset"); - } - - for (int ii = 0; ii < inner.size(); ++ii) { - double value = inner.get(ii); - chunk.set(outOffset + ii, transform.apply(value)); - } - - return chunk; - } - } - - static WritableDoubleChunk extractChunkFromInputStreamWithConversion( - final StreamReaderOptions options, - final DoubleConversion conversion, - final Iterator 
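
With the readers as proper classes, DefaultChunkReadingFactory collapses each lambda
into a constructor call, and the two Long shapes above show the conversion/transform
split: a factor is applied in-type via a LongConversion, while LocalDate and
LocalTime leave the wire type and go through transform(). Restating just those
branches for reference (same names as in this patch; factor, options, and MS_PER_DAY
come from the surrounding factory method):

    ChunkReader longReader = factor == 1
            ? new LongChunkReader(options)
            : new LongChunkReader(options,
                    (long v) -> v == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : v * factor);

    ChunkReader localDateReader = new LongChunkReader(options)
            .transform(v -> v == QueryConstants.NULL_LONG ? null : LocalDate.ofEpochDay(v / MS_PER_DAY));
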
fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long payloadBuffer = bufferInfoIter.nextLong(); - - final WritableDoubleChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, nodeInfo.numElements), - WritableDoubleChunk::makeWritableChunk, - WritableChunk::asWritableDoubleChunk); - - if (nodeInfo.numElements == 0) { - return chunk; - } - - final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { - if (options.useDeephavenNulls() && validityBuffer != 0) { - throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); - } - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity buffer by here - - final long payloadRead = (long) nodeInfo.numElements * Double.BYTES; - Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); - - if (options.useDeephavenNulls()) { - useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); - } else { - useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); - } - - final long overhangPayload = payloadBuffer - payloadRead; - if (overhangPayload > 0) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); - } - } - - return chunk; - } - - private static > T castOrCreateChunk( - final WritableChunk outChunk, - final int numRows, - final IntFunction chunkFactory, - final Function, T> castFunction) { - if (outChunk != null) { - return castFunction.apply(outChunk); - } - final T newChunk = chunkFactory.apply(numRows); - newChunk.setSize(numRows); - return newChunk; - } - - private static void useDeephavenNulls( - final DoubleConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableDoubleChunk chunk, - final int offset) throws IOException { - if (conversion == DoubleConversion.IDENTITY) { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - chunk.set(offset + ii, is.readDouble()); - } - } else { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - final double in = is.readDouble(); - final double out = in == NULL_DOUBLE ? 
in : conversion.apply(in); - chunk.set(offset + ii, out); - } - } - } - - private static void useValidityBuffer( - final DoubleConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableDoubleChunk chunk, - final int offset, - final WritableLongChunk isValid) throws IOException { - final int numElements = nodeInfo.numElements; - final int numValidityWords = (numElements + 63) / 64; - - int ei = 0; - int pendingSkips = 0; - - for (int vi = 0; vi < numValidityWords; ++vi) { - int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); - long validityWord = isValid.get(vi); - do { - if ((validityWord & 1) == 1) { - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Double.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - ei += pendingSkips; - pendingSkips = 0; - } - chunk.set(offset + ei++, conversion.apply(is.readDouble())); - validityWord >>= 1; - bitsLeftInThisWord--; - } else { - final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); - pendingSkips += skips; - validityWord >>= skips; - bitsLeftInThisWord -= skips; - } - } while (bitsLeftInThisWord > 0); - } - - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Double.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - } - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java new file mode 100644 index 00000000000..4b72273272b --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java @@ -0,0 +1,204 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +// ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY +// ****** Edit CharChunkReader and run "./gradlew replicateBarrageUtils" to regenerate +// +// @formatter:off +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.base.verify.Assert; +import io.deephaven.chunk.WritableDoubleChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; +import java.util.function.Function; +import java.util.function.IntFunction; + +import static io.deephaven.util.QueryConstants.NULL_DOUBLE; + +public class DoubleChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "DoubleChunkReader"; + private final StreamReaderOptions options; + private final DoubleConversion conversion; + + @FunctionalInterface + public interface DoubleConversion { + double apply(double in); + + DoubleConversion IDENTITY = (double a) -> a; + } + + public DoubleChunkReader(StreamReaderOptions options) { + this(options, DoubleConversion.IDENTITY); + } + + public DoubleChunkReader(StreamReaderOptions options, DoubleConversion conversion) { + this.options = options; + this.conversion = conversion; + } + + public ChunkReader transform(Function transform) { + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { + try (final WritableDoubleChunk inner = DoubleChunkReader.this.read( + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + + final WritableObjectChunk chunk = 
castOrCreateChunk( + outChunk, + Math.max(totalRows, inner.size()), + WritableObjectChunk::makeWritableChunk, + WritableChunk::asWritableObjectChunk); + + if (outChunk == null) { + // if we're not given an output chunk then we better be writing at the front of the new one + Assert.eqZero(outOffset, "outOffset"); + } + + for (int ii = 0; ii < inner.size(); ++ii) { + double value = inner.get(ii); + chunk.set(outOffset + ii, transform.apply(value)); + } + + return chunk; + } + }; + } + + @Override + public WritableDoubleChunk read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long payloadBuffer = bufferInfoIter.nextLong(); + + final WritableDoubleChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, nodeInfo.numElements), + WritableDoubleChunk::makeWritableChunk, + WritableChunk::asWritableDoubleChunk); + + if (nodeInfo.numElements == 0) { + return chunk; + } + + final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { + if (options.useDeephavenNulls() && validityBuffer != 0) { + throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); + } + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + final long payloadRead = (long) nodeInfo.numElements * Double.BYTES; + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); + + if (options.useDeephavenNulls()) { + useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); + } else { + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); + } + + final long overhangPayload = payloadBuffer - payloadRead; + if (overhangPayload > 0) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); + } + } + + return chunk; + } + + private static > T castOrCreateChunk( + final WritableChunk outChunk, + final int numRows, + final IntFunction chunkFactory, + final Function, T> castFunction) { + if (outChunk != null) { + return castFunction.apply(outChunk); + } + final T newChunk = chunkFactory.apply(numRows); + newChunk.setSize(numRows); + return newChunk; + } + + private static void useDeephavenNulls( + final DoubleConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableDoubleChunk chunk, + final int offset) throws IOException { + if (conversion == DoubleConversion.IDENTITY) { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + chunk.set(offset + ii, is.readDouble()); + } + } else { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + final double in = is.readDouble(); + final double out = in == NULL_DOUBLE ? 
in : conversion.apply(in); + chunk.set(offset + ii, out); + } + } + } + + private static void useValidityBuffer( + final DoubleConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableDoubleChunk chunk, + final int offset, + final WritableLongChunk isValid) throws IOException { + final int numElements = nodeInfo.numElements; + final int numValidityWords = (numElements + 63) / 64; + + int ei = 0; + int pendingSkips = 0; + + for (int vi = 0; vi < numValidityWords; ++vi) { + int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); + long validityWord = isValid.get(vi); + do { + if ((validityWord & 1) == 1) { + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Double.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + ei += pendingSkips; + pendingSkips = 0; + } + chunk.set(offset + ei++, conversion.apply(is.readDouble())); + validityWord >>= 1; + bitsLeftInThisWord--; + } else { + final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); + pendingSkips += skips; + validityWord >>= skips; + bitsLeftInThisWord -= skips; + } + } while (bitsLeftInThisWord > 0); + } + + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Double.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + } + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java index 19b52593bff..edd8aaccb2a 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java @@ -7,10 +7,7 @@ // @formatter:off package io.deephaven.extensions.barrage.chunk; -import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.primitive.function.ToFloatFunction; @@ -21,17 +18,11 @@ import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.FloatChunk; import io.deephaven.chunk.WritableFloatChunk; -import io.deephaven.chunk.WritableLongChunk; import io.deephaven.util.type.TypeUtils; import org.jetbrains.annotations.Nullable; -import java.io.DataInput; import java.io.IOException; import java.io.OutputStream; -import java.util.Iterator; -import java.util.PrimitiveIterator; -import java.util.function.Function; -import java.util.function.IntFunction; import static io.deephaven.util.QueryConstants.*; @@ -167,192 +158,4 @@ public int drainTo(final OutputStream outputStream) throws IOException { return LongSizedDataStructure.intSize("FloatChunkInputStreamGenerator", bytesWritten); } } - - @FunctionalInterface - public interface FloatConversion { - float apply(float in); - - FloatConversion IDENTITY = (float a) -> a; - } - - static WritableFloatChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - return extractChunkFromInputStreamWithConversion( - options, FloatConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, 
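
Each read() implementation tolerates "short" validity buffers: the sender may ship
fewer than ceil(numElements / 64) longs, absent words are padded with -1 (all ones,
i.e. all rows valid), and any unconsumed overhang is drained. A sketch of just that
header step, lifted out of the readers (the helper name is hypothetical; the real
logic writes into a WritableLongChunk rather than an array):

    import java.io.DataInput;
    import java.io.IOException;

    final class ValidityHeaderSketch {
        static long[] readValidity(DataInput is, int numElements, long validityBufferBytes)
                throws IOException {
            final int words = (numElements + 63) / 64;
            final long[] isValid = new long[words];
            int jj = 0;
            for (; jj < Math.min(words, validityBufferBytes / 8); ++jj) {
                isValid[jj] = is.readLong();
            }
            final long read = jj * 8L;
            if (read < validityBufferBytes) {
                is.skipBytes((int) (validityBufferBytes - read)); // drain the overhang
            }
            for (; jj < words; ++jj) {
                isValid[jj] = -1L; // pad: missing words mean "all valid"
            }
            return isValid;
        }
    }
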
outChunk, outOffset, - totalRows); - } - - static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final StreamReaderOptions options, - final Function transform, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - try (final WritableFloatChunk inner = extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - - final WritableObjectChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, inner.size()), - WritableObjectChunk::makeWritableChunk, - WritableChunk::asWritableObjectChunk); - - if (outChunk == null) { - // if we're not given an output chunk then we better be writing at the front of the new one - Assert.eqZero(outOffset, "outOffset"); - } - - for (int ii = 0; ii < inner.size(); ++ii) { - float value = inner.get(ii); - chunk.set(outOffset + ii, transform.apply(value)); - } - - return chunk; - } - } - - static WritableFloatChunk extractChunkFromInputStreamWithConversion( - final StreamReaderOptions options, - final FloatConversion conversion, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long payloadBuffer = bufferInfoIter.nextLong(); - - final WritableFloatChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, nodeInfo.numElements), - WritableFloatChunk::makeWritableChunk, - WritableChunk::asWritableFloatChunk); - - if (nodeInfo.numElements == 0) { - return chunk; - } - - final int numValidityLongs = options.useDeephavenNulls() ? 
0 : (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { - if (options.useDeephavenNulls() && validityBuffer != 0) { - throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); - } - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity buffer by here - - final long payloadRead = (long) nodeInfo.numElements * Float.BYTES; - Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); - - if (options.useDeephavenNulls()) { - useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); - } else { - useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); - } - - final long overhangPayload = payloadBuffer - payloadRead; - if (overhangPayload > 0) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); - } - } - - return chunk; - } - - private static > T castOrCreateChunk( - final WritableChunk outChunk, - final int numRows, - final IntFunction chunkFactory, - final Function, T> castFunction) { - if (outChunk != null) { - return castFunction.apply(outChunk); - } - final T newChunk = chunkFactory.apply(numRows); - newChunk.setSize(numRows); - return newChunk; - } - - private static void useDeephavenNulls( - final FloatConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableFloatChunk chunk, - final int offset) throws IOException { - if (conversion == FloatConversion.IDENTITY) { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - chunk.set(offset + ii, is.readFloat()); - } - } else { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - final float in = is.readFloat(); - final float out = in == NULL_FLOAT ? 
in : conversion.apply(in); - chunk.set(offset + ii, out); - } - } - } - - private static void useValidityBuffer( - final FloatConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableFloatChunk chunk, - final int offset, - final WritableLongChunk isValid) throws IOException { - final int numElements = nodeInfo.numElements; - final int numValidityWords = (numElements + 63) / 64; - - int ei = 0; - int pendingSkips = 0; - - for (int vi = 0; vi < numValidityWords; ++vi) { - int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); - long validityWord = isValid.get(vi); - do { - if ((validityWord & 1) == 1) { - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Float.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - ei += pendingSkips; - pendingSkips = 0; - } - chunk.set(offset + ei++, conversion.apply(is.readFloat())); - validityWord >>= 1; - bitsLeftInThisWord--; - } else { - final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); - pendingSkips += skips; - validityWord >>= skips; - bitsLeftInThisWord -= skips; - } - } while (bitsLeftInThisWord > 0); - } - - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Float.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - } - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkReader.java new file mode 100644 index 00000000000..6d434226235 --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkReader.java @@ -0,0 +1,204 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +// ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY +// ****** Edit CharChunkReader and run "./gradlew replicateBarrageUtils" to regenerate +// +// @formatter:off +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.base.verify.Assert; +import io.deephaven.chunk.WritableFloatChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; +import java.util.function.Function; +import java.util.function.IntFunction; + +import static io.deephaven.util.QueryConstants.NULL_FLOAT; + +public class FloatChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "FloatChunkReader"; + private final StreamReaderOptions options; + private final FloatConversion conversion; + + @FunctionalInterface + public interface FloatConversion { + float apply(float in); + + FloatConversion IDENTITY = (float a) -> a; + } + + public FloatChunkReader(StreamReaderOptions options) { + this(options, FloatConversion.IDENTITY); + } + + public FloatChunkReader(StreamReaderOptions options, FloatConversion conversion) { + this.options = options; + this.conversion = conversion; + } + + public ChunkReader transform(Function transform) { + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { + try (final WritableFloatChunk inner = FloatChunkReader.this.read( + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + + final WritableObjectChunk chunk = castOrCreateChunk( + 
outChunk, + Math.max(totalRows, inner.size()), + WritableObjectChunk::makeWritableChunk, + WritableChunk::asWritableObjectChunk); + + if (outChunk == null) { + // if we're not given an output chunk then we better be writing at the front of the new one + Assert.eqZero(outOffset, "outOffset"); + } + + for (int ii = 0; ii < inner.size(); ++ii) { + float value = inner.get(ii); + chunk.set(outOffset + ii, transform.apply(value)); + } + + return chunk; + } + }; + } + + @Override + public WritableFloatChunk read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long payloadBuffer = bufferInfoIter.nextLong(); + + final WritableFloatChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, nodeInfo.numElements), + WritableFloatChunk::makeWritableChunk, + WritableChunk::asWritableFloatChunk); + + if (nodeInfo.numElements == 0) { + return chunk; + } + + final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { + if (options.useDeephavenNulls() && validityBuffer != 0) { + throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); + } + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + final long payloadRead = (long) nodeInfo.numElements * Float.BYTES; + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); + + if (options.useDeephavenNulls()) { + useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); + } else { + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); + } + + final long overhangPayload = payloadBuffer - payloadRead; + if (overhangPayload > 0) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); + } + } + + return chunk; + } + + private static > T castOrCreateChunk( + final WritableChunk outChunk, + final int numRows, + final IntFunction chunkFactory, + final Function, T> castFunction) { + if (outChunk != null) { + return castFunction.apply(outChunk); + } + final T newChunk = chunkFactory.apply(numRows); + newChunk.setSize(numRows); + return newChunk; + } + + private static void useDeephavenNulls( + final FloatConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableFloatChunk chunk, + final int offset) throws IOException { + if (conversion == FloatConversion.IDENTITY) { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + chunk.set(offset + ii, is.readFloat()); + } + } else { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + final float in = is.readFloat(); + final float out = in == NULL_FLOAT ? 
in : conversion.apply(in); + chunk.set(offset + ii, out); + } + } + } + + private static void useValidityBuffer( + final FloatConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableFloatChunk chunk, + final int offset, + final WritableLongChunk isValid) throws IOException { + final int numElements = nodeInfo.numElements; + final int numValidityWords = (numElements + 63) / 64; + + int ei = 0; + int pendingSkips = 0; + + for (int vi = 0; vi < numValidityWords; ++vi) { + int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); + long validityWord = isValid.get(vi); + do { + if ((validityWord & 1) == 1) { + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Float.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + ei += pendingSkips; + pendingSkips = 0; + } + chunk.set(offset + ei++, conversion.apply(is.readFloat())); + validityWord >>= 1; + bitsLeftInThisWord--; + } else { + final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); + pendingSkips += skips; + validityWord >>= skips; + bitsLeftInThisWord -= skips; + } + } while (bitsLeftInThisWord > 0); + } + + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Float.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + } + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java index 91714f4dd43..87bc61b8c6d 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java @@ -9,10 +9,7 @@ import java.util.function.ToIntFunction; -import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.rowset.RowSet; @@ -22,17 +19,11 @@ import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.IntChunk; import io.deephaven.chunk.WritableIntChunk; -import io.deephaven.chunk.WritableLongChunk; import io.deephaven.util.type.TypeUtils; import org.jetbrains.annotations.Nullable; -import java.io.DataInput; import java.io.IOException; import java.io.OutputStream; -import java.util.Iterator; -import java.util.PrimitiveIterator; -import java.util.function.Function; -import java.util.function.IntFunction; import static io.deephaven.util.QueryConstants.*; @@ -168,192 +159,4 @@ public int drainTo(final OutputStream outputStream) throws IOException { return LongSizedDataStructure.intSize("IntChunkInputStreamGenerator", bytesWritten); } } - - @FunctionalInterface - public interface IntConversion { - int apply(int in); - - IntConversion IDENTITY = (int a) -> a; - } - - static WritableIntChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - return extractChunkFromInputStreamWithConversion( - options, IntConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows); - } - - static WritableObjectChunk 
extractChunkFromInputStreamWithTransform( - final StreamReaderOptions options, - final Function transform, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - try (final WritableIntChunk inner = extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - - final WritableObjectChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, inner.size()), - WritableObjectChunk::makeWritableChunk, - WritableChunk::asWritableObjectChunk); - - if (outChunk == null) { - // if we're not given an output chunk then we better be writing at the front of the new one - Assert.eqZero(outOffset, "outOffset"); - } - - for (int ii = 0; ii < inner.size(); ++ii) { - int value = inner.get(ii); - chunk.set(outOffset + ii, transform.apply(value)); - } - - return chunk; - } - } - - static WritableIntChunk extractChunkFromInputStreamWithConversion( - final StreamReaderOptions options, - final IntConversion conversion, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long payloadBuffer = bufferInfoIter.nextLong(); - - final WritableIntChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, nodeInfo.numElements), - WritableIntChunk::makeWritableChunk, - WritableChunk::asWritableIntChunk); - - if (nodeInfo.numElements == 0) { - return chunk; - } - - final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { - if (options.useDeephavenNulls() && validityBuffer != 0) { - throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); - } - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity buffer by here - - final long payloadRead = (long) nodeInfo.numElements * Integer.BYTES; - Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); - - if (options.useDeephavenNulls()) { - useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); - } else { - useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); - } - - final long overhangPayload = payloadBuffer - payloadRead; - if (overhangPayload > 0) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); - } - } - - return chunk; - } - - private static > T castOrCreateChunk( - final WritableChunk outChunk, - final int numRows, - final IntFunction chunkFactory, - final Function, T> castFunction) { - if (outChunk != null) { - return castFunction.apply(outChunk); - } - final T newChunk = chunkFactory.apply(numRows); - newChunk.setSize(numRows); - return newChunk; - } - - private static void useDeephavenNulls( - final IntConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, 
- final WritableIntChunk chunk, - final int offset) throws IOException { - if (conversion == IntConversion.IDENTITY) { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - chunk.set(offset + ii, is.readInt()); - } - } else { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - final int in = is.readInt(); - final int out = in == NULL_INT ? in : conversion.apply(in); - chunk.set(offset + ii, out); - } - } - } - - private static void useValidityBuffer( - final IntConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableIntChunk chunk, - final int offset, - final WritableLongChunk isValid) throws IOException { - final int numElements = nodeInfo.numElements; - final int numValidityWords = (numElements + 63) / 64; - - int ei = 0; - int pendingSkips = 0; - - for (int vi = 0; vi < numValidityWords; ++vi) { - int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); - long validityWord = isValid.get(vi); - do { - if ((validityWord & 1) == 1) { - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Integer.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - ei += pendingSkips; - pendingSkips = 0; - } - chunk.set(offset + ei++, conversion.apply(is.readInt())); - validityWord >>= 1; - bitsLeftInThisWord--; - } else { - final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); - pendingSkips += skips; - validityWord >>= skips; - bitsLeftInThisWord -= skips; - } - } while (bitsLeftInThisWord > 0); - } - - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Integer.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - } - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkReader.java new file mode 100644 index 00000000000..39bce48735c --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkReader.java @@ -0,0 +1,204 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +// ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY +// ****** Edit CharChunkReader and run "./gradlew replicateBarrageUtils" to regenerate +// +// @formatter:off +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.base.verify.Assert; +import io.deephaven.chunk.WritableIntChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; +import java.util.function.Function; +import java.util.function.IntFunction; + +import static io.deephaven.util.QueryConstants.NULL_INT; + +public class IntChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "IntChunkReader"; + private final StreamReaderOptions options; + private final IntConversion conversion; + + @FunctionalInterface + public interface IntConversion { + int apply(int in); + + IntConversion IDENTITY = (int a) -> a; + } + + public IntChunkReader(StreamReaderOptions options) { + this(options, IntConversion.IDENTITY); + } + + public IntChunkReader(StreamReaderOptions options, IntConversion conversion) { + this.options = options; + this.conversion = conversion; + } + + 
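
For orientation, a hypothetical caller of one of these readers looks like the sketch
below: the field-node and buffer-info iterators are positioned by the surrounding
Barrage stream parsing (not shown), and passing a null outChunk asks the reader to
allocate. Generics are elided here just as the flattened diff shows them:

    WritableIntChunk decodeIntColumn(
            StreamReaderOptions options,
            Iterator fieldNodeIter,          // of ChunkInputStreamGenerator.FieldNodeInfo
            PrimitiveIterator.OfLong bufferInfoIter,
            DataInput is) throws IOException {
        // null outChunk + offset 0: the reader allocates a chunk sized for the column
        return new IntChunkReader(options).read(fieldNodeIter, bufferInfoIter, is, null, 0, 0);
    }
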
public ChunkReader transform(Function transform) { + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { + try (final WritableIntChunk inner = IntChunkReader.this.read( + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + + final WritableObjectChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, inner.size()), + WritableObjectChunk::makeWritableChunk, + WritableChunk::asWritableObjectChunk); + + if (outChunk == null) { + // if we're not given an output chunk then we better be writing at the front of the new one + Assert.eqZero(outOffset, "outOffset"); + } + + for (int ii = 0; ii < inner.size(); ++ii) { + int value = inner.get(ii); + chunk.set(outOffset + ii, transform.apply(value)); + } + + return chunk; + } + }; + } + + @Override + public WritableIntChunk read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long payloadBuffer = bufferInfoIter.nextLong(); + + final WritableIntChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, nodeInfo.numElements), + WritableIntChunk::makeWritableChunk, + WritableChunk::asWritableIntChunk); + + if (nodeInfo.numElements == 0) { + return chunk; + } + + final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { + if (options.useDeephavenNulls() && validityBuffer != 0) { + throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); + } + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + final long payloadRead = (long) nodeInfo.numElements * Integer.BYTES; + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); + + if (options.useDeephavenNulls()) { + useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); + } else { + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); + } + + final long overhangPayload = payloadBuffer - payloadRead; + if (overhangPayload > 0) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); + } + } + + return chunk; + } + + private static > T castOrCreateChunk( + final WritableChunk outChunk, + final int numRows, + final IntFunction chunkFactory, + final Function, T> castFunction) { + if (outChunk != null) { + return castFunction.apply(outChunk); + } + final T newChunk = chunkFactory.apply(numRows); + newChunk.setSize(numRows); + return newChunk; + } + + private static void useDeephavenNulls( + final IntConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableIntChunk chunk, + final int offset) throws IOException { + if (conversion == IntConversion.IDENTITY) { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + chunk.set(offset + ii, is.readInt()); + } + } else { + for (int ii = 0; ii < nodeInfo.numElements; 
++ii) { + final int in = is.readInt(); + final int out = in == NULL_INT ? in : conversion.apply(in); + chunk.set(offset + ii, out); + } + } + } + + private static void useValidityBuffer( + final IntConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableIntChunk chunk, + final int offset, + final WritableLongChunk isValid) throws IOException { + final int numElements = nodeInfo.numElements; + final int numValidityWords = (numElements + 63) / 64; + + int ei = 0; + int pendingSkips = 0; + + for (int vi = 0; vi < numValidityWords; ++vi) { + int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); + long validityWord = isValid.get(vi); + do { + if ((validityWord & 1) == 1) { + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Integer.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + ei += pendingSkips; + pendingSkips = 0; + } + chunk.set(offset + ei++, conversion.apply(is.readInt())); + validityWord >>= 1; + bitsLeftInThisWord--; + } else { + final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); + pendingSkips += skips; + validityWord >>= skips; + bitsLeftInThisWord -= skips; + } + } while (bitsLeftInThisWord > 0); + } + + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Integer.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + } + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java index a28c4006d1d..671d972ccce 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java @@ -9,10 +9,7 @@ import java.util.function.ToLongFunction; -import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.rowset.RowSet; @@ -22,17 +19,11 @@ import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.LongChunk; import io.deephaven.chunk.WritableLongChunk; -import io.deephaven.chunk.WritableLongChunk; import io.deephaven.util.type.TypeUtils; import org.jetbrains.annotations.Nullable; -import java.io.DataInput; import java.io.IOException; import java.io.OutputStream; -import java.util.Iterator; -import java.util.PrimitiveIterator; -import java.util.function.Function; -import java.util.function.IntFunction; import static io.deephaven.util.QueryConstants.*; @@ -168,192 +159,4 @@ public int drainTo(final OutputStream outputStream) throws IOException { return LongSizedDataStructure.intSize("LongChunkInputStreamGenerator", bytesWritten); } } - - @FunctionalInterface - public interface LongConversion { - long apply(long in); - - LongConversion IDENTITY = (long a) -> a; - } - - static WritableLongChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - return extractChunkFromInputStreamWithConversion( - options, LongConversion.IDENTITY, fieldNodeIter, 
bufferInfoIter, is, outChunk, outOffset, - totalRows); - } - - static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final StreamReaderOptions options, - final Function transform, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - try (final WritableLongChunk inner = extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - - final WritableObjectChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, inner.size()), - WritableObjectChunk::makeWritableChunk, - WritableChunk::asWritableObjectChunk); - - if (outChunk == null) { - // if we're not given an output chunk then we better be writing at the front of the new one - Assert.eqZero(outOffset, "outOffset"); - } - - for (int ii = 0; ii < inner.size(); ++ii) { - long value = inner.get(ii); - chunk.set(outOffset + ii, transform.apply(value)); - } - - return chunk; - } - } - - static WritableLongChunk extractChunkFromInputStreamWithConversion( - final StreamReaderOptions options, - final LongConversion conversion, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long payloadBuffer = bufferInfoIter.nextLong(); - - final WritableLongChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, nodeInfo.numElements), - WritableLongChunk::makeWritableChunk, - WritableChunk::asWritableLongChunk); - - if (nodeInfo.numElements == 0) { - return chunk; - } - - final int numValidityLongs = options.useDeephavenNulls() ? 
0 : (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { - if (options.useDeephavenNulls() && validityBuffer != 0) { - throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); - } - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity buffer by here - - final long payloadRead = (long) nodeInfo.numElements * Long.BYTES; - Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); - - if (options.useDeephavenNulls()) { - useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); - } else { - useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); - } - - final long overhangPayload = payloadBuffer - payloadRead; - if (overhangPayload > 0) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); - } - } - - return chunk; - } - - private static > T castOrCreateChunk( - final WritableChunk outChunk, - final int numRows, - final IntFunction chunkFactory, - final Function, T> castFunction) { - if (outChunk != null) { - return castFunction.apply(outChunk); - } - final T newChunk = chunkFactory.apply(numRows); - newChunk.setSize(numRows); - return newChunk; - } - - private static void useDeephavenNulls( - final LongConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableLongChunk chunk, - final int offset) throws IOException { - if (conversion == LongConversion.IDENTITY) { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - chunk.set(offset + ii, is.readLong()); - } - } else { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - final long in = is.readLong(); - final long out = in == NULL_LONG ? 
in : conversion.apply(in); - chunk.set(offset + ii, out); - } - } - } - - private static void useValidityBuffer( - final LongConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableLongChunk chunk, - final int offset, - final WritableLongChunk isValid) throws IOException { - final int numElements = nodeInfo.numElements; - final int numValidityWords = (numElements + 63) / 64; - - int ei = 0; - int pendingSkips = 0; - - for (int vi = 0; vi < numValidityWords; ++vi) { - int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); - long validityWord = isValid.get(vi); - do { - if ((validityWord & 1) == 1) { - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Long.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - ei += pendingSkips; - pendingSkips = 0; - } - chunk.set(offset + ei++, conversion.apply(is.readLong())); - validityWord >>= 1; - bitsLeftInThisWord--; - } else { - final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); - pendingSkips += skips; - validityWord >>= skips; - bitsLeftInThisWord -= skips; - } - } while (bitsLeftInThisWord > 0); - } - - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Long.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - } - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkReader.java new file mode 100644 index 00000000000..743e0a37c8f --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkReader.java @@ -0,0 +1,204 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +// ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY +// ****** Edit CharChunkReader and run "./gradlew replicateBarrageUtils" to regenerate +// +// @formatter:off +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.base.verify.Assert; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; +import java.util.function.Function; +import java.util.function.IntFunction; + +import static io.deephaven.util.QueryConstants.NULL_LONG; + +public class LongChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "LongChunkReader"; + private final StreamReaderOptions options; + private final LongConversion conversion; + + @FunctionalInterface + public interface LongConversion { + long apply(long in); + + LongConversion IDENTITY = (long a) -> a; + } + + public LongChunkReader(StreamReaderOptions options) { + this(options, LongConversion.IDENTITY); + } + + public LongChunkReader(StreamReaderOptions options, LongConversion conversion) { + this.options = options; + this.conversion = conversion; + } + + public ChunkReader transform(Function transform) { + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { + try (final WritableLongChunk inner = LongChunkReader.this.read( + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + + final WritableObjectChunk chunk = castOrCreateChunk( + outChunk, + 
Math.max(totalRows, inner.size()), + WritableObjectChunk::makeWritableChunk, + WritableChunk::asWritableObjectChunk); + + if (outChunk == null) { + // if we're not given an output chunk then we better be writing at the front of the new one + Assert.eqZero(outOffset, "outOffset"); + } + + for (int ii = 0; ii < inner.size(); ++ii) { + long value = inner.get(ii); + chunk.set(outOffset + ii, transform.apply(value)); + } + + return chunk; + } + }; + } + + @Override + public WritableLongChunk read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long payloadBuffer = bufferInfoIter.nextLong(); + + final WritableLongChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, nodeInfo.numElements), + WritableLongChunk::makeWritableChunk, + WritableChunk::asWritableLongChunk); + + if (nodeInfo.numElements == 0) { + return chunk; + } + + final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { + if (options.useDeephavenNulls() && validityBuffer != 0) { + throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); + } + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + final long payloadRead = (long) nodeInfo.numElements * Long.BYTES; + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); + + if (options.useDeephavenNulls()) { + useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); + } else { + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); + } + + final long overhangPayload = payloadBuffer - payloadRead; + if (overhangPayload > 0) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); + } + } + + return chunk; + } + + private static > T castOrCreateChunk( + final WritableChunk outChunk, + final int numRows, + final IntFunction chunkFactory, + final Function, T> castFunction) { + if (outChunk != null) { + return castFunction.apply(outChunk); + } + final T newChunk = chunkFactory.apply(numRows); + newChunk.setSize(numRows); + return newChunk; + } + + private static void useDeephavenNulls( + final LongConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableLongChunk chunk, + final int offset) throws IOException { + if (conversion == LongConversion.IDENTITY) { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + chunk.set(offset + ii, is.readLong()); + } + } else { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + final long in = is.readLong(); + final long out = in == NULL_LONG ? 
in : conversion.apply(in); + chunk.set(offset + ii, out); + } + } + } + + private static void useValidityBuffer( + final LongConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableLongChunk chunk, + final int offset, + final WritableLongChunk isValid) throws IOException { + final int numElements = nodeInfo.numElements; + final int numValidityWords = (numElements + 63) / 64; + + int ei = 0; + int pendingSkips = 0; + + for (int vi = 0; vi < numValidityWords; ++vi) { + int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); + long validityWord = isValid.get(vi); + do { + if ((validityWord & 1) == 1) { + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Long.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + ei += pendingSkips; + pendingSkips = 0; + } + chunk.set(offset + ei++, conversion.apply(is.readLong())); + validityWord >>= 1; + bitsLeftInThisWord--; + } else { + final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); + pendingSkips += skips; + validityWord >>= skips; + bitsLeftInThisWord -= skips; + } + } while (bitsLeftInThisWord > 0); + } + + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Long.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + } + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java index 68a2ecf86b1..4fd81b47d03 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java @@ -7,10 +7,7 @@ // @formatter:off package io.deephaven.extensions.barrage.chunk; -import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.primitive.function.ToShortFunction; @@ -21,17 +18,11 @@ import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.ShortChunk; import io.deephaven.chunk.WritableShortChunk; -import io.deephaven.chunk.WritableLongChunk; import io.deephaven.util.type.TypeUtils; import org.jetbrains.annotations.Nullable; -import java.io.DataInput; import java.io.IOException; import java.io.OutputStream; -import java.util.Iterator; -import java.util.PrimitiveIterator; -import java.util.function.Function; -import java.util.function.IntFunction; import static io.deephaven.util.QueryConstants.*; @@ -167,192 +158,4 @@ public int drainTo(final OutputStream outputStream) throws IOException { return LongSizedDataStructure.intSize("ShortChunkInputStreamGenerator", bytesWritten); } } - - @FunctionalInterface - public interface ShortConversion { - short apply(short in); - - ShortConversion IDENTITY = (short a) -> a; - } - - static WritableShortChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - return extractChunkFromInputStreamWithConversion( - options, ShortConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, 
outOffset, - totalRows); - } - - static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final StreamReaderOptions options, - final Function transform, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - try (final WritableShortChunk inner = extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - - final WritableObjectChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, inner.size()), - WritableObjectChunk::makeWritableChunk, - WritableChunk::asWritableObjectChunk); - - if (outChunk == null) { - // if we're not given an output chunk then we better be writing at the front of the new one - Assert.eqZero(outOffset, "outOffset"); - } - - for (int ii = 0; ii < inner.size(); ++ii) { - short value = inner.get(ii); - chunk.set(outOffset + ii, transform.apply(value)); - } - - return chunk; - } - } - - static WritableShortChunk extractChunkFromInputStreamWithConversion( - final StreamReaderOptions options, - final ShortConversion conversion, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long payloadBuffer = bufferInfoIter.nextLong(); - - final WritableShortChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, nodeInfo.numElements), - WritableShortChunk::makeWritableChunk, - WritableChunk::asWritableShortChunk); - - if (nodeInfo.numElements == 0) { - return chunk; - } - - final int numValidityLongs = options.useDeephavenNulls() ? 
0 : (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { - if (options.useDeephavenNulls() && validityBuffer != 0) { - throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); - } - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity buffer by here - - final long payloadRead = (long) nodeInfo.numElements * Short.BYTES; - Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); - - if (options.useDeephavenNulls()) { - useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); - } else { - useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); - } - - final long overhangPayload = payloadBuffer - payloadRead; - if (overhangPayload > 0) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); - } - } - - return chunk; - } - - private static > T castOrCreateChunk( - final WritableChunk outChunk, - final int numRows, - final IntFunction chunkFactory, - final Function, T> castFunction) { - if (outChunk != null) { - return castFunction.apply(outChunk); - } - final T newChunk = chunkFactory.apply(numRows); - newChunk.setSize(numRows); - return newChunk; - } - - private static void useDeephavenNulls( - final ShortConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableShortChunk chunk, - final int offset) throws IOException { - if (conversion == ShortConversion.IDENTITY) { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - chunk.set(offset + ii, is.readShort()); - } - } else { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - final short in = is.readShort(); - final short out = in == NULL_SHORT ? 
in : conversion.apply(in); - chunk.set(offset + ii, out); - } - } - } - - private static void useValidityBuffer( - final ShortConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableShortChunk chunk, - final int offset, - final WritableLongChunk isValid) throws IOException { - final int numElements = nodeInfo.numElements; - final int numValidityWords = (numElements + 63) / 64; - - int ei = 0; - int pendingSkips = 0; - - for (int vi = 0; vi < numValidityWords; ++vi) { - int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); - long validityWord = isValid.get(vi); - do { - if ((validityWord & 1) == 1) { - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Short.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - ei += pendingSkips; - pendingSkips = 0; - } - chunk.set(offset + ei++, conversion.apply(is.readShort())); - validityWord >>= 1; - bitsLeftInThisWord--; - } else { - final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); - pendingSkips += skips; - validityWord >>= skips; - bitsLeftInThisWord -= skips; - } - } while (bitsLeftInThisWord > 0); - } - - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Short.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - } - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkReader.java new file mode 100644 index 00000000000..56c17c2c11f --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkReader.java @@ -0,0 +1,204 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +// ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY +// ****** Edit CharChunkReader and run "./gradlew replicateBarrageUtils" to regenerate +// +// @formatter:off +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.base.verify.Assert; +import io.deephaven.chunk.WritableShortChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; +import java.util.function.Function; +import java.util.function.IntFunction; + +import static io.deephaven.util.QueryConstants.NULL_SHORT; + +public class ShortChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "ShortChunkReader"; + private final StreamReaderOptions options; + private final ShortConversion conversion; + + @FunctionalInterface + public interface ShortConversion { + short apply(short in); + + ShortConversion IDENTITY = (short a) -> a; + } + + public ShortChunkReader(StreamReaderOptions options) { + this(options, ShortConversion.IDENTITY); + } + + public ShortChunkReader(StreamReaderOptions options, ShortConversion conversion) { + this.options = options; + this.conversion = conversion; + } + + public ChunkReader transform(Function transform) { + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { + try (final WritableShortChunk inner = ShortChunkReader.this.read( + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + + final WritableObjectChunk chunk = castOrCreateChunk( + 
outChunk, + Math.max(totalRows, inner.size()), + WritableObjectChunk::makeWritableChunk, + WritableChunk::asWritableObjectChunk); + + if (outChunk == null) { + // if we're not given an output chunk then we better be writing at the front of the new one + Assert.eqZero(outOffset, "outOffset"); + } + + for (int ii = 0; ii < inner.size(); ++ii) { + short value = inner.get(ii); + chunk.set(outOffset + ii, transform.apply(value)); + } + + return chunk; + } + }; + } + + @Override + public WritableShortChunk read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long payloadBuffer = bufferInfoIter.nextLong(); + + final WritableShortChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, nodeInfo.numElements), + WritableShortChunk::makeWritableChunk, + WritableChunk::asWritableShortChunk); + + if (nodeInfo.numElements == 0) { + return chunk; + } + + final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { + if (options.useDeephavenNulls() && validityBuffer != 0) { + throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); + } + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + final long payloadRead = (long) nodeInfo.numElements * Short.BYTES; + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); + + if (options.useDeephavenNulls()) { + useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); + } else { + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); + } + + final long overhangPayload = payloadBuffer - payloadRead; + if (overhangPayload > 0) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); + } + } + + return chunk; + } + + private static > T castOrCreateChunk( + final WritableChunk outChunk, + final int numRows, + final IntFunction chunkFactory, + final Function, T> castFunction) { + if (outChunk != null) { + return castFunction.apply(outChunk); + } + final T newChunk = chunkFactory.apply(numRows); + newChunk.setSize(numRows); + return newChunk; + } + + private static void useDeephavenNulls( + final ShortConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableShortChunk chunk, + final int offset) throws IOException { + if (conversion == ShortConversion.IDENTITY) { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + chunk.set(offset + ii, is.readShort()); + } + } else { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + final short in = is.readShort(); + final short out = in == NULL_SHORT ? 
in : conversion.apply(in); + chunk.set(offset + ii, out); + } + } + } + + private static void useValidityBuffer( + final ShortConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableShortChunk chunk, + final int offset, + final WritableLongChunk isValid) throws IOException { + final int numElements = nodeInfo.numElements; + final int numValidityWords = (numElements + 63) / 64; + + int ei = 0; + int pendingSkips = 0; + + for (int vi = 0; vi < numValidityWords; ++vi) { + int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); + long validityWord = isValid.get(vi); + do { + if ((validityWord & 1) == 1) { + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Short.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + ei += pendingSkips; + pendingSkips = 0; + } + chunk.set(offset + ei++, conversion.apply(is.readShort())); + validityWord >>= 1; + bitsLeftInThisWord--; + } else { + final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); + pendingSkips += skips; + validityWord >>= skips; + bitsLeftInThisWord -= skips; + } + } while (bitsLeftInThisWord > 0); + } + + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Short.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + } + } +} From 8c2801089c94f1d7fba9ca8ac1e7054e936acc89 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 25 Jun 2024 13:56:21 -0500 Subject: [PATCH 070/219] Commit #5, also boolean chunk reader --- .../BooleanChunkInputStreamGenerator.java | 126 ---------------- .../barrage/chunk/BooleanChunkReader.java | 141 ++++++++++++++++++ .../chunk/DefaultChunkReadingFactory.java | 4 +- 3 files changed, 142 insertions(+), 129 deletions(-) create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkReader.java diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkInputStreamGenerator.java index 8c1503b67f8..e1f478ef5ef 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkInputStreamGenerator.java @@ -153,130 +153,4 @@ public int drainTo(final OutputStream outputStream) throws IOException { return LongSizedDataStructure.intSize(DEBUG_NAME, bytesWritten); } } - - @FunctionalInterface - public interface ByteConversion { - byte apply(byte in); - - ByteConversion IDENTITY = (byte a) -> a; - } - - static WritableChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - return extractChunkFromInputStreamWithConversion( - options, ByteConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - - static WritableChunk extractChunkFromInputStreamWithConversion( - final StreamReaderOptions options, - final ByteConversion conversion, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = 
bufferInfoIter.nextLong(); - final long payloadBuffer = bufferInfoIter.nextLong(); - - final WritableByteChunk chunk; - if (outChunk != null) { - chunk = outChunk.asWritableByteChunk(); - } else { - final int numRows = Math.max(totalRows, nodeInfo.numElements); - chunk = WritableByteChunk.makeWritableChunk(numRows); - chunk.setSize(numRows); - } - - if (nodeInfo.numElements == 0) { - return chunk; - } - - final int numValidityLongs = (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity buffer by here - - final int numPayloadBytesNeeded = (int) ((nodeInfo.numElements + 7L) / 8L); - if (payloadBuffer < numPayloadBytesNeeded) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } - - // cannot use deephaven nulls as booleans are not nullable - useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); - - // flight requires that the payload buffer be padded to multiples of 8 bytes - final long payloadRead = getNumLongsForBitPackOfSize(nodeInfo.numElements) * 8L; - final long overhangPayload = payloadBuffer - payloadRead; - if (overhangPayload > 0) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); - } - } - - return chunk; - } - - private static void useValidityBuffer( - final ByteConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableByteChunk chunk, - final int offset, - final WritableLongChunk isValid) throws IOException { - final int numElements = nodeInfo.numElements; - final int numValidityWords = (numElements + 63) / 64; - - int ei = 0; - int pendingSkips = 0; - - for (int vi = 0; vi < numValidityWords; ++vi) { - int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); - long validityWord = isValid.get(vi); - long payloadWord = is.readLong(); - do { - if ((validityWord & 1) == 1) { - if (pendingSkips > 0) { - chunk.fillWithNullValue(offset + ei, pendingSkips); - ei += pendingSkips; - pendingSkips = 0; - } - final byte value = (payloadWord & 1) == 1 ? 
BooleanUtils.TRUE_BOOLEAN_AS_BYTE - : BooleanUtils.FALSE_BOOLEAN_AS_BYTE; - chunk.set(offset + ei++, conversion.apply(value)); - validityWord >>= 1; - payloadWord >>= 1; - bitsLeftInThisWord--; - } else { - final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); - pendingSkips += skips; - validityWord >>= skips; - payloadWord >>= skips; - bitsLeftInThisWord -= skips; - } - } while (bitsLeftInThisWord > 0); - } - - if (pendingSkips > 0) { - chunk.fillWithNullValue(offset + ei, pendingSkips); - } - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkReader.java new file mode 100644 index 00000000000..da0cc96cad4 --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkReader.java @@ -0,0 +1,141 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.chunk.WritableByteChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.util.BooleanUtils; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; + +import static io.deephaven.extensions.barrage.chunk.BaseChunkInputStreamGenerator.getNumLongsForBitPackOfSize; + +public class BooleanChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "BooleanChunkReader"; + + @FunctionalInterface + public interface ByteConversion { + byte apply(byte in); + + ByteConversion IDENTITY = (byte a) -> a; + } + + private final ByteConversion conversion; + + public BooleanChunkReader() { + this(ByteConversion.IDENTITY); + } + + public BooleanChunkReader(ByteConversion conversion) { + this.conversion = conversion; + } + + @Override + public WritableChunk read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long payloadBuffer = bufferInfoIter.nextLong(); + + final WritableByteChunk chunk; + if (outChunk != null) { + chunk = outChunk.asWritableByteChunk(); + } else { + final int numRows = Math.max(totalRows, nodeInfo.numElements); + chunk = WritableByteChunk.makeWritableChunk(numRows); + chunk.setSize(numRows); + } + + if (nodeInfo.numElements == 0) { + return chunk; + } + + final int numValidityLongs = (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + final int numPayloadBytesNeeded = (int) ((nodeInfo.numElements + 7L) / 8L); + if (payloadBuffer < numPayloadBytesNeeded) { + throw new 
IllegalStateException("payload buffer is too short for expected number of elements"); + } + + // cannot use deephaven nulls as booleans are not nullable + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); + + // flight requires that the payload buffer be padded to multiples of 8 bytes + final long payloadRead = getNumLongsForBitPackOfSize(nodeInfo.numElements) * 8L; + final long overhangPayload = payloadBuffer - payloadRead; + if (overhangPayload > 0) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); + } + } + + return chunk; + } + + + private static void useValidityBuffer( + final ByteConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableByteChunk chunk, + final int offset, + final WritableLongChunk isValid) throws IOException { + final int numElements = nodeInfo.numElements; + final int numValidityWords = (numElements + 63) / 64; + + int ei = 0; + int pendingSkips = 0; + + for (int vi = 0; vi < numValidityWords; ++vi) { + int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); + long validityWord = isValid.get(vi); + long payloadWord = is.readLong(); + do { + if ((validityWord & 1) == 1) { + if (pendingSkips > 0) { + chunk.fillWithNullValue(offset + ei, pendingSkips); + ei += pendingSkips; + pendingSkips = 0; + } + final byte value = (payloadWord & 1) == 1 ? BooleanUtils.TRUE_BOOLEAN_AS_BYTE + : BooleanUtils.FALSE_BOOLEAN_AS_BYTE; + chunk.set(offset + ei++, conversion.apply(value)); + validityWord >>= 1; + payloadWord >>= 1; + bitsLeftInThisWord--; + } else { + final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); + pendingSkips += skips; + validityWord >>= skips; + payloadWord >>= skips; + bitsLeftInThisWord -= skips; + } + } while (bitsLeftInThisWord > 0); + } + + if (pendingSkips > 0) { + chunk.fillWithNullValue(offset + ei, pendingSkips); + } + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java index bbf5b398fe7..d03a0758701 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java @@ -40,9 +40,7 @@ public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int return new CharChunkReader(options); case Byte: if (typeInfo.type() == Boolean.class || typeInfo.type() == boolean.class) { - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> BooleanChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new BooleanChunkReader(); } return new ByteChunkReader(options); case Short: From 8a52c6459c0aa761640e18ab8c67fad419ca5271 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 25 Jun 2024 16:02:44 -0500 Subject: [PATCH 071/219] Rough cut of JS impl of chunk reader factory --- .../VarBinaryChunkInputStreamGenerator.java | 2 +- .../api/barrage/WebChunkReaderFactory.java | 144 +++++++++++++++++- 2 files changed, 144 insertions(+), 2 deletions(-) diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarBinaryChunkInputStreamGenerator.java 
b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarBinaryChunkInputStreamGenerator.java index 54981ece538..35b17559c73 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarBinaryChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarBinaryChunkInputStreamGenerator.java @@ -397,7 +397,7 @@ public int drainTo(final OutputStream outputStream) throws IOException { } } - static WritableObjectChunk extractChunkFromInputStream( + public static WritableObjectChunk extractChunkFromInputStream( final DataInput is, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java index 59ddbee5439..a651233fea1 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java @@ -3,13 +3,155 @@ // package io.deephaven.web.client.api.barrage; +import io.deephaven.extensions.barrage.chunk.BooleanChunkReader; +import io.deephaven.extensions.barrage.chunk.ByteChunkReader; +import io.deephaven.extensions.barrage.chunk.CharChunkReader; import io.deephaven.extensions.barrage.chunk.ChunkReader; import io.deephaven.extensions.barrage.chunk.ChunkReadingFactory; +import io.deephaven.extensions.barrage.chunk.DoubleChunkReader; +import io.deephaven.extensions.barrage.chunk.FloatChunkReader; +import io.deephaven.extensions.barrage.chunk.IntChunkReader; +import io.deephaven.extensions.barrage.chunk.LongChunkReader; +import io.deephaven.extensions.barrage.chunk.VarBinaryChunkInputStreamGenerator; +import io.deephaven.extensions.barrage.chunk.VarListChunkReader; import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import org.apache.arrow.flatbuf.Date; +import org.apache.arrow.flatbuf.DateUnit; +import org.apache.arrow.flatbuf.FloatingPoint; +import org.apache.arrow.flatbuf.Int; +import org.apache.arrow.flatbuf.Precision; +import org.apache.arrow.flatbuf.Time; +import org.apache.arrow.flatbuf.TimeUnit; +import org.apache.arrow.flatbuf.Timestamp; +import org.apache.arrow.flatbuf.Type; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; public class WebChunkReaderFactory implements ChunkReadingFactory { @Override public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int factor, ChunkTypeInfo typeInfo) { - return null; + switch (typeInfo.arrowField().typeType()) { + case Type.Int: { + Int t = new Int(); + typeInfo.arrowField().type(t); + switch (t.bitWidth()) { + case 8: { + return new ByteChunkReader(options); + } + case 16: { + return new CharChunkReader(options); + } + case 32: { + return new IntChunkReader(options); + } + case 64: { + return new LongChunkReader(options); + } + default: + throw new IllegalArgumentException("Unsupported Int bitwidth: " + t.bitWidth()); + } + } + case Type.FloatingPoint: { + FloatingPoint t = new FloatingPoint(); + typeInfo.arrowField().type(t); + switch (t.precision()) { + case Precision.SINGLE: { + return new FloatChunkReader(options); + } + case Precision.DOUBLE: { + return new DoubleChunkReader(options); + } + default: + throw new IllegalArgumentException("Unsupported FloatingPoint precision " + 
Precision.name(t.precision())); + } + } + case Type.Binary: { + if (typeInfo.type() == BigInteger.class) { + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + is, + fieldNodeIter, + bufferInfoIter, + BigInteger::new, + outChunk, outOffset, totalRows); + } + if (typeInfo.type() == BigDecimal.class) { + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + is, + fieldNodeIter, + bufferInfoIter, + (final byte[] buf, final int offset, final int length) -> { + // read the int scale value as little endian, arrow's endianness. + final byte b1 = buf[offset]; + final byte b2 = buf[offset + 1]; + final byte b3 = buf[offset + 2]; + final byte b4 = buf[offset + 3]; + final int scale = b4 << 24 | (b3 & 0xFF) << 16 | (b2 & 0xFF) << 8 | (b1 & 0xFF); + return new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale); + }, + outChunk, outOffset, totalRows); + } + } + case Type.Utf8: { + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> + VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream(is, fieldNodeIter, bufferInfoIter, (buf, off, len) -> new String(buf, off, len, StandardCharsets.UTF_8), outChunk, outOffset, totalRows); + } + case Type.Bool: { + return new BooleanChunkReader(); + } + case Type.Date: { + Date t = new Date(); + typeInfo.arrowField().type(t); + switch (t.unit()) { + case DateUnit.MILLISECOND: + return new LongChunkReader(options);//TODO transform + default: + throw new IllegalArgumentException("Unsupported Date unit: " + DateUnit.name(t.unit())); + } + } + case Type.Time: { + Time t = new Time(); + typeInfo.arrowField().type(t); + switch (t.bitWidth()) { + case TimeUnit.NANOSECOND: { + return new LongChunkReader(options);//TODO transform + } + default: + throw new IllegalArgumentException("Unsupported Time unit: " + TimeUnit.name(t.unit())); + } + } + case Type.Timestamp: { + Timestamp t = new Timestamp(); + typeInfo.arrowField().type(t); + switch (t.unit()) { + case TimeUnit.NANOSECOND: { + if (!t.timezone().equals("UTC")) { + throw new IllegalArgumentException("Unsupported tz " + t.timezone()); + } + return new LongChunkReader(options);//TODO transform + } + default: + throw new IllegalArgumentException("Unsupported Timestamp unit: " + TimeUnit.name(t.unit())); + } + } + case Type.List: { + if (typeInfo.componentType() == byte.class) { + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + is, + fieldNodeIter, + bufferInfoIter, + (buf, off, len) -> Arrays.copyOfRange(buf, off, off + len), + outChunk, outOffset, totalRows); + } + return new VarListChunkReader<>(options, typeInfo, this); + } + default: + throw new IllegalArgumentException("Unsupported type: " + Type.name(typeInfo.arrowField().typeType())); + } } } From 70e0a3543edeb67b700d0a65ee4af90d00b42103 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Wed, 26 Jun 2024 06:31:45 -0500 Subject: [PATCH 072/219] Persist reader across messages --- .../web/client/api/subscription/AbstractTableSubscription.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java index 89df37beb83..458fca47fb6 100644 
--- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java @@ -412,8 +412,9 @@ protected void onViewportChange(RangeSet serverViewport, BitSet serverColumns, b || (serverColumns != null && serverColumns.equals(this.columnBitSet)); } + private final WebBarrageStreamReader reader = new WebBarrageStreamReader(); + private void onFlightData(FlightData data) { - WebBarrageStreamReader reader = new WebBarrageStreamReader(); WebBarrageMessage message; try { message = reader.parseFrom(options, null, state.chunkTypes(), state.columnTypes(), state.componentTypes(), From d835d03488525c3463db00b9faef0477541babac Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Wed, 26 Jun 2024 06:32:10 -0500 Subject: [PATCH 073/219] Use DateWrapper type where the server sends Instant --- .../deephaven/web/client/state/ClientTableState.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java b/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java index b6b6312e01b..4414ac45dc4 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java @@ -25,9 +25,7 @@ import io.deephaven.web.shared.data.*; import io.deephaven.web.shared.fu.*; import jsinterop.base.Js; -import org.apache.arrow.flatbuf.Schema; -import java.time.Instant; import java.util.*; import java.util.stream.Collectors; @@ -66,10 +64,10 @@ public ChunkType[] chunkTypes() { if (dataType == Boolean.class || dataType == boolean.class) { return ChunkType.Byte; } - // if (dataType == Instant.class) { - // // Note that storing ZonedDateTime as a primitive is lossy on the time zone. - // return ChunkType.Long; - // } + if (dataType == DateWrapper.class) { + // Note that storing ZonedDateTime as a primitive is lossy on the time zone. 
+ return ChunkType.Long; + } return ChunkType.fromElementType(dataType); }).toArray(ChunkType[]::new); } @@ -100,6 +98,8 @@ public Class[] columnTypes() { case "java.lang.Double": case "double": return double.class; + case "java.time.Instant": + return DateWrapper.class; default: return Object.class; } From ccebd6ae83345656dd977257e4622cfd063c99e9 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Wed, 26 Jun 2024 06:32:33 -0500 Subject: [PATCH 074/219] Fix LE DataInputStream impl to correctly read bytes as unsigned --- .../io/LittleEndianDataInputStream.java | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/com/google/common/io/LittleEndianDataInputStream.java b/web/client-api/src/main/resources/io/deephaven/web/super/com/google/common/io/LittleEndianDataInputStream.java index 4d7ce7b44df..e1f587ac72e 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/com/google/common/io/LittleEndianDataInputStream.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/com/google/common/io/LittleEndianDataInputStream.java @@ -92,7 +92,7 @@ public int readUnsignedShort() throws IOException { byte b2 = readAndCheckByte(); int result = b2; - result = (result << 8) | b1; + result = (result << 8) | (b1 & 0xFF); return result; } @@ -105,9 +105,9 @@ public int readInt() throws IOException { byte b4 = readAndCheckByte(); int result = b4; - result = (result << 8) | b3; - result = (result << 8) | b2; - result = (result << 8) | b1; + result = (result << 8) | (b3 & 0xFF); + result = (result << 8) | (b2 & 0xFF); + result = (result << 8) | (b1 & 0xFF); return result; } @@ -123,14 +123,14 @@ public long readLong() throws IOException { byte b7 = readAndCheckByte(); byte b8 = readAndCheckByte(); - int result = b8; - result = (result << 8) | b7; - result = (result << 8) | b6; - result = (result << 8) | b5; - result = (result << 8) | b4; - result = (result << 8) | b3; - result = (result << 8) | b2; - result = (result << 8) | b1; + long result = b8; + result = (result << 8) | (b7 & 0xFF); + result = (result << 8) | (b6 & 0xFF); + result = (result << 8) | (b5 & 0xFF); + result = (result << 8) | (b4 & 0xFF); + result = (result << 8) | (b3 & 0xFF); + result = (result << 8) | (b2 & 0xFF); + result = (result << 8) | (b1 & 0xFF); return result; } From 86d5ee5b3fba49334770add1044660384fe9f6ed Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Wed, 26 Jun 2024 06:33:11 -0500 Subject: [PATCH 075/219] spotless --- .../api/barrage/WebChunkReaderFactory.java | 63 ++++++++++--------- .../AbstractTableSubscription.java | 1 - 2 files changed, 33 insertions(+), 31 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java index a651233fea1..b4f3df7edb4 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java @@ -65,40 +65,43 @@ public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int return new DoubleChunkReader(options); } default: - throw new IllegalArgumentException("Unsupported FloatingPoint precision " + Precision.name(t.precision())); + throw new IllegalArgumentException( + "Unsupported FloatingPoint precision " + Precision.name(t.precision())); } } case Type.Binary: { if (typeInfo.type() == 
BigInteger.class) { return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - is, - fieldNodeIter, - bufferInfoIter, - BigInteger::new, - outChunk, outOffset, totalRows); + is, + fieldNodeIter, + bufferInfoIter, + BigInteger::new, + outChunk, outOffset, totalRows); } if (typeInfo.type() == BigDecimal.class) { return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - is, - fieldNodeIter, - bufferInfoIter, - (final byte[] buf, final int offset, final int length) -> { - // read the int scale value as little endian, arrow's endianness. - final byte b1 = buf[offset]; - final byte b2 = buf[offset + 1]; - final byte b3 = buf[offset + 2]; - final byte b4 = buf[offset + 3]; - final int scale = b4 << 24 | (b3 & 0xFF) << 16 | (b2 & 0xFF) << 8 | (b1 & 0xFF); - return new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale); - }, - outChunk, outOffset, totalRows); + is, + fieldNodeIter, + bufferInfoIter, + (final byte[] buf, final int offset, final int length) -> { + // read the int scale value as little endian, arrow's endianness. + final byte b1 = buf[offset]; + final byte b2 = buf[offset + 1]; + final byte b3 = buf[offset + 2]; + final byte b4 = buf[offset + 3]; + final int scale = b4 << 24 | (b3 & 0xFF) << 16 | (b2 & 0xFF) << 8 | (b1 & 0xFF); + return new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale); + }, + outChunk, outOffset, totalRows); } } case Type.Utf8: { - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> - VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream(is, fieldNodeIter, bufferInfoIter, (buf, off, len) -> new String(buf, off, len, StandardCharsets.UTF_8), outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream(is, fieldNodeIter, + bufferInfoIter, (buf, off, len) -> new String(buf, off, len, StandardCharsets.UTF_8), + outChunk, outOffset, totalRows); } case Type.Bool: { return new BooleanChunkReader(); @@ -108,7 +111,7 @@ public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int typeInfo.arrowField().type(t); switch (t.unit()) { case DateUnit.MILLISECOND: - return new LongChunkReader(options);//TODO transform + return new LongChunkReader(options);// TODO transform default: throw new IllegalArgumentException("Unsupported Date unit: " + DateUnit.name(t.unit())); } @@ -118,7 +121,7 @@ public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int typeInfo.arrowField().type(t); switch (t.bitWidth()) { case TimeUnit.NANOSECOND: { - return new LongChunkReader(options);//TODO transform + return new LongChunkReader(options);// TODO transform } default: throw new IllegalArgumentException("Unsupported Time unit: " + TimeUnit.name(t.unit())); @@ -132,7 +135,7 @@ public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int if (!t.timezone().equals("UTC")) { throw new IllegalArgumentException("Unsupported tz " + t.timezone()); } - return new LongChunkReader(options);//TODO transform + return new LongChunkReader(options);// TODO transform } default: throw new IllegalArgumentException("Unsupported Timestamp unit: " + TimeUnit.name(t.unit())); @@ -141,12 +144,12 @@ public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int case Type.List: { if (typeInfo.componentType() 
== byte.class) { return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - is, - fieldNodeIter, - bufferInfoIter, - (buf, off, len) -> Arrays.copyOfRange(buf, off, off + len), - outChunk, outOffset, totalRows); + totalRows) -> VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + is, + fieldNodeIter, + bufferInfoIter, + (buf, off, len) -> Arrays.copyOfRange(buf, off, off + len), + outChunk, outOffset, totalRows); } return new VarListChunkReader<>(options, typeInfo, this); } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java index 458fca47fb6..ffa52fff234 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java @@ -432,7 +432,6 @@ protected void onStreamEnd(ResponseStreamWrapper.Status status) { // TODO handle stream end/error } - /** * The columns that were subscribed to when this subscription was created * From 9131a11912fc4919097082d4ac5922a7ffefc194 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Wed, 26 Jun 2024 06:33:18 -0500 Subject: [PATCH 076/219] Enable DH nulls --- .../web/client/api/subscription/AbstractTableSubscription.java | 1 + 1 file changed, 1 insertion(+) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java index ffa52fff234..1e0db55b990 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java @@ -167,6 +167,7 @@ protected void sendBarrageSubscriptionRequest(RangeSet viewport, JsArray .columnConversionMode(ColumnConversionMode.Stringify) .minUpdateIntervalMs(updateIntervalMs == null ? 
0 : (int) (double) updateIntervalMs) .columnsAsList(false)// TODO flip this to true + .useDeephavenNulls(true) .build(); FlatBufferBuilder request = subscriptionRequest( Js.uncheckedCast(state.getHandle().getTicket()), From 0d9f9945b22d2870c65a334f72014c617031b19b Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Wed, 26 Jun 2024 08:31:44 -0500 Subject: [PATCH 077/219] Null handling in client wrappers --- .../java/io/deephaven/web/client/api/DateWrapper.java | 4 ++++ .../java/io/deephaven/web/client/api/LongWrapper.java | 4 ++++ .../web/client/api/barrage/WebChunkReaderFactory.java | 10 ++++++---- .../deephaven/web/client/state/ClientTableState.java | 11 ++++++++--- 4 files changed, 22 insertions(+), 7 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/DateWrapper.java b/web/client-api/src/main/java/io/deephaven/web/client/api/DateWrapper.java index 4e6580b57c0..f53d2a35928 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/DateWrapper.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/DateWrapper.java @@ -4,6 +4,7 @@ package io.deephaven.web.client.api; import elemental2.core.JsDate; +import io.deephaven.util.QueryConstants; import io.deephaven.web.client.api.i18n.JsDateTimeFormat; import jsinterop.annotations.JsIgnore; import jsinterop.annotations.JsType; @@ -17,6 +18,9 @@ public DateWrapper(long valueInNanos) { @JsIgnore public static DateWrapper of(long dateInNanos) { + if (dateInNanos == QueryConstants.NULL_LONG) { + return null; + } return new DateWrapper(dateInNanos); } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/LongWrapper.java b/web/client-api/src/main/java/io/deephaven/web/client/api/LongWrapper.java index ec679c0442a..ea2647184b0 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/LongWrapper.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/LongWrapper.java @@ -3,6 +3,7 @@ // package io.deephaven.web.client.api; +import io.deephaven.util.QueryConstants; import jsinterop.annotations.JsIgnore; import jsinterop.annotations.JsType; @@ -12,6 +13,9 @@ public class LongWrapper { @JsIgnore public static LongWrapper of(long value) { + if (value == QueryConstants.NULL_LONG) { + return null; + } return new LongWrapper(value); } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java index b4f3df7edb4..1a9975f9cf5 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java @@ -15,6 +15,8 @@ import io.deephaven.extensions.barrage.chunk.VarBinaryChunkInputStreamGenerator; import io.deephaven.extensions.barrage.chunk.VarListChunkReader; import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.web.client.api.DateWrapper; +import io.deephaven.web.client.api.LongWrapper; import org.apache.arrow.flatbuf.Date; import org.apache.arrow.flatbuf.DateUnit; import org.apache.arrow.flatbuf.FloatingPoint; @@ -48,7 +50,7 @@ public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int return new IntChunkReader(options); } case 64: { - return new LongChunkReader(options); + return new LongChunkReader(options).transform(LongWrapper::of); } default: throw new IllegalArgumentException("Unsupported Int bitwidth: " + t.bitWidth()); @@ -111,7 +113,7 @@ 
public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int typeInfo.arrowField().type(t); switch (t.unit()) { case DateUnit.MILLISECOND: - return new LongChunkReader(options);// TODO transform + return new LongChunkReader(options).transform(millis -> DateWrapper.of(millis * 1000 * 1000)); default: throw new IllegalArgumentException("Unsupported Date unit: " + DateUnit.name(t.unit())); } @@ -121,7 +123,7 @@ public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int typeInfo.arrowField().type(t); switch (t.bitWidth()) { case TimeUnit.NANOSECOND: { - return new LongChunkReader(options);// TODO transform + return new LongChunkReader(options).transform(DateWrapper::of); } default: throw new IllegalArgumentException("Unsupported Time unit: " + TimeUnit.name(t.unit())); @@ -135,7 +137,7 @@ public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int if (!t.timezone().equals("UTC")) { throw new IllegalArgumentException("Unsupported tz " + t.timezone()); } - return new LongChunkReader(options);// TODO transform + return new LongChunkReader(options).transform(DateWrapper::of); } default: throw new IllegalArgumentException("Unsupported Timestamp unit: " + TimeUnit.name(t.unit())); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java b/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java index 4414ac45dc4..40246775a56 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java @@ -64,9 +64,14 @@ public ChunkType[] chunkTypes() { if (dataType == Boolean.class || dataType == boolean.class) { return ChunkType.Byte; } - if (dataType == DateWrapper.class) { - // Note that storing ZonedDateTime as a primitive is lossy on the time zone. - return ChunkType.Long; + // JS client holds date objects as objects, not as longs +// if (dataType == DateWrapper.class) { +// // Note that storing ZonedDateTime as a primitive is lossy on the time zone. 
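[Editor's note — illustrative sketch, not part of the patch: patch 077 routes 64-bit columns through reader transforms so that the engine's NULL_LONG sentinel becomes a JS null rather than a wrapper around Long.MIN_VALUE; the sentinel check lives in the of(...) factories added above. A hedged sketch of the pattern, assuming a StreamReaderOptions value named options is in scope:]

    // Sketch: map each primitive long to its client wrapper (or null) as it is read.
    ChunkReader timestampReader = new LongChunkReader(options)
            .transform(DateWrapper::of);                               // epoch nanos -> object or null
    ChunkReader dateReader = new LongChunkReader(options)
            .transform(millis -> DateWrapper.of(millis * 1_000_000L)); // Date columns arrive as millis; scale to nanos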
+// return ChunkType.Long; +// } + if (dataType == Long.class || dataType == long.class) { + // JS client holds longs as LongWrappers + return ChunkType.Object; + } return ChunkType.fromElementType(dataType); }).toArray(ChunkType[]::new); } From 4266b0828cf728e490ff9500b6f72c43077e5879 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Wed, 26 Jun 2024 08:38:02 -0500 Subject: [PATCH 078/219] Phase out client-specific constants types --- .../web/client/api/parse/JsDataHandler.java | 20 +++++++++--------- .../api/subscription/QueryConstants.java | 21 ------------------- 2 files changed, 10 insertions(+), 31 deletions(-) delete mode 100644 web/client-api/src/main/java/io/deephaven/web/client/api/subscription/QueryConstants.java diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/parse/JsDataHandler.java b/web/client-api/src/main/java/io/deephaven/web/client/api/parse/JsDataHandler.java index 1c6c9f0766a..4bb583cd86f 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/parse/JsDataHandler.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/parse/JsDataHandler.java @@ -40,16 +40,16 @@ import java.util.List; import java.util.Map; -import static io.deephaven.web.client.api.subscription.QueryConstants.FALSE_BOOLEAN_AS_BYTE; -import static io.deephaven.web.client.api.subscription.QueryConstants.NULL_BOOLEAN_AS_BYTE; -import static io.deephaven.web.client.api.subscription.QueryConstants.NULL_BYTE; -import static io.deephaven.web.client.api.subscription.QueryConstants.NULL_CHAR; -import static io.deephaven.web.client.api.subscription.QueryConstants.NULL_DOUBLE; -import static io.deephaven.web.client.api.subscription.QueryConstants.NULL_FLOAT; -import static io.deephaven.web.client.api.subscription.QueryConstants.NULL_INT; -import static io.deephaven.web.client.api.subscription.QueryConstants.NULL_LONG; -import static io.deephaven.web.client.api.subscription.QueryConstants.NULL_SHORT; -import static io.deephaven.web.client.api.subscription.QueryConstants.TRUE_BOOLEAN_AS_BYTE; +import static io.deephaven.util.BooleanUtils.FALSE_BOOLEAN_AS_BYTE; +import static io.deephaven.util.BooleanUtils.NULL_BOOLEAN_AS_BYTE; +import static io.deephaven.util.BooleanUtils.TRUE_BOOLEAN_AS_BYTE; +import static io.deephaven.util.QueryConstants.NULL_BYTE; +import static io.deephaven.util.QueryConstants.NULL_CHAR; +import static io.deephaven.util.QueryConstants.NULL_DOUBLE; +import static io.deephaven.util.QueryConstants.NULL_FLOAT; +import static io.deephaven.util.QueryConstants.NULL_INT; +import static io.deephaven.util.QueryConstants.NULL_LONG; +import static io.deephaven.util.QueryConstants.NULL_SHORT; /** * Given the expected type of a column, pick one of the enum entries and use that to read the data into arrow buffers.
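[Editor's note — illustrative sketch, not part of the patch: the point of this change is that the client now shares the engine's sentinel definitions (io.deephaven.util.QueryConstants and BooleanUtils) instead of the hand-copied interface deleted below, so the values cannot drift. A hedged usage sketch; readLongFromStream() is a hypothetical helper:]

    // Sketch: null checks compare against the shared engine sentinel values,
    // e.g. QueryConstants.NULL_LONG == Long.MIN_VALUE (see the deleted copy below).
    long value = readLongFromStream();                  // hypothetical helper, stands in for a column read
    boolean isNull = value == QueryConstants.NULL_LONG; // true when the server sent a null cell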
diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/QueryConstants.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/QueryConstants.java deleted file mode 100644 index e49eb7c2846..00000000000 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/QueryConstants.java +++ /dev/null @@ -1,21 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.web.client.api.subscription; - - -/** - * Constants for null values within the Deephaven engine From io.deephaven.util.QueryConstants - */ -public interface QueryConstants { - char NULL_CHAR = Character.MAX_VALUE; - byte NULL_BYTE = Byte.MIN_VALUE; - short NULL_SHORT = Short.MIN_VALUE; - int NULL_INT = Integer.MIN_VALUE; - long NULL_LONG = Long.MIN_VALUE; - float NULL_FLOAT = -Float.MAX_VALUE; - double NULL_DOUBLE = -Double.MAX_VALUE; - byte NULL_BOOLEAN_AS_BYTE = NULL_BYTE; - byte TRUE_BOOLEAN_AS_BYTE = (byte) 1; - byte FALSE_BOOLEAN_AS_BYTE = (byte) 0; -} From 57225271e74f858fa49095deba4799dbbeb1b54e Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 18 Jun 2024 15:21:25 -0500 Subject: [PATCH 079/219] Remove dead class --- .../util/BarrageChunkAppendingMarshaller.java | 206 ------------------ .../test/FlightMessageRoundTripTest.java | 48 ---- 2 files changed, 254 deletions(-) delete mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageChunkAppendingMarshaller.java diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageChunkAppendingMarshaller.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageChunkAppendingMarshaller.java deleted file mode 100644 index edbf60d72a1..00000000000 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageChunkAppendingMarshaller.java +++ /dev/null @@ -1,206 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.extensions.barrage.util; - -import com.google.common.io.LittleEndianDataInputStream; -import com.google.protobuf.CodedInputStream; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.attributes.Values; -import io.deephaven.extensions.barrage.BarrageSnapshotOptions; -import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator; -import io.deephaven.util.datastructures.LongSizedDataStructure; -import io.deephaven.chunk.ChunkType; -import io.deephaven.internal.log.LoggerFactory; -import io.deephaven.io.logger.Logger; -import io.grpc.MethodDescriptor; -import io.grpc.protobuf.ProtoUtils; -import org.apache.arrow.flatbuf.Message; -import org.apache.arrow.flatbuf.MessageHeader; -import org.apache.arrow.flatbuf.RecordBatch; -import org.apache.arrow.flight.impl.Flight; -import org.apache.arrow.flight.impl.FlightServiceGrpc; - -import java.io.InputStream; -import java.nio.ByteBuffer; -import java.util.Arrays; -import java.util.Iterator; -import java.util.PrimitiveIterator; - -/** - * This class is used to append the results of a DoGet directly into destination {@link WritableChunk}. - *

- * It will append the results of a DoGet into the destination chunks, and notify the listener of the number of rows - * appended to the record batch in total. The user will typically want to wait for OnCompletion to be called before - * assuming they have received all the data. - */ -public class BarrageChunkAppendingMarshaller implements MethodDescriptor.Marshaller { - - /** - * Fetch the client side descriptor for a specific DoGet invocation. - *

- * Instead of providing BarrageMessage as the response type, this custom marshaller will return the number of rows - * appended after each RecordBatch. This is informative yet hands-off process reading data into the chunks. - * - * @param columnChunkTypes the chunk types per column - * @param columnTypes the class type per column - * @param componentTypes the component class type per column - * @param destChunks the destination chunks - * @return the client side method descriptor - */ - public static MethodDescriptor getClientDoGetDescriptor( - final ChunkType[] columnChunkTypes, - final Class[] columnTypes, - final Class[] componentTypes, - final WritableChunk[] destChunks) { - final MethodDescriptor.Marshaller requestMarshaller = - ProtoUtils.marshaller(Flight.Ticket.getDefaultInstance()); - final MethodDescriptor descriptor = FlightServiceGrpc.getDoGetMethod(); - - return MethodDescriptor.newBuilder() - .setType(MethodDescriptor.MethodType.SERVER_STREAMING) - .setFullMethodName(descriptor.getFullMethodName()) - .setSampledToLocalTracing(false) - .setRequestMarshaller(requestMarshaller) - .setResponseMarshaller(new BarrageChunkAppendingMarshaller( - BARRAGE_OPTIONS, columnChunkTypes, columnTypes, componentTypes, destChunks)) - .setSchemaDescriptor(descriptor.getSchemaDescriptor()) - .build(); - } - - // DoGet does not get to set any options - private static final BarrageSnapshotOptions BARRAGE_OPTIONS = BarrageSnapshotOptions.builder().build(); - - private static final Logger log = LoggerFactory.getLogger(BarrageChunkAppendingMarshaller.class); - - private final BarrageSnapshotOptions options; - - private final ChunkType[] columnChunkTypes; - private final Class[] columnTypes; - private final Class[] componentTypes; - - private final WritableChunk[] destChunks; - private long numRowsRead = 0; - - public BarrageChunkAppendingMarshaller( - final BarrageSnapshotOptions options, - final ChunkType[] columnChunkTypes, - final Class[] columnTypes, - final Class[] componentTypes, - final WritableChunk[] destChunks) { - this.options = options; - this.columnChunkTypes = columnChunkTypes; - this.columnTypes = columnTypes; - this.componentTypes = componentTypes; - this.destChunks = destChunks; - } - - @Override - public InputStream stream(final Integer value) { - throw new UnsupportedOperationException( - "BarrageDataMarshaller unexpectedly used to directly convert BarrageMessage to InputStream"); - } - - @Override - public Integer parse(final InputStream stream) { - Message header = null; - try { - boolean bodyParsed = false; - - final CodedInputStream decoder = CodedInputStream.newInstance(stream); - - for (int tag = decoder.readTag(); tag != 0; tag = decoder.readTag()) { - if (tag == BarrageProtoUtil.DATA_HEADER_TAG) { - final int size = decoder.readRawVarint32(); - header = Message.getRootAsMessage(ByteBuffer.wrap(decoder.readRawBytes(size))); - continue; - } else if (tag != BarrageProtoUtil.BODY_TAG) { - decoder.skipField(tag); - continue; - } - - if (bodyParsed) { - // although not an error for protobuf, arrow payloads should consider it one - throw new IllegalStateException("Unexpected duplicate body tag"); - } - - if (header == null) { - throw new IllegalStateException("Missing metadata header; cannot decode body"); - } - - if (header.headerType() != org.apache.arrow.flatbuf.MessageHeader.RecordBatch) { - throw new IllegalStateException("Only know how to decode Schema/BarrageRecordBatch messages"); - } - - bodyParsed = true; - final int size = decoder.readRawVarint32(); - final RecordBatch 
batch = (RecordBatch) header.header(new RecordBatch()); - - // noinspection UnstableApiUsage - try (final LittleEndianDataInputStream ois = - new LittleEndianDataInputStream(new BarrageProtoUtil.ObjectInputStreamAdapter(decoder, size))) { - final Iterator fieldNodeIter = - new FlatBufferIteratorAdapter<>(batch.nodesLength(), - i -> new ChunkInputStreamGenerator.FieldNodeInfo(batch.nodes(i))); - - final long[] bufferInfo = new long[batch.buffersLength()]; - for (int bi = 0; bi < batch.buffersLength(); ++bi) { - int offset = LongSizedDataStructure.intSize("BufferInfo", batch.buffers(bi).offset()); - int length = LongSizedDataStructure.intSize("BufferInfo", batch.buffers(bi).length()); - if (bi < batch.buffersLength() - 1) { - final int nextOffset = - LongSizedDataStructure.intSize("BufferInfo", batch.buffers(bi + 1).offset()); - // our parsers handle overhanging buffers - length += Math.max(0, nextOffset - offset - length); - } - bufferInfo[bi] = length; - } - final PrimitiveIterator.OfLong bufferInfoIter = Arrays.stream(bufferInfo).iterator(); - - for (int ci = 0; ci < destChunks.length; ++ci) { - final WritableChunk dest = destChunks[ci]; - - final long remaining = dest.capacity() - dest.size(); - if (batch.length() > remaining) { - throw new BarrageMarshallingException(String.format("Received RecordBatch length (%d) " + - "exceeds the remaining capacity (%d) of the destination Chunk.", batch.length(), - remaining)); - } - - // Barrage should return the provided chunk since there was enough room to append the data - final WritableChunk retChunk = ChunkInputStreamGenerator.extractChunkFromInputStream( - options, columnChunkTypes[ci], columnTypes[ci], componentTypes[ci], fieldNodeIter, - bufferInfoIter, ois, dest, dest.size(), (int) batch.length()); - - if (retChunk != dest) { - throw new BarrageMarshallingException("Unexpected chunk returned from " + - "ChunkInputStreamGenerator.extractChunkFromInputStream"); - } - - // barrage does not alter the destination chunk size, so let's set it ourselves - dest.setSize(dest.size() + (int) batch.length()); - } - numRowsRead += batch.length(); - } - } - - if (header != null && header.headerType() == MessageHeader.Schema) { - // getting started, but no rows yet; schemas do not have body tags - return 0; - } - - if (!bodyParsed) { - throw new IllegalStateException("Missing body tag"); - } - - // we're appending directly to the chunk, but courteously let our user know how many rows were read - return (int) numRowsRead; - } catch (final Exception e) { - log.error().append("Unable to parse a received DoGet: ").append(e).endl(); - if (e instanceof BarrageMarshallingException) { - throw (BarrageMarshallingException) e; - } - throw new GrpcMarshallingException("Unable to parse DoGet", e); - } - } -} diff --git a/server/test-utils/src/main/java/io/deephaven/server/test/FlightMessageRoundTripTest.java b/server/test-utils/src/main/java/io/deephaven/server/test/FlightMessageRoundTripTest.java index 95646536910..bbc47c6f1d3 100644 --- a/server/test-utils/src/main/java/io/deephaven/server/test/FlightMessageRoundTripTest.java +++ b/server/test-utils/src/main/java/io/deephaven/server/test/FlightMessageRoundTripTest.java @@ -38,7 +38,6 @@ import io.deephaven.engine.util.TableDiff; import io.deephaven.engine.util.TableTools; import io.deephaven.extensions.barrage.BarrageSubscriptionOptions; -import io.deephaven.extensions.barrage.util.BarrageChunkAppendingMarshaller; import io.deephaven.extensions.barrage.util.BarrageUtil; import io.deephaven.io.logger.LogBuffer; 
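[Editor's note — illustrative sketch, not part of the patch: patch 079 deletes BarrageChunkAppendingMarshaller and the DoGet test that exercised it. For orientation, a hedged sketch of the io.grpc Marshaller shape the deleted class implemented — deserialize each response message directly into caller-owned chunks and surface only a running row count; the decoding body is elided:]

    // Sketch of a response-only MethodDescriptor.Marshaller<Integer> (io.grpc),
    // mirroring the stream()/parse() pair visible in the deleted class above.
    static final class RowCountingMarshaller implements MethodDescriptor.Marshaller<Integer> {
        private long rowsRead;

        @Override
        public InputStream stream(Integer value) {
            // requests never flow through this marshaller
            throw new UnsupportedOperationException("response-only marshaller");
        }

        @Override
        public Integer parse(InputStream stream) {
            // ...decode one RecordBatch from 'stream' into preallocated destination
            // chunks (elided), accumulating rowsRead by the batch length...
            return (int) rowsRead;
        }
    }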
import io.deephaven.io.logger.LogBufferGlobal; @@ -1065,53 +1064,6 @@ private void assertRoundTripDataEqual(Table deephavenTable, Consumer[] wireTypes = convertedSchema.computeWireTypes(); - final Class[] wireComponentTypes = convertedSchema.computeWireComponentTypes(); - - // noinspection unchecked - final WritableChunk[] destChunks = Arrays.stream(wireChunkTypes) - .map(chunkType -> chunkType.makeWritableChunk(size)).toArray(WritableChunk[]::new); - // zero out the chunks as the marshaller will append to them. - Arrays.stream(destChunks).forEach(dest -> dest.setSize(0)); - - final MethodDescriptor methodDescriptor = BarrageChunkAppendingMarshaller - .getClientDoGetDescriptor(wireChunkTypes, wireTypes, wireComponentTypes, destChunks); - - final Ticket ticket = new Ticket("s/test".getBytes(StandardCharsets.UTF_8)); - final Iterator msgIter = ClientCalls.blockingServerStreamingCall( - clientChannel, methodDescriptor, CallOptions.DEFAULT, - Flight.Ticket.newBuilder().setTicket(ByteString.copyFrom(ticket.getBytes())).build()); - - long totalRows = 0; - while (msgIter.hasNext()) { - totalRows += msgIter.next(); - } - Assert.eq(totalRows, "totalRows", size, "size"); - final LongChunk col_i = destChunks[0].asLongChunk(); - final ObjectChunk col_j = destChunks[1].asObjectChunk(); - Assert.eq(col_i.size(), "col_i.size()", size, "size"); - Assert.eq(col_j.size(), "col_j.size()", size, "size"); - for (int i = 0; i < size; ++i) { - Assert.eq(col_i.get(i), "col_i.get(i)", i, "i"); - Assert.equals(col_j.get(i), "col_j.get(i)", "str_" + i, "str_" + i); - } - } - @Test public void testColumnsAsListFeature() throws Exception { // bind the table in the session From 39ba36edb537e6a38144273d7f855e3f4ccaea7a Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 18 Jun 2024 15:25:46 -0500 Subject: [PATCH 080/219] Make assertion provide more info --- .../barrage/chunk/ByteChunkInputStreamGenerator.java | 4 +--- .../barrage/chunk/CharChunkInputStreamGenerator.java | 4 +--- .../barrage/chunk/DoubleChunkInputStreamGenerator.java | 4 +--- .../barrage/chunk/FloatChunkInputStreamGenerator.java | 4 +--- .../barrage/chunk/IntChunkInputStreamGenerator.java | 4 +--- .../barrage/chunk/LongChunkInputStreamGenerator.java | 4 +--- .../barrage/chunk/ShortChunkInputStreamGenerator.java | 4 +--- 7 files changed, 7 insertions(+), 21 deletions(-) diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java index 59efe7bfb33..194e9982d95 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java @@ -268,9 +268,7 @@ static WritableByteChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java 
b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java index 212ef4c33bd..14fd3f8f79b 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java @@ -264,9 +264,7 @@ static WritableCharChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java index e99ee922cac..a4108804f34 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java @@ -269,9 +269,7 @@ static WritableDoubleChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java index 0daee6eb829..82046928670 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java @@ -268,9 +268,7 @@ static WritableFloatChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java index 667c1921c76..4646cc5bdc6 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java @@ -269,9 +269,7 @@ static WritableIntChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) 
nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java index d8da1b1807d..c0a76261d61 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java @@ -269,9 +269,7 @@ static WritableLongChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java index 4bd912e646b..364b04d0d5f 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java @@ -268,9 +268,7 @@ static WritableShortChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); From 219699dd5d4e9dbb9a6f90933e913ca90dd5cbf1 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 18 Jun 2024 15:44:05 -0500 Subject: [PATCH 081/219] Remove unused BitSet param --- .../extensions/barrage/util/BarrageStreamReader.java | 1 - .../deephaven/extensions/barrage/util/StreamReader.java | 2 -- .../io/deephaven/client/impl/BarrageSnapshotImpl.java | 8 +------- .../io/deephaven/client/impl/BarrageSubscriptionImpl.java | 2 +- 4 files changed, 2 insertions(+), 11 deletions(-) diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java index 3d749adee21..d535ffd0254 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java @@ -59,7 +59,6 @@ public BarrageStreamReader(final LongConsumer deserializeTmConsumer) { @Override public BarrageMessage safelyParseFrom(final StreamReaderOptions options, - final BitSet expectedColumns, final ChunkType[] columnChunkTypes, final Class[] columnTypes, 
final Class[] componentTypes, diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/StreamReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/StreamReader.java index 93b35af0863..be389e894b6 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/StreamReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/StreamReader.java @@ -18,7 +18,6 @@ public interface StreamReader { * Converts an InputStream to a BarrageMessage in the context of the provided parameters. * * @param options the options related to parsing this message - * @param expectedColumns the columns expected to appear in the stream, null implies all columns * @param columnChunkTypes the types to use for each column chunk * @param columnTypes the actual type for the column * @param componentTypes the actual component type for the column @@ -26,7 +25,6 @@ public interface StreamReader { * @return a BarrageMessage filled out by the stream's payload */ BarrageMessage safelyParseFrom(final StreamReaderOptions options, - BitSet expectedColumns, ChunkType[] columnChunkTypes, Class[] columnTypes, Class[] componentTypes, diff --git a/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java b/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java index 6310f2cab1f..e0e0b1f7741 100644 --- a/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java +++ b/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java @@ -63,8 +63,6 @@ public class BarrageSnapshotImpl extends ReferenceCountedLivenessNode implements private final BarrageTable resultTable; private final CompletableFuture

future; - private volatile BitSet expectedColumns; - private volatile int connected = 1; private static final AtomicIntegerFieldUpdater CONNECTED_UPDATER = AtomicIntegerFieldUpdater.newUpdater(BarrageSnapshotImpl.class, "connected"); @@ -209,9 +207,6 @@ public Future
partialTable( alreadyUsed = true; } - // store this for streamreader parser - expectedColumns = columns; - // Send the snapshot request: observer.onNext(FlightData.newBuilder() .setAppMetadata(ByteStringAccess.wrap(makeRequestInternal(viewport, columns, reverseViewport, options))) @@ -355,8 +350,7 @@ public InputStream stream(final BarrageMessage value) { @Override public BarrageMessage parse(final InputStream stream) { - return streamReader.safelyParseFrom(options, expectedColumns, columnChunkTypes, columnTypes, componentTypes, - stream); + return streamReader.safelyParseFrom(options, columnChunkTypes, columnTypes, componentTypes, stream); } } diff --git a/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSubscriptionImpl.java b/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSubscriptionImpl.java index 28d844780c4..0ddf9b9e87b 100644 --- a/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSubscriptionImpl.java +++ b/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSubscriptionImpl.java @@ -429,7 +429,7 @@ public InputStream stream(final BarrageMessage value) { @Override public BarrageMessage parse(final InputStream stream) { - return streamReader.safelyParseFrom(options, null, columnChunkTypes, columnTypes, componentTypes, stream); + return streamReader.safelyParseFrom(options, columnChunkTypes, columnTypes, componentTypes, stream); } } From ede47c89093672ea8f31850a6b5255754c18e211 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 25 Jun 2024 12:19:47 -0500 Subject: [PATCH 082/219] Move BYTES constant into each impl --- .../chunk/ByteChunkInputStreamGenerator.java | 16 +++++--------- .../chunk/CharChunkInputStreamGenerator.java | 16 +++++--------- .../chunk/ChunkInputStreamGenerator.java | 22 +++++++++---------- .../DoubleChunkInputStreamGenerator.java | 16 +++++--------- .../chunk/FloatChunkInputStreamGenerator.java | 16 +++++--------- .../chunk/IntChunkInputStreamGenerator.java | 16 +++++--------- .../chunk/LongChunkInputStreamGenerator.java | 16 +++++--------- .../chunk/ShortChunkInputStreamGenerator.java | 16 +++++--------- 8 files changed, 52 insertions(+), 82 deletions(-) diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java index 194e9982d95..c2cc7f3e453 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java @@ -176,7 +176,6 @@ public interface ByteConversion { } static WritableByteChunk extractChunkFromInputStream( - final int elementSize, final StreamReaderOptions options, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, @@ -185,12 +184,11 @@ static WritableByteChunk extractChunkFromInputStream( final int outOffset, final int totalRows) throws IOException { return extractChunkFromInputStreamWithConversion( - elementSize, options, ByteConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + options, ByteConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final int elementSize, final StreamReaderOptions options, final Function transform, final Iterator fieldNodeIter, @@ -201,7 +199,7 @@ static 
WritableObjectChunk extractChunkFromInputStreamWithTransfo final int totalRows) throws IOException { try (final WritableByteChunk inner = extractChunkFromInputStream( - elementSize, options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( outChunk, @@ -224,7 +222,6 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo } static WritableByteChunk extractChunkFromInputStreamWithConversion( - final int elementSize, final StreamReaderOptions options, final ByteConversion conversion, final Iterator fieldNodeIter, @@ -267,13 +264,13 @@ static WritableByteChunk extractChunkFromInputStreamWithConversion( } // consumed entire validity buffer by here - final long payloadRead = (long) nodeInfo.numElements * elementSize; + final long payloadRead = (long) nodeInfo.numElements * Byte.BYTES; Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); } else { - useValidityBuffer(elementSize, conversion, is, nodeInfo, chunk, outOffset, isValid); + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); } final long overhangPayload = payloadBuffer - payloadRead; @@ -318,7 +315,6 @@ private static void useDeephavenNulls( } private static void useValidityBuffer( - final int elementSize, final ByteConversion conversion, final DataInput is, final FieldNodeInfo nodeInfo, @@ -337,7 +333,7 @@ private static void useValidityBuffer( do { if ((validityWord & 1) == 1) { if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Byte.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); ei += pendingSkips; pendingSkips = 0; @@ -355,7 +351,7 @@ private static void useValidityBuffer( } if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Byte.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java index 14fd3f8f79b..878bc0a6cd6 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java @@ -172,7 +172,6 @@ public interface CharConversion { } static WritableCharChunk extractChunkFromInputStream( - final int elementSize, final StreamReaderOptions options, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, @@ -181,12 +180,11 @@ static WritableCharChunk extractChunkFromInputStream( final int outOffset, final int totalRows) throws IOException { return extractChunkFromInputStreamWithConversion( - elementSize, options, CharConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + options, CharConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final int elementSize, final StreamReaderOptions options, final Function transform, final Iterator fieldNodeIter, @@ -197,7 +195,7 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo final int totalRows) throws IOException { try (final WritableCharChunk inner = 
extractChunkFromInputStream( - elementSize, options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( outChunk, @@ -220,7 +218,6 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo } static WritableCharChunk extractChunkFromInputStreamWithConversion( - final int elementSize, final StreamReaderOptions options, final CharConversion conversion, final Iterator fieldNodeIter, @@ -263,13 +260,13 @@ static WritableCharChunk extractChunkFromInputStreamWithConversion( } // consumed entire validity buffer by here - final long payloadRead = (long) nodeInfo.numElements * elementSize; + final long payloadRead = (long) nodeInfo.numElements * Character.BYTES; Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); } else { - useValidityBuffer(elementSize, conversion, is, nodeInfo, chunk, outOffset, isValid); + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); } final long overhangPayload = payloadBuffer - payloadRead; @@ -314,7 +311,6 @@ private static void useDeephavenNulls( } private static void useValidityBuffer( - final int elementSize, final CharConversion conversion, final DataInput is, final FieldNodeInfo nodeInfo, @@ -333,7 +329,7 @@ private static void useValidityBuffer( do { if ((validityWord & 1) == 1) { if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Character.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); ei += pendingSkips; pendingSkips = 0; @@ -351,7 +347,7 @@ private static void useValidityBuffer( } if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Character.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java index 92c62dd1d00..4a3b01b48fb 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java @@ -213,36 +213,34 @@ static WritableChunk extractChunkFromInputStream( throw new UnsupportedOperationException("Booleans are reinterpreted as bytes"); case Char: return CharChunkInputStreamGenerator.extractChunkFromInputStream( - Character.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Byte: if (type == Boolean.class || type == boolean.class) { return BooleanChunkInputStreamGenerator.extractChunkFromInputStream( options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } return ByteChunkInputStreamGenerator.extractChunkFromInputStream( - Byte.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Short: return ShortChunkInputStreamGenerator.extractChunkFromInputStream( - Short.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Int: return 
IntChunkInputStreamGenerator.extractChunkFromInputStream( - Integer.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Long: if (factor == 1) { return LongChunkInputStreamGenerator.extractChunkFromInputStream( - Long.BYTES, options, - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithConversion( - Long.BYTES, options, - (long v) -> v == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : (v * factor), + options, (long v) -> v == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : (v * factor), fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Float: return FloatChunkInputStreamGenerator.extractChunkFromInputStream( - Float.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Double: return DoubleChunkInputStreamGenerator.extractChunkFromInputStream( - Double.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Object: if (type.isArray()) { if (componentType == byte.class) { @@ -347,7 +345,7 @@ static WritableChunk extractChunkFromInputStream( } if (type == LocalDate.class) { return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( - Long.BYTES, options, + options, value -> value == QueryConstants.NULL_LONG ? null : LocalDate.ofEpochDay(value / MS_PER_DAY), @@ -355,7 +353,7 @@ static WritableChunk extractChunkFromInputStream( } if (type == LocalTime.class) { return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( - Long.BYTES, options, + options, value -> value == QueryConstants.NULL_LONG ? 
null : LocalTime.ofNanoOfDay(value), fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java index a4108804f34..c5283a02364 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java @@ -177,7 +177,6 @@ public interface DoubleConversion { } static WritableDoubleChunk extractChunkFromInputStream( - final int elementSize, final StreamReaderOptions options, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, @@ -186,12 +185,11 @@ static WritableDoubleChunk extractChunkFromInputStream( final int outOffset, final int totalRows) throws IOException { return extractChunkFromInputStreamWithConversion( - elementSize, options, DoubleConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + options, DoubleConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final int elementSize, final StreamReaderOptions options, final Function transform, final Iterator fieldNodeIter, @@ -202,7 +200,7 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo final int totalRows) throws IOException { try (final WritableDoubleChunk inner = extractChunkFromInputStream( - elementSize, options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( outChunk, @@ -225,7 +223,6 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo } static WritableDoubleChunk extractChunkFromInputStreamWithConversion( - final int elementSize, final StreamReaderOptions options, final DoubleConversion conversion, final Iterator fieldNodeIter, @@ -268,13 +265,13 @@ static WritableDoubleChunk extractChunkFromInputStreamWithConversion( } // consumed entire validity buffer by here - final long payloadRead = (long) nodeInfo.numElements * elementSize; + final long payloadRead = (long) nodeInfo.numElements * Double.BYTES; Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); } else { - useValidityBuffer(elementSize, conversion, is, nodeInfo, chunk, outOffset, isValid); + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); } final long overhangPayload = payloadBuffer - payloadRead; @@ -319,7 +316,6 @@ private static void useDeephavenNulls( } private static void useValidityBuffer( - final int elementSize, final DoubleConversion conversion, final DataInput is, final FieldNodeInfo nodeInfo, @@ -338,7 +334,7 @@ private static void useValidityBuffer( do { if ((validityWord & 1) == 1) { if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Double.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); ei += pendingSkips; pendingSkips = 0; @@ -356,7 +352,7 @@ private static void useValidityBuffer( } if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Double.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); } } diff --git 
a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java index 82046928670..19b52593bff 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java @@ -176,7 +176,6 @@ public interface FloatConversion { } static WritableFloatChunk extractChunkFromInputStream( - final int elementSize, final StreamReaderOptions options, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, @@ -185,12 +184,11 @@ static WritableFloatChunk extractChunkFromInputStream( final int outOffset, final int totalRows) throws IOException { return extractChunkFromInputStreamWithConversion( - elementSize, options, FloatConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + options, FloatConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final int elementSize, final StreamReaderOptions options, final Function transform, final Iterator fieldNodeIter, @@ -201,7 +199,7 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo final int totalRows) throws IOException { try (final WritableFloatChunk inner = extractChunkFromInputStream( - elementSize, options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( outChunk, @@ -224,7 +222,6 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo } static WritableFloatChunk extractChunkFromInputStreamWithConversion( - final int elementSize, final StreamReaderOptions options, final FloatConversion conversion, final Iterator fieldNodeIter, @@ -267,13 +264,13 @@ static WritableFloatChunk extractChunkFromInputStreamWithConversion( } // consumed entire validity buffer by here - final long payloadRead = (long) nodeInfo.numElements * elementSize; + final long payloadRead = (long) nodeInfo.numElements * Float.BYTES; Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); } else { - useValidityBuffer(elementSize, conversion, is, nodeInfo, chunk, outOffset, isValid); + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); } final long overhangPayload = payloadBuffer - payloadRead; @@ -318,7 +315,6 @@ private static void useDeephavenNulls( } private static void useValidityBuffer( - final int elementSize, final FloatConversion conversion, final DataInput is, final FieldNodeInfo nodeInfo, @@ -337,7 +333,7 @@ private static void useValidityBuffer( do { if ((validityWord & 1) == 1) { if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Float.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); ei += pendingSkips; pendingSkips = 0; @@ -355,7 +351,7 @@ private static void useValidityBuffer( } if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Float.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java 
b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java index 4646cc5bdc6..91714f4dd43 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java @@ -177,7 +177,6 @@ public interface IntConversion { } static WritableIntChunk extractChunkFromInputStream( - final int elementSize, final StreamReaderOptions options, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, @@ -186,12 +185,11 @@ static WritableIntChunk extractChunkFromInputStream( final int outOffset, final int totalRows) throws IOException { return extractChunkFromInputStreamWithConversion( - elementSize, options, IntConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + options, IntConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final int elementSize, final StreamReaderOptions options, final Function transform, final Iterator fieldNodeIter, @@ -202,7 +200,7 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo final int totalRows) throws IOException { try (final WritableIntChunk inner = extractChunkFromInputStream( - elementSize, options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( outChunk, @@ -225,7 +223,6 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo } static WritableIntChunk extractChunkFromInputStreamWithConversion( - final int elementSize, final StreamReaderOptions options, final IntConversion conversion, final Iterator fieldNodeIter, @@ -268,13 +265,13 @@ static WritableIntChunk extractChunkFromInputStreamWithConversion( } // consumed entire validity buffer by here - final long payloadRead = (long) nodeInfo.numElements * elementSize; + final long payloadRead = (long) nodeInfo.numElements * Integer.BYTES; Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); } else { - useValidityBuffer(elementSize, conversion, is, nodeInfo, chunk, outOffset, isValid); + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); } final long overhangPayload = payloadBuffer - payloadRead; @@ -319,7 +316,6 @@ private static void useDeephavenNulls( } private static void useValidityBuffer( - final int elementSize, final IntConversion conversion, final DataInput is, final FieldNodeInfo nodeInfo, @@ -338,7 +334,7 @@ private static void useValidityBuffer( do { if ((validityWord & 1) == 1) { if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Integer.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); ei += pendingSkips; pendingSkips = 0; @@ -356,7 +352,7 @@ private static void useValidityBuffer( } if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Integer.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java index c0a76261d61..a28c4006d1d 100644 --- 
a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java @@ -177,7 +177,6 @@ public interface LongConversion { } static WritableLongChunk extractChunkFromInputStream( - final int elementSize, final StreamReaderOptions options, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, @@ -186,12 +185,11 @@ static WritableLongChunk extractChunkFromInputStream( final int outOffset, final int totalRows) throws IOException { return extractChunkFromInputStreamWithConversion( - elementSize, options, LongConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + options, LongConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final int elementSize, final StreamReaderOptions options, final Function transform, final Iterator fieldNodeIter, @@ -202,7 +200,7 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo final int totalRows) throws IOException { try (final WritableLongChunk inner = extractChunkFromInputStream( - elementSize, options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( outChunk, @@ -225,7 +223,6 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo } static WritableLongChunk extractChunkFromInputStreamWithConversion( - final int elementSize, final StreamReaderOptions options, final LongConversion conversion, final Iterator fieldNodeIter, @@ -268,13 +265,13 @@ static WritableLongChunk extractChunkFromInputStreamWithConversion( } // consumed entire validity buffer by here - final long payloadRead = (long) nodeInfo.numElements * elementSize; + final long payloadRead = (long) nodeInfo.numElements * Long.BYTES; Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); } else { - useValidityBuffer(elementSize, conversion, is, nodeInfo, chunk, outOffset, isValid); + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); } final long overhangPayload = payloadBuffer - payloadRead; @@ -319,7 +316,6 @@ private static void useDeephavenNulls( } private static void useValidityBuffer( - final int elementSize, final LongConversion conversion, final DataInput is, final FieldNodeInfo nodeInfo, @@ -338,7 +334,7 @@ private static void useValidityBuffer( do { if ((validityWord & 1) == 1) { if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Long.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); ei += pendingSkips; pendingSkips = 0; @@ -356,7 +352,7 @@ private static void useValidityBuffer( } if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Long.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java index 364b04d0d5f..68a2ecf86b1 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java +++ 
b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java @@ -176,7 +176,6 @@ public interface ShortConversion { } static WritableShortChunk extractChunkFromInputStream( - final int elementSize, final StreamReaderOptions options, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, @@ -185,12 +184,11 @@ static WritableShortChunk extractChunkFromInputStream( final int outOffset, final int totalRows) throws IOException { return extractChunkFromInputStreamWithConversion( - elementSize, options, ShortConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + options, ShortConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final int elementSize, final StreamReaderOptions options, final Function transform, final Iterator fieldNodeIter, @@ -201,7 +199,7 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo final int totalRows) throws IOException { try (final WritableShortChunk inner = extractChunkFromInputStream( - elementSize, options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( outChunk, @@ -224,7 +222,6 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo } static WritableShortChunk extractChunkFromInputStreamWithConversion( - final int elementSize, final StreamReaderOptions options, final ShortConversion conversion, final Iterator fieldNodeIter, @@ -267,13 +264,13 @@ static WritableShortChunk extractChunkFromInputStreamWithConversion( } // consumed entire validity buffer by here - final long payloadRead = (long) nodeInfo.numElements * elementSize; + final long payloadRead = (long) nodeInfo.numElements * Short.BYTES; Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); } else { - useValidityBuffer(elementSize, conversion, is, nodeInfo, chunk, outOffset, isValid); + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); } final long overhangPayload = payloadBuffer - payloadRead; @@ -318,7 +315,6 @@ private static void useDeephavenNulls( } private static void useValidityBuffer( - final int elementSize, final ShortConversion conversion, final DataInput is, final FieldNodeInfo nodeInfo, @@ -337,7 +333,7 @@ private static void useValidityBuffer( do { if ((validityWord & 1) == 1) { if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Short.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); ei += pendingSkips; pendingSkips = 0; @@ -355,7 +351,7 @@ private static void useValidityBuffer( } if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Short.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); } } From a3b26c7d5cfc3c860d8e01e29e6fa52e1fe473ce Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Wed, 26 Jun 2024 08:46:20 -0500 Subject: [PATCH 083/219] Make two reader methods public so they can be accessed from web --- .../barrage/chunk/FixedWidthChunkInputStreamGenerator.java | 2 +- .../barrage/chunk/VarBinaryChunkInputStreamGenerator.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git 
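Alongside the plain readers, each primitive generator above keeps an extractChunkFromInputStreamWithTransform variant: decode the primitives first, then map every non-null element through a function (the LocalDate and LocalTime conversions elsewhere in this series are such transforms). A simplified, array-based sketch of that shape, assuming Deephaven's NULL_LONG sentinel of Long.MIN_VALUE; the class and method names here are illustrative only:

import java.time.LocalTime;
import java.util.Arrays;
import java.util.function.LongFunction;

final class TransformSketch {
    static final long NULL_LONG = Long.MIN_VALUE; // stand-in for QueryConstants.NULL_LONG

    static <T> T[] transform(long[] values, LongFunction<T> f, T[] out) {
        for (int i = 0; i < values.length; ++i) {
            // nulls are checked before the transform runs, exactly as in the readers above
            out[i] = values[i] == NULL_LONG ? null : f.apply(values[i]);
        }
        return out;
    }

    public static void main(String[] args) {
        long[] nanos = {0L, 3_600_000_000_000L, NULL_LONG};
        LocalTime[] times = transform(nanos, LocalTime::ofNanoOfDay, new LocalTime[3]);
        System.out.println(Arrays.toString(times)); // [00:00, 01:00, null]
    }
}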
a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FixedWidthChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FixedWidthChunkInputStreamGenerator.java index 46541c8ec1d..7b77b00911b 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FixedWidthChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FixedWidthChunkInputStreamGenerator.java @@ -40,7 +40,7 @@ public interface TypeConversion { * @param the result type * @return the resulting chunk of the buffer that is read */ - static WritableObjectChunk extractChunkFromInputStreamWithTypeConversion( + public static WritableObjectChunk extractChunkFromInputStreamWithTypeConversion( final int elementSize, final StreamReaderOptions options, final TypeConversion conversion, diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarBinaryChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarBinaryChunkInputStreamGenerator.java index e84184be19c..34a460c31d0 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarBinaryChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarBinaryChunkInputStreamGenerator.java @@ -396,7 +396,7 @@ public int drainTo(final OutputStream outputStream) throws IOException { } } - static WritableObjectChunk extractChunkFromInputStream( + public static WritableObjectChunk extractChunkFromInputStream( final DataInput is, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, From df73cd2a39034c22b5aab50ee6b2bb9d2692c767 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Mon, 24 Jun 2024 20:36:48 -0500 Subject: [PATCH 084/219] Commit #1 reading chunks, checkpoint to talk to nate, next will try reading schema at beginning of stream --- .../chunk/ChunkInputStreamGenerator.java | 173 +-------------- .../barrage/chunk/ChunkReadingFactory.java | 116 ++++++++++ .../chunk/DefaultChunkReadingFactory.java | 207 ++++++++++++++++++ .../VarListChunkInputStreamGenerator.java | 20 +- .../VectorChunkInputStreamGenerator.java | 23 +- .../barrage/util/ArrowToTableConverter.java | 10 +- .../barrage/util/BarrageStreamReader.java | 19 +- .../client/impl/BarrageSnapshotImpl.java | 3 +- 8 files changed, 381 insertions(+), 190 deletions(-) create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java index 4a3b01b48fb..98cf03908e8 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java @@ -10,7 +10,6 @@ import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.rowset.RowSet; -import io.deephaven.extensions.barrage.ColumnConversionMode; import io.deephaven.extensions.barrage.util.DefensiveDrainable; import io.deephaven.extensions.barrage.util.StreamReaderOptions; import 
io.deephaven.time.DateTimeUtils; @@ -19,7 +18,6 @@ import io.deephaven.chunk.Chunk; import io.deephaven.chunk.ChunkType; import io.deephaven.util.SafeCloseable; -import io.deephaven.util.type.TypeUtils; import io.deephaven.vector.Vector; import org.jetbrains.annotations.Nullable; @@ -31,7 +29,6 @@ import java.time.LocalDate; import java.time.LocalTime; import java.time.ZonedDateTime; -import java.util.Arrays; import java.util.Iterator; import java.util.PrimitiveIterator; @@ -187,6 +184,7 @@ static ChunkInputStreamGenerator makeInputStreamGenerator( } } + @Deprecated static WritableChunk extractChunkFromInputStream( final StreamReaderOptions options, final ChunkType chunkType, final Class type, final Class componentType, @@ -195,10 +193,10 @@ static WritableChunk extractChunkFromInputStream( final DataInput is, final WritableChunk outChunk, final int offset, final int totalRows) throws IOException { return extractChunkFromInputStream(options, 1, chunkType, type, componentType, fieldNodeIter, bufferInfoIter, - is, - outChunk, offset, totalRows); + is, outChunk, offset, totalRows); } + @Deprecated static WritableChunk extractChunkFromInputStream( final StreamReaderOptions options, final int factor, @@ -207,168 +205,9 @@ static WritableChunk extractChunkFromInputStream( final PrimitiveIterator.OfLong bufferInfoIter, final DataInput is, final WritableChunk outChunk, final int outOffset, final int totalRows) throws IOException { - // TODO (deephaven-core#5453): pass in ArrowType to enable ser/deser of single java class in multiple formats - switch (chunkType) { - case Boolean: - throw new UnsupportedOperationException("Booleans are reinterpreted as bytes"); - case Char: - return CharChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Byte: - if (type == Boolean.class || type == boolean.class) { - return BooleanChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - return ByteChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Short: - return ShortChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Int: - return IntChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Long: - if (factor == 1) { - return LongChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithConversion( - options, (long v) -> v == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_LONG : (v * factor), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Float: - return FloatChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Double: - return DoubleChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Object: - if (type.isArray()) { - if (componentType == byte.class) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - is, - fieldNodeIter, - bufferInfoIter, - (buf, off, len) -> Arrays.copyOfRange(buf, off, off + len), - outChunk, outOffset, totalRows); - } else { - return VarListChunkInputStreamGenerator.extractChunkFromInputStream( - options, type, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - } - if (Vector.class.isAssignableFrom(type)) { - // noinspection unchecked - return VectorChunkInputStreamGenerator.extractChunkFromInputStream( - options, (Class>) type, componentType, fieldNodeIter, bufferInfoIter, is, - outChunk, outOffset, totalRows); - } - if (type == BigInteger.class) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - is, - fieldNodeIter, - bufferInfoIter, - BigInteger::new, - outChunk, outOffset, totalRows); - } - if (type == BigDecimal.class) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - is, - fieldNodeIter, - bufferInfoIter, - (final byte[] buf, final int offset, final int length) -> { - // read the int scale value as little endian, arrow's endianness. - final byte b1 = buf[offset]; - final byte b2 = buf[offset + 1]; - final byte b3 = buf[offset + 2]; - final byte b4 = buf[offset + 3]; - final int scale = b4 << 24 | (b3 & 0xFF) << 16 | (b2 & 0xFF) << 8 | (b1 & 0xFF); - return new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale); - }, - outChunk, outOffset, totalRows); - } - if (type == Instant.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Long.BYTES, options, io -> { - final long value = io.readLong(); - if (value == QueryConstants.NULL_LONG) { - return null; - } - return DateTimeUtils.epochNanosToInstant(value * factor); - }, - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == ZonedDateTime.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Long.BYTES, options, io -> { - final long value = io.readLong(); - if (value == QueryConstants.NULL_LONG) { - return null; - } - return DateTimeUtils.epochNanosToZonedDateTime( - value * factor, DateTimeUtils.timeZone()); - }, - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == Byte.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Byte.BYTES, options, io -> TypeUtils.box(io.readByte()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == Character.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Character.BYTES, options, io -> TypeUtils.box(io.readChar()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == Double.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Double.BYTES, options, io -> TypeUtils.box(io.readDouble()), - fieldNodeIter, bufferInfoIter, is, outChunk, 
outOffset, totalRows); - } - if (type == Float.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Float.BYTES, options, io -> TypeUtils.box(io.readFloat()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == Integer.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Integer.BYTES, options, io -> TypeUtils.box(io.readInt()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == Long.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Long.BYTES, options, io -> TypeUtils.box(io.readLong()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == Short.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Short.BYTES, options, io -> TypeUtils.box(io.readShort()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == LocalDate.class) { - return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( - options, - value -> value == QueryConstants.NULL_LONG - ? null - : LocalDate.ofEpochDay(value / MS_PER_DAY), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == LocalTime.class) { - return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( - options, - value -> value == QueryConstants.NULL_LONG ? null : LocalTime.ofNanoOfDay(value), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == String.class || - options.columnConversionMode().equals(ColumnConversionMode.Stringify)) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream(is, fieldNodeIter, - bufferInfoIter, - (buf, off, len) -> new String(buf, off, len, Charsets.UTF_8), outChunk, outOffset, - totalRows); - } - throw new UnsupportedOperationException( - "Do not yet support column conversion mode: " + options.columnConversionMode()); - default: - throw new UnsupportedOperationException(); - } + return DefaultChunkReadingFactory.INSTANCE.extractChunkFromInputStream(options, factor, + new ChunkReadingFactory.ChunkTypeInfo(chunkType, type, componentType, null), fieldNodeIter, + bufferInfoIter, is, outChunk, outOffset, totalRows); } /** diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java new file mode 100644 index 00000000000..d7cfb18db00 --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java @@ -0,0 +1,116 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.chunk.ChunkType; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import org.apache.arrow.flatbuf.Field; +import org.apache.arrow.flatbuf.Type; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; + +/** + * + */ +public interface ChunkReadingFactory { + /** + * + */ + class ChunkTypeInfo { + private final ChunkType chunkType; + private final Class type; + private final Class componentType; + private final Field arrowField; + + public 
ChunkTypeInfo(ChunkType chunkType, Class type, Class componentType, Field arrowField) { + this.chunkType = chunkType; + this.type = type; + this.componentType = componentType; + this.arrowField = arrowField; + } + + public ChunkType chunkType() { + return chunkType; + } + + public Class type() { + return type; + } + + public Class componentType() { + return componentType; + } + + public Field arrowField() { + return arrowField; + } + + public Field componentArrowField() { + if (arrowField.typeType() != Type.List) { + throw new IllegalStateException("Not a flight List"); + } + if (arrowField.childrenLength() != 1) { + throw new IllegalStateException("Incorrect number of child Fields"); + } + return arrowField.children(0); + } + } + + /** + * + * @param options + * @param factor + * @param typeInfo + * @param fieldNodeIter + * @param bufferInfoIter + * @param is + * @param outChunk + * @param outOffset + * @param totalRows + * @return + * @throws IOException + */ + WritableChunk extractChunkFromInputStream( + final StreamReaderOptions options, + final int factor, + final ChunkTypeInfo typeInfo, + final Iterator fieldNodeIter, + final PrimitiveIterator.OfLong bufferInfoIter, + final DataInput is, + final WritableChunk outChunk, + final int outOffset, + final int totalRows) throws IOException; + + /** + * + * @param options + * @param typeInfo + * @param fieldNodeIter + * @param bufferInfoIter + * @param is + * @param outChunk + * @param offset + * @param totalRows + * @return + * @throws IOException + */ + default WritableChunk extractChunkFromInputStream( + final StreamReaderOptions options, + final ChunkTypeInfo typeInfo, + final Iterator fieldNodeIter, + final PrimitiveIterator.OfLong bufferInfoIter, + final DataInput is, + final WritableChunk outChunk, + final int offset, + final int totalRows) throws IOException { + return extractChunkFromInputStream(options, 1, typeInfo, fieldNodeIter, bufferInfoIter, is, outChunk, offset, + totalRows); + } + +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java new file mode 100644 index 00000000000..e7a5bcd665c --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java @@ -0,0 +1,207 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.extensions.barrage.chunk; + +import com.google.common.base.Charsets; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.ColumnConversionMode; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.time.DateTimeUtils; +import io.deephaven.util.QueryConstants; +import io.deephaven.util.type.TypeUtils; +import io.deephaven.vector.Vector; + +import java.io.DataInput; +import java.io.IOException; +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalTime; +import java.time.ZonedDateTime; +import java.util.Arrays; +import java.util.Iterator; +import java.util.PrimitiveIterator; + +import static io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator.MS_PER_DAY; + +/** + * JVM implementation of ChunkReadingFactory, suitable for use in Java clients and servers. 
This default implementation + * may not round trip flight types correctly, but will round trip Deephaven table definitions and table data. Neither of + * these is a required/expected property of being a Flight/Barrage/Deephaven client. + */ +public final class DefaultChunkReadingFactory implements ChunkReadingFactory { + public static final ChunkReadingFactory INSTANCE = new DefaultChunkReadingFactory(); + + @Override + public WritableChunk extractChunkFromInputStream(StreamReaderOptions options, int factor, + ChunkTypeInfo typeInfo, Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + // TODO (deephaven-core#5453): pass in ArrowType to enable ser/deser of single java class in multiple formats + switch (typeInfo.chunkType()) { + case Boolean: + throw new UnsupportedOperationException("Booleans are reinterpreted as bytes"); + case Char: + return CharChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + case Byte: + if (typeInfo.type() == Boolean.class || typeInfo.type() == boolean.class) { + return BooleanChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + return ByteChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + case Short: + return ShortChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + case Int: + return IntChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + case Long: + if (factor == 1) { + return LongChunkInputStreamGenerator.extractChunkFromInputStream( + options, + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithConversion( + options, + (long v) -> v == QueryConstants.NULL_LONG ?
QueryConstants.NULL_LONG : (v * factor), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + case Float: + return FloatChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + case Double: + return DoubleChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + case Object: + if (typeInfo.type().isArray()) { + if (typeInfo.componentType() == byte.class) { + return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + is, + fieldNodeIter, + bufferInfoIter, + (buf, off, len) -> Arrays.copyOfRange(buf, off, off + len), + outChunk, outOffset, totalRows); + } else { + return VarListChunkInputStreamGenerator.extractChunkFromInputStream(options, typeInfo, + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows, this); + } + } + if (Vector.class.isAssignableFrom(typeInfo.type())) { + return VectorChunkInputStreamGenerator.extractChunkFromInputStream(options, + typeInfo, fieldNodeIter, bufferInfoIter, + is, outChunk, outOffset, totalRows, this); + } + if (typeInfo.type() == BigInteger.class) { + return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + is, + fieldNodeIter, + bufferInfoIter, + BigInteger::new, + outChunk, outOffset, totalRows); + } + if (typeInfo.type() == BigDecimal.class) { + return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + is, + fieldNodeIter, + bufferInfoIter, + (final byte[] buf, final int offset, final int length) -> { + // read the int scale value as little endian, arrow's endianness. + final byte b1 = buf[offset]; + final byte b2 = buf[offset + 1]; + final byte b3 = buf[offset + 2]; + final byte b4 = buf[offset + 3]; + final int scale = b4 << 24 | (b3 & 0xFF) << 16 | (b2 & 0xFF) << 8 | (b1 & 0xFF); + return new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale); + }, + outChunk, outOffset, totalRows); + } + if (typeInfo.type() == Instant.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Long.BYTES, options, io -> { + final long value = io.readLong(); + if (value == QueryConstants.NULL_LONG) { + return null; + } + return DateTimeUtils.epochNanosToInstant(value * factor); + }, + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == ZonedDateTime.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Long.BYTES, options, io -> { + final long value = io.readLong(); + if (value == QueryConstants.NULL_LONG) { + return null; + } + return DateTimeUtils.epochNanosToZonedDateTime( + value * factor, DateTimeUtils.timeZone()); + }, + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == Byte.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Byte.BYTES, options, io -> TypeUtils.box(io.readByte()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == Character.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Character.BYTES, options, io -> TypeUtils.box(io.readChar()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == Double.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Double.BYTES, options, io -> 
TypeUtils.box(io.readDouble()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == Float.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Float.BYTES, options, io -> TypeUtils.box(io.readFloat()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == Integer.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Integer.BYTES, options, io -> TypeUtils.box(io.readInt()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == Long.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Long.BYTES, options, io -> TypeUtils.box(io.readLong()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == Short.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Short.BYTES, options, io -> TypeUtils.box(io.readShort()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == LocalDate.class) { + return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( + options, + value -> value == QueryConstants.NULL_LONG + ? null + : LocalDate.ofEpochDay(value / MS_PER_DAY), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == LocalTime.class) { + return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( + options, + value -> value == QueryConstants.NULL_LONG ? null : LocalTime.ofNanoOfDay(value), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == String.class || + options.columnConversionMode().equals(ColumnConversionMode.Stringify)) { + return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream(is, fieldNodeIter, + bufferInfoIter, + (buf, off, len) -> new String(buf, off, len, Charsets.UTF_8), outChunk, outOffset, + totalRows); + } + throw new UnsupportedOperationException( + "Do not yet support column conversion mode: " + options.columnConversionMode()); + default: + throw new UnsupportedOperationException(); + } + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java index 0a109230ca6..e1075e7dcf3 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java @@ -235,19 +235,20 @@ public int drainTo(final OutputStream outputStream) throws IOException { static WritableObjectChunk extractChunkFromInputStream( final StreamReaderOptions options, - final Class type, + final ChunkReadingFactory.ChunkTypeInfo typeInfo, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, final DataInput is, final WritableChunk outChunk, final int outOffset, - final int totalRows) throws IOException { + final int totalRows, + ChunkReadingFactory chunkReadingFactory) throws IOException { final FieldNodeInfo nodeInfo = fieldNodeIter.next(); final long validityBuffer = bufferInfoIter.nextLong(); final long offsetsBuffer = bufferInfoIter.nextLong(); - final Class componentType = type.getComponentType(); + 
final Class componentType = typeInfo.type().getComponentType(); final Class innerComponentType = componentType != null ? componentType.getComponentType() : null; final ChunkType chunkType; @@ -259,8 +260,11 @@ static WritableObjectChunk extractChunkFromInputStream( } if (nodeInfo.numElements == 0) { - try (final WritableChunk ignored = ChunkInputStreamGenerator.extractChunkFromInputStream( - options, chunkType, componentType, innerComponentType, fieldNodeIter, + try (final WritableChunk ignored = chunkReadingFactory.extractChunkFromInputStream( + options, + new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, innerComponentType, + typeInfo.componentArrowField()), + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { return WritableObjectChunk.makeWritableChunk(nodeInfo.numElements); } @@ -299,8 +303,10 @@ static WritableObjectChunk extractChunkFromInputStream( } final ArrayExpansionKernel kernel = ArrayExpansionKernel.makeExpansionKernel(chunkType, componentType); - try (final WritableChunk inner = ChunkInputStreamGenerator.extractChunkFromInputStream( - options, chunkType, componentType, innerComponentType, + try (final WritableChunk inner = chunkReadingFactory.extractChunkFromInputStream( + options, + new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, innerComponentType, + typeInfo.componentArrowField()), fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { chunk = kernel.contract(inner, offsets, outChunk, outOffset, totalRows); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java index 35245b11631..b7bb8cee6a4 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java @@ -235,25 +235,29 @@ public int drainTo(final OutputStream outputStream) throws IOException { static WritableObjectChunk, Values> extractChunkFromInputStream( final StreamReaderOptions options, - final Class> type, - final Class inComponentType, + final ChunkReadingFactory.ChunkTypeInfo typeInfo, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, final DataInput is, final WritableChunk outChunk, final int outOffset, - final int totalRows) throws IOException { + final int totalRows, + ChunkReadingFactory chunkReadingFactory) throws IOException { final FieldNodeInfo nodeInfo = fieldNodeIter.next(); final long validityBuffer = bufferInfoIter.nextLong(); final long offsetsBuffer = bufferInfoIter.nextLong(); - final Class componentType = VectorExpansionKernel.getComponentType(type, inComponentType); + final Class componentType = + VectorExpansionKernel.getComponentType(typeInfo.type(), typeInfo.componentType()); final ChunkType chunkType = ChunkType.fromElementType(componentType); if (nodeInfo.numElements == 0) { - try (final WritableChunk ignored = ChunkInputStreamGenerator.extractChunkFromInputStream( - options, chunkType, componentType, componentType.getComponentType(), fieldNodeIter, bufferInfoIter, + try (final WritableChunk ignored = chunkReadingFactory.extractChunkFromInputStream( + options, + new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, componentType.getComponentType(), + typeInfo.componentArrowField()), + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { if (outChunk != null) { @@ -296,8 +300,11 @@ static 
WritableObjectChunk, Values> extractChunkFromInputStream( } final VectorExpansionKernel kernel = VectorExpansionKernel.makeExpansionKernel(chunkType, componentType); - try (final WritableChunk inner = ChunkInputStreamGenerator.extractChunkFromInputStream( - options, chunkType, componentType, componentType.getComponentType(), fieldNodeIter, bufferInfoIter, + try (final WritableChunk inner = chunkReadingFactory.extractChunkFromInputStream( + options, + new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, componentType.getComponentType(), + typeInfo.componentArrowField()), + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { chunk = kernel.contract(inner, offsets, outChunk, outOffset, totalRows); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java index 71f8a81b0fe..b0c62c652c1 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java @@ -13,6 +13,8 @@ import io.deephaven.engine.table.impl.util.BarrageMessage; import io.deephaven.extensions.barrage.BarrageSubscriptionOptions; import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator; +import io.deephaven.extensions.barrage.chunk.ChunkReadingFactory; +import io.deephaven.extensions.barrage.chunk.DefaultChunkReadingFactory; import io.deephaven.extensions.barrage.table.BarrageTable; import io.deephaven.io.streams.ByteBufferInputStream; import io.deephaven.proto.util.Exceptions; @@ -45,6 +47,7 @@ public class ArrowToTableConverter { private Class[] columnTypes; private Class[] componentTypes; protected BarrageSubscriptionOptions options = DEFAULT_SER_OPTIONS; + private Schema schema; private volatile boolean completed = false; @@ -136,6 +139,7 @@ public synchronized void onCompleted() throws InterruptedException { } protected void parseSchema(final Schema header) { + this.schema = header; // The Schema instance (especially originated from Python) can't be assumed to be valid after the return // of this method. Until https://github.com/jpy-consortium/jpy/issues/126 is resolved, we need to make a copy of // the header to use after the return of this method. @@ -194,8 +198,10 @@ protected BarrageMessage createBarrageMessage(BarrageProtoUtil.MessageInfo mi, i msg.addColumnData[ci].data = new ArrayList<>(); final int factor = (columnConversionFactors == null) ? 
1 : columnConversionFactors[ci]; try { - acd.data.add(ChunkInputStreamGenerator.extractChunkFromInputStream(options, factor, - columnChunkTypes[ci], columnTypes[ci], componentTypes[ci], fieldNodeIter, + acd.data.add(DefaultChunkReadingFactory.INSTANCE.extractChunkFromInputStream(options, factor, + new ChunkReadingFactory.ChunkTypeInfo(columnChunkTypes[ci], columnTypes[ci], componentTypes[ci], + schema.fields(ci)), + fieldNodeIter, bufferInfoIter, mi.inputStream, null, 0, 0)); } catch (final IOException unexpected) { throw new UncheckedDeephavenException(unexpected); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java index d535ffd0254..571082227db 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java @@ -19,6 +19,8 @@ import io.deephaven.engine.rowset.RowSetShiftData; import io.deephaven.engine.table.impl.util.*; import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator; +import io.deephaven.extensions.barrage.chunk.ChunkReadingFactory; +import io.deephaven.extensions.barrage.chunk.DefaultChunkReadingFactory; import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.ChunkType; import io.deephaven.internal.log.LoggerFactory; @@ -26,6 +28,7 @@ import org.apache.arrow.flatbuf.Message; import org.apache.arrow.flatbuf.MessageHeader; import org.apache.arrow.flatbuf.RecordBatch; +import org.apache.arrow.flatbuf.Schema; import java.io.IOException; import java.io.InputStream; @@ -51,8 +54,11 @@ public class BarrageStreamReader implements StreamReader { private long numModRowsRead = 0; private long numModRowsTotal = 0; + private Schema schema; private BarrageMessage msg = null; + private final ChunkReadingFactory chunkReadingFactory = DefaultChunkReadingFactory.INSTANCE; + public BarrageStreamReader(final LongConsumer deserializeTmConsumer) { this.deserializeTmConsumer = deserializeTmConsumer; } @@ -239,8 +245,10 @@ public BarrageMessage safelyParseFrom(final StreamReaderOptions options, // fill the chunk with data and assign back into the array acd.data.set(lastChunkIndex, - ChunkInputStreamGenerator.extractChunkFromInputStream(options, columnChunkTypes[ci], - columnTypes[ci], componentTypes[ci], fieldNodeIter, bufferInfoIter, ois, + chunkReadingFactory.extractChunkFromInputStream(options, + new ChunkReadingFactory.ChunkTypeInfo(columnChunkTypes[ci], + columnTypes[ci], componentTypes[ci], schema.fields(ci)), + fieldNodeIter, bufferInfoIter, ois, chunk, chunk.size(), (int) batch.length())); chunk.setSize(chunk.size() + (int) batch.length()); } @@ -270,8 +278,10 @@ public BarrageMessage safelyParseFrom(final StreamReaderOptions options, // fill the chunk with data and assign back into the array mcd.data.set(lastChunkIndex, - ChunkInputStreamGenerator.extractChunkFromInputStream(options, columnChunkTypes[ci], - columnTypes[ci], componentTypes[ci], fieldNodeIter, bufferInfoIter, ois, + chunkReadingFactory.extractChunkFromInputStream(options, + new ChunkReadingFactory.ChunkTypeInfo(columnChunkTypes[ci], + columnTypes[ci], componentTypes[ci], null), + fieldNodeIter, bufferInfoIter, ois, chunk, chunk.size(), numRowsToRead)); chunk.setSize(chunk.size() + numRowsToRead); } @@ -282,6 +292,7 @@ public BarrageMessage safelyParseFrom(final StreamReaderOptions 
options, if (header != null && header.headerType() == MessageHeader.Schema) { // there is no body and our clients do not want to see schema messages + this.schema = (Schema) header.header(new Schema()); return null; } diff --git a/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java b/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java index e0e0b1f7741..8ff73e27d93 100644 --- a/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java +++ b/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java @@ -322,7 +322,7 @@ public MethodDescriptor getClientDoExchangeDescripto .build(); } - private class BarrageDataMarshaller implements MethodDescriptor.Marshaller { + private static class BarrageDataMarshaller implements MethodDescriptor.Marshaller { private final BarrageSnapshotOptions options; private final ChunkType[] columnChunkTypes; private final Class[] columnTypes; @@ -366,7 +366,6 @@ public void onError(@NotNull final Throwable t) { } } - /** * The Completable Future is used to encapsulate the concept that the table is filled with requested data. */ From 817fea570cd82fd6ad140c8037b98abdbd407f4b Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 25 Jun 2024 06:59:52 -0500 Subject: [PATCH 085/219] Commit #2, mostly mechanical changes, splitting creation and reading --- .../chunk/ChunkInputStreamGenerator.java | 6 +- .../extensions/barrage/chunk/ChunkReader.java | 34 +++ .../barrage/chunk/ChunkReadingFactory.java | 36 +-- .../chunk/DefaultChunkReadingFactory.java | 256 ++++++++++-------- .../VarListChunkInputStreamGenerator.java | 28 +- .../VectorChunkInputStreamGenerator.java | 29 +- .../barrage/util/ArrowToTableConverter.java | 26 +- .../barrage/util/BarrageStreamReader.java | 33 ++- 8 files changed, 245 insertions(+), 203 deletions(-) create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReader.java diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java index 98cf03908e8..a2ae09fb1d0 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java @@ -197,7 +197,7 @@ static WritableChunk extractChunkFromInputStream( } @Deprecated - static WritableChunk extractChunkFromInputStream( + private static WritableChunk extractChunkFromInputStream( final StreamReaderOptions options, final int factor, final ChunkType chunkType, final Class type, final Class componentType, @@ -206,8 +206,8 @@ static WritableChunk extractChunkFromInputStream( final DataInput is, final WritableChunk outChunk, final int outOffset, final int totalRows) throws IOException { return DefaultChunkReadingFactory.INSTANCE.extractChunkFromInputStream(options, factor, - new ChunkReadingFactory.ChunkTypeInfo(chunkType, type, componentType, null), fieldNodeIter, - bufferInfoIter, is, outChunk, outOffset, totalRows); + new ChunkReadingFactory.ChunkTypeInfo(chunkType, type, componentType, null)) + .read(fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } /** diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReader.java 
b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReader.java new file mode 100644 index 00000000000..de90744fc0d --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReader.java @@ -0,0 +1,34 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.attributes.Values; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; + +/** + * Consumes Flight/Barrage streams and transforms them into WritableChunks. + */ +public interface ChunkReader { + /** + * + * @param fieldNodeIter + * @param bufferInfoIter + * @param is + * @param outChunk + * @param outOffset + * @param totalRows + * @return + */ + WritableChunk read(final Iterator fieldNodeIter, + final PrimitiveIterator.OfLong bufferInfoIter, + final DataInput is, + final WritableChunk outChunk, + final int outOffset, + final int totalRows) throws IOException; +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java index d7cfb18db00..d624f5bc736 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java @@ -67,50 +67,24 @@ public Field componentArrowField() { * @param options * @param factor * @param typeInfo - * @param fieldNodeIter - * @param bufferInfoIter - * @param is - * @param outChunk - * @param outOffset - * @param totalRows * @return * @throws IOException */ - WritableChunk extractChunkFromInputStream( + ChunkReader extractChunkFromInputStream( final StreamReaderOptions options, final int factor, - final ChunkTypeInfo typeInfo, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException; + final ChunkTypeInfo typeInfo) throws IOException; /** * * @param options * @param typeInfo - * @param fieldNodeIter - * @param bufferInfoIter - * @param is - * @param outChunk - * @param offset - * @param totalRows * @return * @throws IOException */ - default WritableChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final ChunkTypeInfo typeInfo, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int offset, - final int totalRows) throws IOException { - return extractChunkFromInputStream(options, 1, typeInfo, fieldNodeIter, bufferInfoIter, is, outChunk, offset, - totalRows); + default ChunkReader extractChunkFromInputStream(final StreamReaderOptions options, final ChunkTypeInfo typeInfo) + throws IOException { + return extractChunkFromInputStream(options, 1, typeInfo); } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java index e7a5bcd665c..6a7ceead2d2 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java +++ 
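The interface above is the reading half of the split this commit describes; the factory change below supplies the creation half, returning a ChunkReader per column instead of decoding inline. A hedged sketch of the resulting call shape follows; the wrapper method, the explicit generics (stripped in the flowed diff text), and the stream plumbing are assumptions for illustration, not code from this patch:

import io.deephaven.chunk.ChunkType;
import io.deephaven.chunk.WritableChunk;
import io.deephaven.chunk.attributes.Values;
import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator;
import io.deephaven.extensions.barrage.chunk.ChunkReader;
import io.deephaven.extensions.barrage.chunk.ChunkReadingFactory;
import io.deephaven.extensions.barrage.chunk.DefaultChunkReadingFactory;
import io.deephaven.extensions.barrage.util.StreamReaderOptions;
import org.apache.arrow.flatbuf.Field;

import java.io.DataInput;
import java.io.IOException;
import java.util.Iterator;
import java.util.PrimitiveIterator;

final class ChunkReaderSketch {
    static WritableChunk<Values> readDoubleColumn(
            StreamReaderOptions options,
            Field arrowField,
            Iterator<ChunkInputStreamGenerator.FieldNodeInfo> fieldNodeIter,
            PrimitiveIterator.OfLong bufferInfoIter,
            DataInput is,
            int totalRows) throws IOException {
        // creation: the switch over types runs once per column, not once per batch
        ChunkReader reader = DefaultChunkReadingFactory.INSTANCE.extractChunkFromInputStream(
                options,
                new ChunkReadingFactory.ChunkTypeInfo(ChunkType.Double, double.class, null, arrowField));
        // reading: passing a null outChunk asks the reader to allocate the result
        return reader.read(fieldNodeIter, bufferInfoIter, is, null, 0, totalRows);
    }
}

Resolving the reader once per column also lines up with the previous commit message's next step: the schema arrives at the start of the stream, so readers can be bound to columns before any record batch data is seen.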
b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java @@ -36,167 +36,201 @@ public final class DefaultChunkReadingFactory implements ChunkReadingFactory { public static final ChunkReadingFactory INSTANCE = new DefaultChunkReadingFactory(); @Override - public WritableChunk extractChunkFromInputStream(StreamReaderOptions options, int factor, - ChunkTypeInfo typeInfo, Iterator fieldNodeIter, - PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, - int totalRows) throws IOException { + public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int factor, + ChunkTypeInfo typeInfo) throws IOException { // TODO (deephaven-core#5453): pass in ArrowType to enable ser/deser of single java class in multiple formats switch (typeInfo.chunkType()) { case Boolean: throw new UnsupportedOperationException("Booleans are reinterpreted as bytes"); case Char: - return CharChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> CharChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Byte: if (typeInfo.type() == Boolean.class || typeInfo.type() == boolean.class) { - return BooleanChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> BooleanChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } - return ByteChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> ByteChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Short: - return ShortChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> ShortChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Int: - return IntChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> IntChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Long: if (factor == 1) { - return LongChunkInputStreamGenerator.extractChunkFromInputStream( - options, - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithConversion( - options, - (long v) -> v == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_LONG : (v * factor), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> LongChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> LongChunkInputStreamGenerator.extractChunkFromInputStreamWithConversion( + options, + (long v) -> v == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : (v * factor), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Float: - return FloatChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> FloatChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Double: - return DoubleChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> DoubleChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Object: if (typeInfo.type().isArray()) { if (typeInfo.componentType() == byte.class) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - is, - fieldNodeIter, - bufferInfoIter, - (buf, off, len) -> Arrays.copyOfRange(buf, off, off + len), - outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + is, + fieldNodeIter, + bufferInfoIter, + (buf, off, len) -> Arrays.copyOfRange(buf, off, off + len), + outChunk, outOffset, totalRows); } else { - return VarListChunkInputStreamGenerator.extractChunkFromInputStream(options, typeInfo, - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows, this); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> VarListChunkInputStreamGenerator.extractChunkFromInputStream(options, + typeInfo, + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows, this); } } if (Vector.class.isAssignableFrom(typeInfo.type())) { - return VectorChunkInputStreamGenerator.extractChunkFromInputStream(options, - typeInfo, fieldNodeIter, bufferInfoIter, - is, outChunk, outOffset, totalRows, this); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> VectorChunkInputStreamGenerator.extractChunkFromInputStream(options, + typeInfo, fieldNodeIter, bufferInfoIter, + is, outChunk, outOffset, totalRows, this); } if (typeInfo.type() == BigInteger.class) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - is, - fieldNodeIter, - bufferInfoIter, - BigInteger::new, - outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + is, + fieldNodeIter, + bufferInfoIter, + BigInteger::new, + outChunk, outOffset, totalRows); } if (typeInfo.type() == BigDecimal.class) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - is, - fieldNodeIter, - bufferInfoIter, - (final byte[] buf, final int offset, final int 
length) -> { - // read the int scale value as little endian, arrow's endianness. - final byte b1 = buf[offset]; - final byte b2 = buf[offset + 1]; - final byte b3 = buf[offset + 2]; - final byte b4 = buf[offset + 3]; - final int scale = b4 << 24 | (b3 & 0xFF) << 16 | (b2 & 0xFF) << 8 | (b1 & 0xFF); - return new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale); - }, - outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + is, + fieldNodeIter, + bufferInfoIter, + (final byte[] buf, final int offset, final int length) -> { + // read the int scale value as little endian, arrow's endianness. + final byte b1 = buf[offset]; + final byte b2 = buf[offset + 1]; + final byte b3 = buf[offset + 2]; + final byte b4 = buf[offset + 3]; + final int scale = b4 << 24 | (b3 & 0xFF) << 16 | (b2 & 0xFF) << 8 | (b1 & 0xFF); + return new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale); + }, + outChunk, outOffset, totalRows); } if (typeInfo.type() == Instant.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Long.BYTES, options, io -> { - final long value = io.readLong(); - if (value == QueryConstants.NULL_LONG) { - return null; - } - return DateTimeUtils.epochNanosToInstant(value * factor); - }, - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> FixedWidthChunkInputStreamGenerator + .extractChunkFromInputStreamWithTypeConversion( + Long.BYTES, options, io -> { + final long value = io.readLong(); + if (value == QueryConstants.NULL_LONG) { + return null; + } + return DateTimeUtils.epochNanosToInstant(value * factor); + }, + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } if (typeInfo.type() == ZonedDateTime.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Long.BYTES, options, io -> { - final long value = io.readLong(); - if (value == QueryConstants.NULL_LONG) { - return null; - } - return DateTimeUtils.epochNanosToZonedDateTime( - value * factor, DateTimeUtils.timeZone()); - }, - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> FixedWidthChunkInputStreamGenerator + .extractChunkFromInputStreamWithTypeConversion( + Long.BYTES, options, io -> { + final long value = io.readLong(); + if (value == QueryConstants.NULL_LONG) { + return null; + } + return DateTimeUtils.epochNanosToZonedDateTime( + value * factor, DateTimeUtils.timeZone()); + }, + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } if (typeInfo.type() == Byte.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Byte.BYTES, options, io -> TypeUtils.box(io.readByte()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> FixedWidthChunkInputStreamGenerator + .extractChunkFromInputStreamWithTypeConversion( + Byte.BYTES, options, io -> TypeUtils.box(io.readByte()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } if (typeInfo.type() == Character.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Character.BYTES, options, io -> 
TypeUtils.box(io.readChar()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> FixedWidthChunkInputStreamGenerator + .extractChunkFromInputStreamWithTypeConversion( + Character.BYTES, options, io -> TypeUtils.box(io.readChar()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } if (typeInfo.type() == Double.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Double.BYTES, options, io -> TypeUtils.box(io.readDouble()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> FixedWidthChunkInputStreamGenerator + .extractChunkFromInputStreamWithTypeConversion( + Double.BYTES, options, io -> TypeUtils.box(io.readDouble()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } if (typeInfo.type() == Float.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Float.BYTES, options, io -> TypeUtils.box(io.readFloat()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> FixedWidthChunkInputStreamGenerator + .extractChunkFromInputStreamWithTypeConversion( + Float.BYTES, options, io -> TypeUtils.box(io.readFloat()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } if (typeInfo.type() == Integer.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Integer.BYTES, options, io -> TypeUtils.box(io.readInt()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> FixedWidthChunkInputStreamGenerator + .extractChunkFromInputStreamWithTypeConversion( + Integer.BYTES, options, io -> TypeUtils.box(io.readInt()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } if (typeInfo.type() == Long.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Long.BYTES, options, io -> TypeUtils.box(io.readLong()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> FixedWidthChunkInputStreamGenerator + .extractChunkFromInputStreamWithTypeConversion( + Long.BYTES, options, io -> TypeUtils.box(io.readLong()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } if (typeInfo.type() == Short.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Short.BYTES, options, io -> TypeUtils.box(io.readShort()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> FixedWidthChunkInputStreamGenerator + .extractChunkFromInputStreamWithTypeConversion( + Short.BYTES, options, io -> TypeUtils.box(io.readShort()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } if (typeInfo.type() == LocalDate.class) { - return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( - options, - value -> value == QueryConstants.NULL_LONG - ? 
null - : LocalDate.ofEpochDay(value / MS_PER_DAY), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( + options, + value -> value == QueryConstants.NULL_LONG + ? null + : LocalDate.ofEpochDay(value / MS_PER_DAY), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } if (typeInfo.type() == LocalTime.class) { - return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( - options, - value -> value == QueryConstants.NULL_LONG ? null : LocalTime.ofNanoOfDay(value), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( + options, + value -> value == QueryConstants.NULL_LONG ? null : LocalTime.ofNanoOfDay(value), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } if (typeInfo.type() == String.class || options.columnConversionMode().equals(ColumnConversionMode.Stringify)) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream(is, fieldNodeIter, - bufferInfoIter, - (buf, off, len) -> new String(buf, off, len, Charsets.UTF_8), outChunk, outOffset, - totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream(is, + fieldNodeIter, + bufferInfoIter, + (buf, off, len) -> new String(buf, off, len, Charsets.UTF_8), outChunk, outOffset, + totalRows); } throw new UnsupportedOperationException( "Do not yet support column conversion mode: " + options.columnConversionMode()); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java index e1075e7dcf3..a1de362f385 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java @@ -244,10 +244,6 @@ static WritableObjectChunk extractChunkFromInputStream( final int totalRows, ChunkReadingFactory chunkReadingFactory) throws IOException { - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long offsetsBuffer = bufferInfoIter.nextLong(); - final Class componentType = typeInfo.type().getComponentType(); final Class innerComponentType = componentType != null ? 
componentType.getComponentType() : null; @@ -259,13 +255,18 @@ static WritableObjectChunk extractChunkFromInputStream( chunkType = ChunkType.fromElementType(componentType); } + ChunkReader componentReader = chunkReadingFactory.extractChunkFromInputStream( + options, + new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, innerComponentType, + typeInfo.componentArrowField())); + + final FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long offsetsBuffer = bufferInfoIter.nextLong(); + if (nodeInfo.numElements == 0) { - try (final WritableChunk ignored = chunkReadingFactory.extractChunkFromInputStream( - options, - new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, innerComponentType, - typeInfo.componentArrowField()), - fieldNodeIter, - bufferInfoIter, is, null, 0, 0)) { + try (final WritableChunk ignored = + componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { return WritableObjectChunk.makeWritableChunk(nodeInfo.numElements); } } @@ -303,11 +304,8 @@ static WritableObjectChunk extractChunkFromInputStream( } final ArrayExpansionKernel kernel = ArrayExpansionKernel.makeExpansionKernel(chunkType, componentType); - try (final WritableChunk inner = chunkReadingFactory.extractChunkFromInputStream( - options, - new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, innerComponentType, - typeInfo.componentArrowField()), - fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + try (final WritableChunk inner = + componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { chunk = kernel.contract(inner, offsets, outChunk, outOffset, totalRows); long nextValid = 0; diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java index b7bb8cee6a4..5ee91971c5f 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java @@ -244,22 +244,22 @@ static WritableObjectChunk, Values> extractChunkFromInputStream( final int totalRows, ChunkReadingFactory chunkReadingFactory) throws IOException { + final Class componentType = + VectorExpansionKernel.getComponentType(typeInfo.type(), typeInfo.componentType()); + final ChunkType chunkType = ChunkType.fromElementType(componentType); + ChunkReader componentReader = chunkReadingFactory.extractChunkFromInputStream( + options, + new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, componentType.getComponentType(), + typeInfo.componentArrowField())); + final FieldNodeInfo nodeInfo = fieldNodeIter.next(); final long validityBuffer = bufferInfoIter.nextLong(); final long offsetsBuffer = bufferInfoIter.nextLong(); - final Class componentType = - VectorExpansionKernel.getComponentType(typeInfo.type(), typeInfo.componentType()); - final ChunkType chunkType = ChunkType.fromElementType(componentType); if (nodeInfo.numElements == 0) { - try (final WritableChunk ignored = chunkReadingFactory.extractChunkFromInputStream( - options, - new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, componentType.getComponentType(), - typeInfo.componentArrowField()), - fieldNodeIter, bufferInfoIter, - is, - null, 0, 0)) { + try (final WritableChunk ignored = + componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 
0, 0)) { if (outChunk != null) { return outChunk.asWritableObjectChunk(); } @@ -300,13 +300,8 @@ static WritableObjectChunk, Values> extractChunkFromInputStream( } final VectorExpansionKernel kernel = VectorExpansionKernel.makeExpansionKernel(chunkType, componentType); - try (final WritableChunk inner = chunkReadingFactory.extractChunkFromInputStream( - options, - new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, componentType.getComponentType(), - typeInfo.componentArrowField()), - fieldNodeIter, bufferInfoIter, - is, - null, 0, 0)) { + try (final WritableChunk inner = + componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { chunk = kernel.contract(inner, offsets, outChunk, outOffset, totalRows); long nextValid = 0; diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java index b0c62c652c1..04e257263a9 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java @@ -13,6 +13,7 @@ import io.deephaven.engine.table.impl.util.BarrageMessage; import io.deephaven.extensions.barrage.BarrageSubscriptionOptions; import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator; +import io.deephaven.extensions.barrage.chunk.ChunkReader; import io.deephaven.extensions.barrage.chunk.ChunkReadingFactory; import io.deephaven.extensions.barrage.chunk.DefaultChunkReadingFactory; import io.deephaven.extensions.barrage.table.BarrageTable; @@ -31,6 +32,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; +import java.util.List; import java.util.PrimitiveIterator; import static io.deephaven.extensions.barrage.util.BarrageProtoUtil.DEFAULT_SER_OPTIONS; @@ -42,12 +44,10 @@ public class ArrowToTableConverter { protected long totalRowsRead = 0; protected BarrageTable resultTable; - private ChunkType[] columnChunkTypes; - private int[] columnConversionFactors; private Class[] columnTypes; private Class[] componentTypes; protected BarrageSubscriptionOptions options = DEFAULT_SER_OPTIONS; - private Schema schema; + private final List readers = new ArrayList<>(); private volatile boolean completed = false; @@ -139,7 +139,6 @@ public synchronized void onCompleted() throws InterruptedException { } protected void parseSchema(final Schema header) { - this.schema = header; // The Schema instance (especially originated from Python) can't be assumed to be valid after the return // of this method. Until https://github.com/jpy-consortium/jpy/issues/126 is resolved, we need to make a copy of // the header to use after the return of this method. @@ -151,10 +150,18 @@ protected void parseSchema(final Schema header) { resultTable = BarrageTable.make(null, result.tableDef, result.attributes, null); resultTable.setFlat(); - columnConversionFactors = result.conversionFactors; - columnChunkTypes = result.computeWireChunkTypes(); + ChunkType[] columnChunkTypes = result.computeWireChunkTypes(); columnTypes = result.computeWireTypes(); componentTypes = result.computeWireComponentTypes(); + // TODO see the note above, this is not safe since the buffer originated in python - we need to copy the schema + // before doing this + for (int i = 0; i < header.fieldsLength(); i++) { + final int factor = (result.conversionFactors == null) ? 
1 : result.conversionFactors[i]; + ChunkReader reader = DefaultChunkReadingFactory.INSTANCE.extractChunkFromInputStream(options, factor, + new ChunkReadingFactory.ChunkTypeInfo(columnChunkTypes[i], columnTypes[i], componentTypes[i], + header.fields(i))); + readers.add(reader); + } // retain reference until the resultTable can be sealed resultTable.retainReference(); @@ -196,13 +203,8 @@ protected BarrageMessage createBarrageMessage(BarrageProtoUtil.MessageInfo mi, i final BarrageMessage.AddColumnData acd = new BarrageMessage.AddColumnData(); msg.addColumnData[ci] = acd; msg.addColumnData[ci].data = new ArrayList<>(); - final int factor = (columnConversionFactors == null) ? 1 : columnConversionFactors[ci]; try { - acd.data.add(DefaultChunkReadingFactory.INSTANCE.extractChunkFromInputStream(options, factor, - new ChunkReadingFactory.ChunkTypeInfo(columnChunkTypes[ci], columnTypes[ci], componentTypes[ci], - schema.fields(ci)), - fieldNodeIter, - bufferInfoIter, mi.inputStream, null, 0, 0)); + acd.data.add(readers.get(ci).read(fieldNodeIter, bufferInfoIter, mi.inputStream, null, 0, 0)); } catch (final IOException unexpected) { throw new UncheckedDeephavenException(unexpected); } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java index 571082227db..b38b1eedd57 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java @@ -19,12 +19,14 @@ import io.deephaven.engine.rowset.RowSetShiftData; import io.deephaven.engine.table.impl.util.*; import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator; +import io.deephaven.extensions.barrage.chunk.ChunkReader; import io.deephaven.extensions.barrage.chunk.ChunkReadingFactory; import io.deephaven.extensions.barrage.chunk.DefaultChunkReadingFactory; import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.ChunkType; import io.deephaven.internal.log.LoggerFactory; import io.deephaven.io.logger.Logger; +import org.apache.arrow.flatbuf.Field; import org.apache.arrow.flatbuf.Message; import org.apache.arrow.flatbuf.MessageHeader; import org.apache.arrow.flatbuf.RecordBatch; @@ -37,6 +39,7 @@ import java.util.Arrays; import java.util.BitSet; import java.util.Iterator; +import java.util.List; import java.util.PrimitiveIterator; import java.util.function.LongConsumer; @@ -54,10 +57,10 @@ public class BarrageStreamReader implements StreamReader { private long numModRowsRead = 0; private long numModRowsTotal = 0; - private Schema schema; private BarrageMessage msg = null; private final ChunkReadingFactory chunkReadingFactory = DefaultChunkReadingFactory.INSTANCE; + private final List readers = new ArrayList<>(); public BarrageStreamReader(final LongConsumer deserializeTmConsumer) { this.deserializeTmConsumer = deserializeTmConsumer; @@ -244,12 +247,8 @@ public BarrageMessage safelyParseFrom(final StreamReaderOptions options, } // fill the chunk with data and assign back into the array - acd.data.set(lastChunkIndex, - chunkReadingFactory.extractChunkFromInputStream(options, - new ChunkReadingFactory.ChunkTypeInfo(columnChunkTypes[ci], - columnTypes[ci], componentTypes[ci], schema.fields(ci)), - fieldNodeIter, bufferInfoIter, ois, - chunk, chunk.size(), (int) batch.length())); + acd.data.set(lastChunkIndex, 
readers.get(ci).read(fieldNodeIter, bufferInfoIter, ois, chunk, + chunk.size(), (int) batch.length())); chunk.setSize(chunk.size() + (int) batch.length()); } numAddRowsRead += batch.length(); @@ -277,12 +276,8 @@ public BarrageMessage safelyParseFrom(final StreamReaderOptions options, } // fill the chunk with data and assign back into the array - mcd.data.set(lastChunkIndex, - chunkReadingFactory.extractChunkFromInputStream(options, - new ChunkReadingFactory.ChunkTypeInfo(columnChunkTypes[ci], - columnTypes[ci], componentTypes[ci], null), - fieldNodeIter, bufferInfoIter, ois, - chunk, chunk.size(), numRowsToRead)); + mcd.data.set(lastChunkIndex, readers.get(ci).read(fieldNodeIter, bufferInfoIter, ois, chunk, + chunk.size(), numRowsToRead)); chunk.setSize(chunk.size() + numRowsToRead); } numModRowsRead += batch.length(); @@ -292,7 +287,17 @@ public BarrageMessage safelyParseFrom(final StreamReaderOptions options, if (header != null && header.headerType() == MessageHeader.Schema) { // there is no body and our clients do not want to see schema messages - this.schema = (Schema) header.header(new Schema()); + Schema schema = new Schema(); + header.header(schema); + for (int i = 0; i < schema.fieldsLength(); i++) { + // TODO as with ArrowToTableConverter, see about copying the bytebuffer so we control the payload + // ourselves + Field field = schema.fields(i); + ChunkReader chunkReader = chunkReadingFactory.extractChunkFromInputStream(options, + new ChunkReadingFactory.ChunkTypeInfo(columnChunkTypes[i], columnTypes[i], + componentTypes[i], field)); + readers.add(chunkReader); + } return null; } From 3d287235d0231584a1af93559ce1e8d826a9ea79 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 25 Jun 2024 08:06:43 -0500 Subject: [PATCH 086/219] Commit #3, create vector/array chunk readers to do type lookups once --- .../barrage/chunk/ChunkReadingFactory.java | 11 +- .../chunk/DefaultChunkReadingFactory.java | 18 +-- .../VarListChunkInputStreamGenerator.java | 90 -------------- .../barrage/chunk/VarListChunkReader.java | 114 ++++++++++++++++++ .../VectorChunkInputStreamGenerator.java | 87 ------------- .../barrage/chunk/VectorChunkReader.java | 110 +++++++++++++++++ 6 files changed, 230 insertions(+), 200 deletions(-) create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkReader.java create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkReader.java diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java index d624f5bc736..d3f8ba84a95 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java @@ -68,22 +68,17 @@ public Field componentArrowField() { * @param factor * @param typeInfo * @return - * @throws IOException */ - ChunkReader extractChunkFromInputStream( - final StreamReaderOptions options, - final int factor, - final ChunkTypeInfo typeInfo) throws IOException; + ChunkReader extractChunkFromInputStream(final StreamReaderOptions options, final int factor, + final ChunkTypeInfo typeInfo); /** * * @param options * @param typeInfo * @return - * @throws IOException */ - default ChunkReader extractChunkFromInputStream(final StreamReaderOptions options, final ChunkTypeInfo typeInfo) - throws IOException { + 
default ChunkReader extractChunkFromInputStream(final StreamReaderOptions options, final ChunkTypeInfo typeInfo) { return extractChunkFromInputStream(options, 1, typeInfo); } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java index 6a7ceead2d2..df41a1ae7ca 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java @@ -4,8 +4,6 @@ package io.deephaven.extensions.barrage.chunk; import com.google.common.base.Charsets; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.attributes.Values; import io.deephaven.extensions.barrage.ColumnConversionMode; import io.deephaven.extensions.barrage.util.StreamReaderOptions; import io.deephaven.time.DateTimeUtils; @@ -13,8 +11,6 @@ import io.deephaven.util.type.TypeUtils; import io.deephaven.vector.Vector; -import java.io.DataInput; -import java.io.IOException; import java.math.BigDecimal; import java.math.BigInteger; import java.time.Instant; @@ -22,8 +18,6 @@ import java.time.LocalTime; import java.time.ZonedDateTime; import java.util.Arrays; -import java.util.Iterator; -import java.util.PrimitiveIterator; import static io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator.MS_PER_DAY; @@ -37,7 +31,7 @@ public final class DefaultChunkReadingFactory implements ChunkReadingFactory { @Override public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int factor, - ChunkTypeInfo typeInfo) throws IOException { + ChunkTypeInfo typeInfo) { // TODO (deephaven-core#5453): pass in ArrowType to enable ser/deser of single java class in multiple formats switch (typeInfo.chunkType()) { case Boolean: @@ -93,17 +87,11 @@ public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int (buf, off, len) -> Arrays.copyOfRange(buf, off, off + len), outChunk, outOffset, totalRows); } else { - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> VarListChunkInputStreamGenerator.extractChunkFromInputStream(options, - typeInfo, - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows, this); + return new VarListChunkReader<>(options, typeInfo, this); } } if (Vector.class.isAssignableFrom(typeInfo.type())) { - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> VectorChunkInputStreamGenerator.extractChunkFromInputStream(options, - typeInfo, fieldNodeIter, bufferInfoIter, - is, outChunk, outOffset, totalRows, this); + return new VectorChunkReader(options, typeInfo, this); } if (typeInfo.type() == BigInteger.class) { return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java index a1de362f385..470b0b87291 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java @@ -233,95 +233,5 @@ public int drainTo(final OutputStream outputStream) throws IOException { } } - static WritableObjectChunk extractChunkFromInputStream( - 
final StreamReaderOptions options, - final ChunkReadingFactory.ChunkTypeInfo typeInfo, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows, - ChunkReadingFactory chunkReadingFactory) throws IOException { - - final Class componentType = typeInfo.type().getComponentType(); - final Class innerComponentType = componentType != null ? componentType.getComponentType() : null; - - final ChunkType chunkType; - if (componentType == boolean.class || componentType == Boolean.class) { - // Note: Internally booleans are passed around as bytes, but the wire format is packed bits. - chunkType = ChunkType.Byte; - } else { - chunkType = ChunkType.fromElementType(componentType); - } - - ChunkReader componentReader = chunkReadingFactory.extractChunkFromInputStream( - options, - new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, innerComponentType, - typeInfo.componentArrowField())); - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long offsetsBuffer = bufferInfoIter.nextLong(); - - if (nodeInfo.numElements == 0) { - try (final WritableChunk ignored = - componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - return WritableObjectChunk.makeWritableChunk(nodeInfo.numElements); - } - } - - final WritableObjectChunk chunk; - final int numValidityLongs = (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs); - final WritableIntChunk offsets = - WritableIntChunk.makeWritableChunk(nodeInfo.numElements + 1)) { - // Read validity buffer: - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity buffer by here - - // Read offsets: - final long offBufRead = (nodeInfo.numElements + 1L) * Integer.BYTES; - if (offsetsBuffer < offBufRead) { - throw new IllegalStateException("offset buffer is too short for the expected number of elements"); - } - for (int i = 0; i < nodeInfo.numElements + 1; ++i) { - offsets.set(i, is.readInt()); - } - if (offBufRead < offsetsBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, offsetsBuffer - offBufRead)); - } - - final ArrayExpansionKernel kernel = ArrayExpansionKernel.makeExpansionKernel(chunkType, componentType); - try (final WritableChunk inner = - componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - chunk = kernel.contract(inner, offsets, outChunk, outOffset, totalRows); - - long nextValid = 0; - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - if ((ii % 64) == 0) { - nextValid = isValid.get(ii / 64); - } - if ((nextValid & 0x1) == 0x0) { - chunk.set(outOffset + ii, null); - } - nextValid >>= 1; - } - } - } - - return chunk; - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkReader.java new file mode 100644 index 00000000000..71c294d6387 --- /dev/null +++ 
b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkReader.java @@ -0,0 +1,114 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.chunk.ChunkType; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableIntChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.ChunkPositions; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.chunk.array.ArrayExpansionKernel; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; + +public class VarListChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "VarListChunkReader"; + + private final ArrayExpansionKernel kernel; + private final ChunkReader componentReader; + + public VarListChunkReader(final StreamReaderOptions options, final ChunkReadingFactory.ChunkTypeInfo typeInfo, + ChunkReadingFactory chunkReadingFactory) { + final Class componentType = typeInfo.type().getComponentType(); + final Class innerComponentType = componentType != null ? componentType.getComponentType() : null; + + final ChunkType chunkType; + if (componentType == boolean.class || componentType == Boolean.class) { + // Note: Internally booleans are passed around as bytes, but the wire format is packed bits. + chunkType = ChunkType.Byte; + } else { + chunkType = ChunkType.fromElementType(componentType); + } + kernel = ArrayExpansionKernel.makeExpansionKernel(chunkType, componentType); + + componentReader = chunkReadingFactory.extractChunkFromInputStream( + options, + new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, innerComponentType, + typeInfo.componentArrowField())); + } + + @Override + public WritableObjectChunk read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long offsetsBuffer = bufferInfoIter.nextLong(); + + if (nodeInfo.numElements == 0) { + try (final WritableChunk ignored = + componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + return WritableObjectChunk.makeWritableChunk(nodeInfo.numElements); + } + } + + final WritableObjectChunk chunk; + final int numValidityLongs = (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs); + final WritableIntChunk offsets = + WritableIntChunk.makeWritableChunk(nodeInfo.numElements + 1)) { + // Read validity buffer: + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + // Read offsets: + final long offBufRead = (nodeInfo.numElements + 1L) * Integer.BYTES; + if 
(offsetsBuffer < offBufRead) { + throw new IllegalStateException("offset buffer is too short for the expected number of elements"); + } + for (int i = 0; i < nodeInfo.numElements + 1; ++i) { + offsets.set(i, is.readInt()); + } + if (offBufRead < offsetsBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, offsetsBuffer - offBufRead)); + } + + try (final WritableChunk inner = + componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + chunk = kernel.contract(inner, offsets, outChunk, outOffset, totalRows); + + long nextValid = 0; + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + if ((ii % 64) == 0) { + nextValid = isValid.get(ii / 64); + } + if ((nextValid & 0x1) == 0x0) { + chunk.set(outOffset + ii, null); + } + nextValid >>= 1; + } + } + } + + return chunk; + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java index 5ee91971c5f..620ae84541f 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java @@ -232,91 +232,4 @@ public int drainTo(final OutputStream outputStream) throws IOException { return LongSizedDataStructure.intSize(DEBUG_NAME, bytesWritten); } } - - static WritableObjectChunk, Values> extractChunkFromInputStream( - final StreamReaderOptions options, - final ChunkReadingFactory.ChunkTypeInfo typeInfo, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows, - ChunkReadingFactory chunkReadingFactory) throws IOException { - - final Class componentType = - VectorExpansionKernel.getComponentType(typeInfo.type(), typeInfo.componentType()); - final ChunkType chunkType = ChunkType.fromElementType(componentType); - ChunkReader componentReader = chunkReadingFactory.extractChunkFromInputStream( - options, - new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, componentType.getComponentType(), - typeInfo.componentArrowField())); - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long offsetsBuffer = bufferInfoIter.nextLong(); - - - if (nodeInfo.numElements == 0) { - try (final WritableChunk ignored = - componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - if (outChunk != null) { - return outChunk.asWritableObjectChunk(); - } - return WritableObjectChunk.makeWritableChunk(totalRows); - } - } - - final WritableObjectChunk, Values> chunk; - final int numValidityLongs = (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs); - final WritableIntChunk offsets = - WritableIntChunk.makeWritableChunk(nodeInfo.numElements + 1)) { - // Read validity buffer: - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity 
buffer by here - - // Read offsets: - final long offBufRead = (nodeInfo.numElements + 1L) * Integer.BYTES; - if (offsetsBuffer < offBufRead) { - throw new IllegalStateException("offset buffer is too short for the expected number of elements"); - } - for (int i = 0; i < nodeInfo.numElements + 1; ++i) { - offsets.set(i, is.readInt()); - } - if (offBufRead < offsetsBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, offsetsBuffer - offBufRead)); - } - - final VectorExpansionKernel kernel = VectorExpansionKernel.makeExpansionKernel(chunkType, componentType); - try (final WritableChunk inner = - componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - chunk = kernel.contract(inner, offsets, outChunk, outOffset, totalRows); - - long nextValid = 0; - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - if ((ii % 64) == 0) { - nextValid = isValid.get(ii / 64); - } - if ((nextValid & 0x1) == 0x0) { - chunk.set(outOffset + ii, null); - } - nextValid >>= 1; - } - } - } - - return chunk; - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkReader.java new file mode 100644 index 00000000000..decf9419d9d --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkReader.java @@ -0,0 +1,110 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.chunk.ChunkType; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableIntChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.ChunkPositions; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.chunk.vector.VectorExpansionKernel; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.datastructures.LongSizedDataStructure; +import io.deephaven.vector.Vector; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; + +public class VectorChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "VectorChunkReader"; + private final ChunkReader componentReader; + private final VectorExpansionKernel kernel; + + public VectorChunkReader(final StreamReaderOptions options, final ChunkReadingFactory.ChunkTypeInfo typeInfo, + ChunkReadingFactory chunkReadingFactory) { + + final Class componentType = + VectorExpansionKernel.getComponentType(typeInfo.type(), typeInfo.componentType()); + final ChunkType chunkType = ChunkType.fromElementType(componentType); + componentReader = chunkReadingFactory.extractChunkFromInputStream( + options, + new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, componentType.getComponentType(), + typeInfo.componentArrowField())); + kernel = VectorExpansionKernel.makeExpansionKernel(chunkType, componentType); + } + + @Override + public WritableObjectChunk, Values> read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long offsetsBuffer = bufferInfoIter.nextLong(); + + if (nodeInfo.numElements == 0) { + try (final 
WritableChunk ignored = + componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + if (outChunk != null) { + return outChunk.asWritableObjectChunk(); + } + return WritableObjectChunk.makeWritableChunk(totalRows); + } + } + + final WritableObjectChunk, Values> chunk; + final int numValidityLongs = (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs); + final WritableIntChunk offsets = + WritableIntChunk.makeWritableChunk(nodeInfo.numElements + 1)) { + // Read validity buffer: + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + // Read offsets: + final long offBufRead = (nodeInfo.numElements + 1L) * Integer.BYTES; + if (offsetsBuffer < offBufRead) { + throw new IllegalStateException("offset buffer is too short for the expected number of elements"); + } + for (int i = 0; i < nodeInfo.numElements + 1; ++i) { + offsets.set(i, is.readInt()); + } + if (offBufRead < offsetsBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, offsetsBuffer - offBufRead)); + } + + try (final WritableChunk inner = + componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + chunk = kernel.contract(inner, offsets, outChunk, outOffset, totalRows); + + long nextValid = 0; + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + if ((ii % 64) == 0) { + nextValid = isValid.get(ii / 64); + } + if ((nextValid & 0x1) == 0x0) { + chunk.set(outOffset + ii, null); + } + nextValid >>= 1; + } + } + } + + return chunk; + } +} From a7e2d8c2c2a1c24a6381556bcbff369bbdffee22 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 25 Jun 2024 13:44:40 -0500 Subject: [PATCH 087/219] Commit #4, replicate new chunk readers for primitives --- .../chunk/ByteChunkInputStreamGenerator.java | 197 ----------------- .../barrage/chunk/ByteChunkReader.java | 204 ++++++++++++++++++ .../chunk/CharChunkInputStreamGenerator.java | 197 ----------------- .../barrage/chunk/CharChunkReader.java | 200 +++++++++++++++++ .../chunk/DefaultChunkReadingFactory.java | 51 ++--- .../DoubleChunkInputStreamGenerator.java | 197 ----------------- .../barrage/chunk/DoubleChunkReader.java | 204 ++++++++++++++++++ .../chunk/FloatChunkInputStreamGenerator.java | 197 ----------------- .../barrage/chunk/FloatChunkReader.java | 204 ++++++++++++++++++ .../chunk/IntChunkInputStreamGenerator.java | 197 ----------------- .../barrage/chunk/IntChunkReader.java | 204 ++++++++++++++++++ .../chunk/LongChunkInputStreamGenerator.java | 197 ----------------- .../barrage/chunk/LongChunkReader.java | 204 ++++++++++++++++++ .../chunk/ShortChunkInputStreamGenerator.java | 197 ----------------- .../barrage/chunk/ShortChunkReader.java | 204 ++++++++++++++++++ .../replicators/ReplicateBarrageUtils.java | 3 + 16 files changed, 1440 insertions(+), 1417 deletions(-) create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkReader.java create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkReader.java create mode 100644 
extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkReader.java create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkReader.java create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkReader.java create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkReader.java diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java index c2cc7f3e453..d334e031bed 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java @@ -7,10 +7,7 @@ // @formatter:off package io.deephaven.extensions.barrage.chunk; -import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.primitive.function.ToByteFunction; @@ -21,17 +18,11 @@ import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.ByteChunk; import io.deephaven.chunk.WritableByteChunk; -import io.deephaven.chunk.WritableLongChunk; import io.deephaven.util.type.TypeUtils; import org.jetbrains.annotations.Nullable; -import java.io.DataInput; import java.io.IOException; import java.io.OutputStream; -import java.util.Iterator; -import java.util.PrimitiveIterator; -import java.util.function.Function; -import java.util.function.IntFunction; import static io.deephaven.util.QueryConstants.*; @@ -167,192 +158,4 @@ public int drainTo(final OutputStream outputStream) throws IOException { return LongSizedDataStructure.intSize("ByteChunkInputStreamGenerator", bytesWritten); } } - - @FunctionalInterface - public interface ByteConversion { - byte apply(byte in); - - ByteConversion IDENTITY = (byte a) -> a; - } - - static WritableByteChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - return extractChunkFromInputStreamWithConversion( - options, ByteConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows); - } - - static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final StreamReaderOptions options, - final Function transform, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - try (final WritableByteChunk inner = extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - - final WritableObjectChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, inner.size()), - WritableObjectChunk::makeWritableChunk, - WritableChunk::asWritableObjectChunk); - - if (outChunk == null) { - // if we're not given an output chunk then we better be writing at the front 
of the new one - Assert.eqZero(outOffset, "outOffset"); - } - - for (int ii = 0; ii < inner.size(); ++ii) { - byte value = inner.get(ii); - chunk.set(outOffset + ii, transform.apply(value)); - } - - return chunk; - } - } - - static WritableByteChunk extractChunkFromInputStreamWithConversion( - final StreamReaderOptions options, - final ByteConversion conversion, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long payloadBuffer = bufferInfoIter.nextLong(); - - final WritableByteChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, nodeInfo.numElements), - WritableByteChunk::makeWritableChunk, - WritableChunk::asWritableByteChunk); - - if (nodeInfo.numElements == 0) { - return chunk; - } - - final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { - if (options.useDeephavenNulls() && validityBuffer != 0) { - throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); - } - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity buffer by here - - final long payloadRead = (long) nodeInfo.numElements * Byte.BYTES; - Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); - - if (options.useDeephavenNulls()) { - useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); - } else { - useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); - } - - final long overhangPayload = payloadBuffer - payloadRead; - if (overhangPayload > 0) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); - } - } - - return chunk; - } - - private static > T castOrCreateChunk( - final WritableChunk outChunk, - final int numRows, - final IntFunction chunkFactory, - final Function, T> castFunction) { - if (outChunk != null) { - return castFunction.apply(outChunk); - } - final T newChunk = chunkFactory.apply(numRows); - newChunk.setSize(numRows); - return newChunk; - } - - private static void useDeephavenNulls( - final ByteConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableByteChunk chunk, - final int offset) throws IOException { - if (conversion == ByteConversion.IDENTITY) { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - chunk.set(offset + ii, is.readByte()); - } - } else { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - final byte in = is.readByte(); - final byte out = in == NULL_BYTE ? 
in : conversion.apply(in); - chunk.set(offset + ii, out); - } - } - } - - private static void useValidityBuffer( - final ByteConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableByteChunk chunk, - final int offset, - final WritableLongChunk isValid) throws IOException { - final int numElements = nodeInfo.numElements; - final int numValidityWords = (numElements + 63) / 64; - - int ei = 0; - int pendingSkips = 0; - - for (int vi = 0; vi < numValidityWords; ++vi) { - int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); - long validityWord = isValid.get(vi); - do { - if ((validityWord & 1) == 1) { - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Byte.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - ei += pendingSkips; - pendingSkips = 0; - } - chunk.set(offset + ei++, conversion.apply(is.readByte())); - validityWord >>= 1; - bitsLeftInThisWord--; - } else { - final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); - pendingSkips += skips; - validityWord >>= skips; - bitsLeftInThisWord -= skips; - } - } while (bitsLeftInThisWord > 0); - } - - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Byte.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - } - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkReader.java new file mode 100644 index 00000000000..29bee0fea05 --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkReader.java @@ -0,0 +1,204 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +// ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY +// ****** Edit CharChunkReader and run "./gradlew replicateBarrageUtils" to regenerate +// +// @formatter:off +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.base.verify.Assert; +import io.deephaven.chunk.WritableByteChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; +import java.util.function.Function; +import java.util.function.IntFunction; + +import static io.deephaven.util.QueryConstants.NULL_BYTE; + +public class ByteChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "ByteChunkReader"; + private final StreamReaderOptions options; + private final ByteConversion conversion; + + @FunctionalInterface + public interface ByteConversion { + byte apply(byte in); + + ByteConversion IDENTITY = (byte a) -> a; + } + + public ByteChunkReader(StreamReaderOptions options) { + this(options, ByteConversion.IDENTITY); + } + + public ByteChunkReader(StreamReaderOptions options, ByteConversion conversion) { + this.options = options; + this.conversion = conversion; + } + + public ChunkReader transform(Function transform) { + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { + try (final WritableByteChunk inner = ByteChunkReader.this.read( + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + + final WritableObjectChunk chunk = castOrCreateChunk( + outChunk, + 
Math.max(totalRows, inner.size()), + WritableObjectChunk::makeWritableChunk, + WritableChunk::asWritableObjectChunk); + + if (outChunk == null) { + // if we're not given an output chunk then we better be writing at the front of the new one + Assert.eqZero(outOffset, "outOffset"); + } + + for (int ii = 0; ii < inner.size(); ++ii) { + byte value = inner.get(ii); + chunk.set(outOffset + ii, transform.apply(value)); + } + + return chunk; + } + }; + } + + @Override + public WritableByteChunk read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long payloadBuffer = bufferInfoIter.nextLong(); + + final WritableByteChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, nodeInfo.numElements), + WritableByteChunk::makeWritableChunk, + WritableChunk::asWritableByteChunk); + + if (nodeInfo.numElements == 0) { + return chunk; + } + + final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { + if (options.useDeephavenNulls() && validityBuffer != 0) { + throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); + } + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + final long payloadRead = (long) nodeInfo.numElements * Byte.BYTES; + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); + + if (options.useDeephavenNulls()) { + useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); + } else { + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); + } + + final long overhangPayload = payloadBuffer - payloadRead; + if (overhangPayload > 0) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); + } + } + + return chunk; + } + + private static > T castOrCreateChunk( + final WritableChunk outChunk, + final int numRows, + final IntFunction chunkFactory, + final Function, T> castFunction) { + if (outChunk != null) { + return castFunction.apply(outChunk); + } + final T newChunk = chunkFactory.apply(numRows); + newChunk.setSize(numRows); + return newChunk; + } + + private static void useDeephavenNulls( + final ByteConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableByteChunk chunk, + final int offset) throws IOException { + if (conversion == ByteConversion.IDENTITY) { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + chunk.set(offset + ii, is.readByte()); + } + } else { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + final byte in = is.readByte(); + final byte out = in == NULL_BYTE ? 
in : conversion.apply(in); + chunk.set(offset + ii, out); + } + } + } + + private static void useValidityBuffer( + final ByteConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableByteChunk chunk, + final int offset, + final WritableLongChunk isValid) throws IOException { + final int numElements = nodeInfo.numElements; + final int numValidityWords = (numElements + 63) / 64; + + int ei = 0; + int pendingSkips = 0; + + for (int vi = 0; vi < numValidityWords; ++vi) { + int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); + long validityWord = isValid.get(vi); + do { + if ((validityWord & 1) == 1) { + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Byte.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + ei += pendingSkips; + pendingSkips = 0; + } + chunk.set(offset + ei++, conversion.apply(is.readByte())); + validityWord >>= 1; + bitsLeftInThisWord--; + } else { + final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); + pendingSkips += skips; + validityWord >>= skips; + bitsLeftInThisWord -= skips; + } + } while (bitsLeftInThisWord > 0); + } + + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Byte.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + } + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java index 878bc0a6cd6..83b1f2f72f1 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java @@ -3,10 +3,7 @@ // package io.deephaven.extensions.barrage.chunk; -import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.primitive.function.ToCharFunction; @@ -17,17 +14,11 @@ import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.CharChunk; import io.deephaven.chunk.WritableCharChunk; -import io.deephaven.chunk.WritableLongChunk; import io.deephaven.util.type.TypeUtils; import org.jetbrains.annotations.Nullable; -import java.io.DataInput; import java.io.IOException; import java.io.OutputStream; -import java.util.Iterator; -import java.util.PrimitiveIterator; -import java.util.function.Function; -import java.util.function.IntFunction; import static io.deephaven.util.QueryConstants.*; @@ -163,192 +154,4 @@ public int drainTo(final OutputStream outputStream) throws IOException { return LongSizedDataStructure.intSize("CharChunkInputStreamGenerator", bytesWritten); } } - - @FunctionalInterface - public interface CharConversion { - char apply(char in); - - CharConversion IDENTITY = (char a) -> a; - } - - static WritableCharChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - return extractChunkFromInputStreamWithConversion( - options, CharConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows); - } - - 
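[Editor's note] The per-type extract methods being deleted in this hunk and the new ChunkReader classes added above share the same validity-bitmap decoding: valid elements are read from the payload one at a time, while runs of null bits are batched with Long.numberOfTrailingZeros so the payload can be skipped and the chunk null-filled once per run instead of once per element. A minimal, self-contained sketch of that loop (illustrative only, not part of this patch; it decodes a single 64-bit Arrow validity word into a boolean mask rather than reading a payload stream):

    // Decode up to 64 elements from one Arrow validity word.
    // Mirrors the pendingSkips loop in useValidityBuffer: set bits are
    // consumed one at a time, runs of clear bits are skipped in bulk.
    static boolean[] decodeValidityWord(long validityWord, int numElements) {
        final boolean[] valid = new boolean[numElements];
        int ei = 0; // next element index to fill
        int bitsLeft = Math.min(64, numElements);
        while (bitsLeft > 0) {
            if ((validityWord & 1) == 1) {
                valid[ei++] = true; // a real reader would readByte()/readChar() here
                validityWord >>= 1;
                bitsLeft--;
            } else {
                // numberOfTrailingZeros(0) == 64, so clamp to the bits remaining;
                // the signed shift is safe because bitsLeft bounds consumption
                final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeft);
                ei += skips; // entries stay false, i.e. null
                validityWord >>= skips;
                bitsLeft -= skips;
            }
        }
        return valid;
    }
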
static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final StreamReaderOptions options, - final Function transform, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - try (final WritableCharChunk inner = extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - - final WritableObjectChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, inner.size()), - WritableObjectChunk::makeWritableChunk, - WritableChunk::asWritableObjectChunk); - - if (outChunk == null) { - // if we're not given an output chunk then we better be writing at the front of the new one - Assert.eqZero(outOffset, "outOffset"); - } - - for (int ii = 0; ii < inner.size(); ++ii) { - char value = inner.get(ii); - chunk.set(outOffset + ii, transform.apply(value)); - } - - return chunk; - } - } - - static WritableCharChunk extractChunkFromInputStreamWithConversion( - final StreamReaderOptions options, - final CharConversion conversion, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long payloadBuffer = bufferInfoIter.nextLong(); - - final WritableCharChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, nodeInfo.numElements), - WritableCharChunk::makeWritableChunk, - WritableChunk::asWritableCharChunk); - - if (nodeInfo.numElements == 0) { - return chunk; - } - - final int numValidityLongs = options.useDeephavenNulls() ? 
0 : (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { - if (options.useDeephavenNulls() && validityBuffer != 0) { - throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); - } - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity buffer by here - - final long payloadRead = (long) nodeInfo.numElements * Character.BYTES; - Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); - - if (options.useDeephavenNulls()) { - useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); - } else { - useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); - } - - final long overhangPayload = payloadBuffer - payloadRead; - if (overhangPayload > 0) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); - } - } - - return chunk; - } - - private static > T castOrCreateChunk( - final WritableChunk outChunk, - final int numRows, - final IntFunction chunkFactory, - final Function, T> castFunction) { - if (outChunk != null) { - return castFunction.apply(outChunk); - } - final T newChunk = chunkFactory.apply(numRows); - newChunk.setSize(numRows); - return newChunk; - } - - private static void useDeephavenNulls( - final CharConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableCharChunk chunk, - final int offset) throws IOException { - if (conversion == CharConversion.IDENTITY) { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - chunk.set(offset + ii, is.readChar()); - } - } else { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - final char in = is.readChar(); - final char out = in == NULL_CHAR ? 
in : conversion.apply(in); - chunk.set(offset + ii, out); - } - } - } - - private static void useValidityBuffer( - final CharConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableCharChunk chunk, - final int offset, - final WritableLongChunk isValid) throws IOException { - final int numElements = nodeInfo.numElements; - final int numValidityWords = (numElements + 63) / 64; - - int ei = 0; - int pendingSkips = 0; - - for (int vi = 0; vi < numValidityWords; ++vi) { - int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); - long validityWord = isValid.get(vi); - do { - if ((validityWord & 1) == 1) { - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Character.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - ei += pendingSkips; - pendingSkips = 0; - } - chunk.set(offset + ei++, conversion.apply(is.readChar())); - validityWord >>= 1; - bitsLeftInThisWord--; - } else { - final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); - pendingSkips += skips; - validityWord >>= skips; - bitsLeftInThisWord -= skips; - } - } while (bitsLeftInThisWord > 0); - } - - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Character.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - } - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkReader.java new file mode 100644 index 00000000000..b6fce96ffbf --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkReader.java @@ -0,0 +1,200 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.base.verify.Assert; +import io.deephaven.chunk.WritableCharChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; +import java.util.function.Function; +import java.util.function.IntFunction; + +import static io.deephaven.util.QueryConstants.NULL_CHAR; + +public class CharChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "CharChunkReader"; + private final StreamReaderOptions options; + private final CharConversion conversion; + + @FunctionalInterface + public interface CharConversion { + char apply(char in); + + CharConversion IDENTITY = (char a) -> a; + } + + public CharChunkReader(StreamReaderOptions options) { + this(options, CharConversion.IDENTITY); + } + + public CharChunkReader(StreamReaderOptions options, CharConversion conversion) { + this.options = options; + this.conversion = conversion; + } + + public ChunkReader transform(Function transform) { + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { + try (final WritableCharChunk inner = CharChunkReader.this.read( + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + + final WritableObjectChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, inner.size()), + WritableObjectChunk::makeWritableChunk, + WritableChunk::asWritableObjectChunk); + + if (outChunk == null) { + // if we're not given 
an output chunk then we better be writing at the front of the new one + Assert.eqZero(outOffset, "outOffset"); + } + + for (int ii = 0; ii < inner.size(); ++ii) { + char value = inner.get(ii); + chunk.set(outOffset + ii, transform.apply(value)); + } + + return chunk; + } + }; + } + + @Override + public WritableCharChunk read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long payloadBuffer = bufferInfoIter.nextLong(); + + final WritableCharChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, nodeInfo.numElements), + WritableCharChunk::makeWritableChunk, + WritableChunk::asWritableCharChunk); + + if (nodeInfo.numElements == 0) { + return chunk; + } + + final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { + if (options.useDeephavenNulls() && validityBuffer != 0) { + throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); + } + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + final long payloadRead = (long) nodeInfo.numElements * Character.BYTES; + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); + + if (options.useDeephavenNulls()) { + useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); + } else { + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); + } + + final long overhangPayload = payloadBuffer - payloadRead; + if (overhangPayload > 0) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); + } + } + + return chunk; + } + + private static > T castOrCreateChunk( + final WritableChunk outChunk, + final int numRows, + final IntFunction chunkFactory, + final Function, T> castFunction) { + if (outChunk != null) { + return castFunction.apply(outChunk); + } + final T newChunk = chunkFactory.apply(numRows); + newChunk.setSize(numRows); + return newChunk; + } + + private static void useDeephavenNulls( + final CharConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableCharChunk chunk, + final int offset) throws IOException { + if (conversion == CharConversion.IDENTITY) { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + chunk.set(offset + ii, is.readChar()); + } + } else { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + final char in = is.readChar(); + final char out = in == NULL_CHAR ? 
in : conversion.apply(in); + chunk.set(offset + ii, out); + } + } + } + + private static void useValidityBuffer( + final CharConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableCharChunk chunk, + final int offset, + final WritableLongChunk isValid) throws IOException { + final int numElements = nodeInfo.numElements; + final int numValidityWords = (numElements + 63) / 64; + + int ei = 0; + int pendingSkips = 0; + + for (int vi = 0; vi < numValidityWords; ++vi) { + int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); + long validityWord = isValid.get(vi); + do { + if ((validityWord & 1) == 1) { + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Character.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + ei += pendingSkips; + pendingSkips = 0; + } + chunk.set(offset + ei++, conversion.apply(is.readChar())); + validityWord >>= 1; + bitsLeftInThisWord--; + } else { + final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); + pendingSkips += skips; + validityWord >>= skips; + bitsLeftInThisWord -= skips; + } + } while (bitsLeftInThisWord > 0); + } + + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Character.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + } + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java index df41a1ae7ca..bbf5b398fe7 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java @@ -37,45 +37,28 @@ public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int case Boolean: throw new UnsupportedOperationException("Booleans are reinterpreted as bytes"); case Char: - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> CharChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new CharChunkReader(options); case Byte: if (typeInfo.type() == Boolean.class || typeInfo.type() == boolean.class) { return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> BooleanChunkInputStreamGenerator.extractChunkFromInputStream( options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> ByteChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new ByteChunkReader(options); case Short: - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> ShortChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new ShortChunkReader(options); case Int: - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> IntChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new IntChunkReader(options); case Long: if (factor == 1) { - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> LongChunkInputStreamGenerator.extractChunkFromInputStream( - options, 
fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new LongChunkReader(options); } - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> LongChunkInputStreamGenerator.extractChunkFromInputStreamWithConversion( - options, - (long v) -> v == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : (v * factor), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new LongChunkReader(options, + (long v) -> v == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : (v * factor)); case Float: - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> FloatChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new FloatChunkReader(options); case Double: - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> DoubleChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new DoubleChunkReader(options); case Object: if (typeInfo.type().isArray()) { if (typeInfo.componentType() == byte.class) { @@ -196,20 +179,12 @@ public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } if (typeInfo.type() == LocalDate.class) { - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( - options, - value -> value == QueryConstants.NULL_LONG - ? null - : LocalDate.ofEpochDay(value / MS_PER_DAY), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new LongChunkReader(options).transform(value -> value == QueryConstants.NULL_LONG ? null + : LocalDate.ofEpochDay(value / MS_PER_DAY)); } if (typeInfo.type() == LocalTime.class) { - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( - options, - value -> value == QueryConstants.NULL_LONG ? null : LocalTime.ofNanoOfDay(value), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new LongChunkReader(options).transform( + value -> value == QueryConstants.NULL_LONG ? 
null : LocalTime.ofNanoOfDay(value)); } if (typeInfo.type() == String.class || options.columnConversionMode().equals(ColumnConversionMode.Stringify)) { diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java index c5283a02364..a0046b67edb 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java @@ -9,10 +9,7 @@ import java.util.function.ToDoubleFunction; -import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.rowset.RowSet; @@ -22,17 +19,11 @@ import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.DoubleChunk; import io.deephaven.chunk.WritableDoubleChunk; -import io.deephaven.chunk.WritableLongChunk; import io.deephaven.util.type.TypeUtils; import org.jetbrains.annotations.Nullable; -import java.io.DataInput; import java.io.IOException; import java.io.OutputStream; -import java.util.Iterator; -import java.util.PrimitiveIterator; -import java.util.function.Function; -import java.util.function.IntFunction; import static io.deephaven.util.QueryConstants.*; @@ -168,192 +159,4 @@ public int drainTo(final OutputStream outputStream) throws IOException { return LongSizedDataStructure.intSize("DoubleChunkInputStreamGenerator", bytesWritten); } } - - @FunctionalInterface - public interface DoubleConversion { - double apply(double in); - - DoubleConversion IDENTITY = (double a) -> a; - } - - static WritableDoubleChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - return extractChunkFromInputStreamWithConversion( - options, DoubleConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows); - } - - static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final StreamReaderOptions options, - final Function transform, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - try (final WritableDoubleChunk inner = extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - - final WritableObjectChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, inner.size()), - WritableObjectChunk::makeWritableChunk, - WritableChunk::asWritableObjectChunk); - - if (outChunk == null) { - // if we're not given an output chunk then we better be writing at the front of the new one - Assert.eqZero(outOffset, "outOffset"); - } - - for (int ii = 0; ii < inner.size(); ++ii) { - double value = inner.get(ii); - chunk.set(outOffset + ii, transform.apply(value)); - } - - return chunk; - } - } - - static WritableDoubleChunk extractChunkFromInputStreamWithConversion( - final StreamReaderOptions options, - final DoubleConversion conversion, - final Iterator 
fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long payloadBuffer = bufferInfoIter.nextLong(); - - final WritableDoubleChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, nodeInfo.numElements), - WritableDoubleChunk::makeWritableChunk, - WritableChunk::asWritableDoubleChunk); - - if (nodeInfo.numElements == 0) { - return chunk; - } - - final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { - if (options.useDeephavenNulls() && validityBuffer != 0) { - throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); - } - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity buffer by here - - final long payloadRead = (long) nodeInfo.numElements * Double.BYTES; - Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); - - if (options.useDeephavenNulls()) { - useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); - } else { - useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); - } - - final long overhangPayload = payloadBuffer - payloadRead; - if (overhangPayload > 0) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); - } - } - - return chunk; - } - - private static > T castOrCreateChunk( - final WritableChunk outChunk, - final int numRows, - final IntFunction chunkFactory, - final Function, T> castFunction) { - if (outChunk != null) { - return castFunction.apply(outChunk); - } - final T newChunk = chunkFactory.apply(numRows); - newChunk.setSize(numRows); - return newChunk; - } - - private static void useDeephavenNulls( - final DoubleConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableDoubleChunk chunk, - final int offset) throws IOException { - if (conversion == DoubleConversion.IDENTITY) { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - chunk.set(offset + ii, is.readDouble()); - } - } else { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - final double in = is.readDouble(); - final double out = in == NULL_DOUBLE ? 
in : conversion.apply(in); - chunk.set(offset + ii, out); - } - } - } - - private static void useValidityBuffer( - final DoubleConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableDoubleChunk chunk, - final int offset, - final WritableLongChunk isValid) throws IOException { - final int numElements = nodeInfo.numElements; - final int numValidityWords = (numElements + 63) / 64; - - int ei = 0; - int pendingSkips = 0; - - for (int vi = 0; vi < numValidityWords; ++vi) { - int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); - long validityWord = isValid.get(vi); - do { - if ((validityWord & 1) == 1) { - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Double.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - ei += pendingSkips; - pendingSkips = 0; - } - chunk.set(offset + ei++, conversion.apply(is.readDouble())); - validityWord >>= 1; - bitsLeftInThisWord--; - } else { - final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); - pendingSkips += skips; - validityWord >>= skips; - bitsLeftInThisWord -= skips; - } - } while (bitsLeftInThisWord > 0); - } - - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Double.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - } - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java new file mode 100644 index 00000000000..4b72273272b --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java @@ -0,0 +1,204 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +// ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY +// ****** Edit CharChunkReader and run "./gradlew replicateBarrageUtils" to regenerate +// +// @formatter:off +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.base.verify.Assert; +import io.deephaven.chunk.WritableDoubleChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; +import java.util.function.Function; +import java.util.function.IntFunction; + +import static io.deephaven.util.QueryConstants.NULL_DOUBLE; + +public class DoubleChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "DoubleChunkReader"; + private final StreamReaderOptions options; + private final DoubleConversion conversion; + + @FunctionalInterface + public interface DoubleConversion { + double apply(double in); + + DoubleConversion IDENTITY = (double a) -> a; + } + + public DoubleChunkReader(StreamReaderOptions options) { + this(options, DoubleConversion.IDENTITY); + } + + public DoubleChunkReader(StreamReaderOptions options, DoubleConversion conversion) { + this.options = options; + this.conversion = conversion; + } + + public ChunkReader transform(Function transform) { + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { + try (final WritableDoubleChunk inner = DoubleChunkReader.this.read( + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + + final WritableObjectChunk chunk = 
castOrCreateChunk( + outChunk, + Math.max(totalRows, inner.size()), + WritableObjectChunk::makeWritableChunk, + WritableChunk::asWritableObjectChunk); + + if (outChunk == null) { + // if we're not given an output chunk then we better be writing at the front of the new one + Assert.eqZero(outOffset, "outOffset"); + } + + for (int ii = 0; ii < inner.size(); ++ii) { + double value = inner.get(ii); + chunk.set(outOffset + ii, transform.apply(value)); + } + + return chunk; + } + }; + } + + @Override + public WritableDoubleChunk read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long payloadBuffer = bufferInfoIter.nextLong(); + + final WritableDoubleChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, nodeInfo.numElements), + WritableDoubleChunk::makeWritableChunk, + WritableChunk::asWritableDoubleChunk); + + if (nodeInfo.numElements == 0) { + return chunk; + } + + final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { + if (options.useDeephavenNulls() && validityBuffer != 0) { + throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); + } + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + final long payloadRead = (long) nodeInfo.numElements * Double.BYTES; + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); + + if (options.useDeephavenNulls()) { + useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); + } else { + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); + } + + final long overhangPayload = payloadBuffer - payloadRead; + if (overhangPayload > 0) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); + } + } + + return chunk; + } + + private static > T castOrCreateChunk( + final WritableChunk outChunk, + final int numRows, + final IntFunction chunkFactory, + final Function, T> castFunction) { + if (outChunk != null) { + return castFunction.apply(outChunk); + } + final T newChunk = chunkFactory.apply(numRows); + newChunk.setSize(numRows); + return newChunk; + } + + private static void useDeephavenNulls( + final DoubleConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableDoubleChunk chunk, + final int offset) throws IOException { + if (conversion == DoubleConversion.IDENTITY) { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + chunk.set(offset + ii, is.readDouble()); + } + } else { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + final double in = is.readDouble(); + final double out = in == NULL_DOUBLE ? 
in : conversion.apply(in); + chunk.set(offset + ii, out); + } + } + } + + private static void useValidityBuffer( + final DoubleConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableDoubleChunk chunk, + final int offset, + final WritableLongChunk isValid) throws IOException { + final int numElements = nodeInfo.numElements; + final int numValidityWords = (numElements + 63) / 64; + + int ei = 0; + int pendingSkips = 0; + + for (int vi = 0; vi < numValidityWords; ++vi) { + int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); + long validityWord = isValid.get(vi); + do { + if ((validityWord & 1) == 1) { + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Double.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + ei += pendingSkips; + pendingSkips = 0; + } + chunk.set(offset + ei++, conversion.apply(is.readDouble())); + validityWord >>= 1; + bitsLeftInThisWord--; + } else { + final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); + pendingSkips += skips; + validityWord >>= skips; + bitsLeftInThisWord -= skips; + } + } while (bitsLeftInThisWord > 0); + } + + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Double.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + } + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java index 19b52593bff..edd8aaccb2a 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java @@ -7,10 +7,7 @@ // @formatter:off package io.deephaven.extensions.barrage.chunk; -import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.primitive.function.ToFloatFunction; @@ -21,17 +18,11 @@ import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.FloatChunk; import io.deephaven.chunk.WritableFloatChunk; -import io.deephaven.chunk.WritableLongChunk; import io.deephaven.util.type.TypeUtils; import org.jetbrains.annotations.Nullable; -import java.io.DataInput; import java.io.IOException; import java.io.OutputStream; -import java.util.Iterator; -import java.util.PrimitiveIterator; -import java.util.function.Function; -import java.util.function.IntFunction; import static io.deephaven.util.QueryConstants.*; @@ -167,192 +158,4 @@ public int drainTo(final OutputStream outputStream) throws IOException { return LongSizedDataStructure.intSize("FloatChunkInputStreamGenerator", bytesWritten); } } - - @FunctionalInterface - public interface FloatConversion { - float apply(float in); - - FloatConversion IDENTITY = (float a) -> a; - } - - static WritableFloatChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - return extractChunkFromInputStreamWithConversion( - options, FloatConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, 
outChunk, outOffset, - totalRows); - } - - static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final StreamReaderOptions options, - final Function transform, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - try (final WritableFloatChunk inner = extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - - final WritableObjectChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, inner.size()), - WritableObjectChunk::makeWritableChunk, - WritableChunk::asWritableObjectChunk); - - if (outChunk == null) { - // if we're not given an output chunk then we better be writing at the front of the new one - Assert.eqZero(outOffset, "outOffset"); - } - - for (int ii = 0; ii < inner.size(); ++ii) { - float value = inner.get(ii); - chunk.set(outOffset + ii, transform.apply(value)); - } - - return chunk; - } - } - - static WritableFloatChunk extractChunkFromInputStreamWithConversion( - final StreamReaderOptions options, - final FloatConversion conversion, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long payloadBuffer = bufferInfoIter.nextLong(); - - final WritableFloatChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, nodeInfo.numElements), - WritableFloatChunk::makeWritableChunk, - WritableChunk::asWritableFloatChunk); - - if (nodeInfo.numElements == 0) { - return chunk; - } - - final int numValidityLongs = options.useDeephavenNulls() ? 
0 : (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { - if (options.useDeephavenNulls() && validityBuffer != 0) { - throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); - } - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity buffer by here - - final long payloadRead = (long) nodeInfo.numElements * Float.BYTES; - Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); - - if (options.useDeephavenNulls()) { - useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); - } else { - useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); - } - - final long overhangPayload = payloadBuffer - payloadRead; - if (overhangPayload > 0) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); - } - } - - return chunk; - } - - private static > T castOrCreateChunk( - final WritableChunk outChunk, - final int numRows, - final IntFunction chunkFactory, - final Function, T> castFunction) { - if (outChunk != null) { - return castFunction.apply(outChunk); - } - final T newChunk = chunkFactory.apply(numRows); - newChunk.setSize(numRows); - return newChunk; - } - - private static void useDeephavenNulls( - final FloatConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableFloatChunk chunk, - final int offset) throws IOException { - if (conversion == FloatConversion.IDENTITY) { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - chunk.set(offset + ii, is.readFloat()); - } - } else { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - final float in = is.readFloat(); - final float out = in == NULL_FLOAT ? 
in : conversion.apply(in); - chunk.set(offset + ii, out); - } - } - } - - private static void useValidityBuffer( - final FloatConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableFloatChunk chunk, - final int offset, - final WritableLongChunk isValid) throws IOException { - final int numElements = nodeInfo.numElements; - final int numValidityWords = (numElements + 63) / 64; - - int ei = 0; - int pendingSkips = 0; - - for (int vi = 0; vi < numValidityWords; ++vi) { - int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); - long validityWord = isValid.get(vi); - do { - if ((validityWord & 1) == 1) { - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Float.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - ei += pendingSkips; - pendingSkips = 0; - } - chunk.set(offset + ei++, conversion.apply(is.readFloat())); - validityWord >>= 1; - bitsLeftInThisWord--; - } else { - final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); - pendingSkips += skips; - validityWord >>= skips; - bitsLeftInThisWord -= skips; - } - } while (bitsLeftInThisWord > 0); - } - - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Float.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - } - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkReader.java new file mode 100644 index 00000000000..6d434226235 --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkReader.java @@ -0,0 +1,204 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +// ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY +// ****** Edit CharChunkReader and run "./gradlew replicateBarrageUtils" to regenerate +// +// @formatter:off +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.base.verify.Assert; +import io.deephaven.chunk.WritableFloatChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; +import java.util.function.Function; +import java.util.function.IntFunction; + +import static io.deephaven.util.QueryConstants.NULL_FLOAT; + +public class FloatChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "FloatChunkReader"; + private final StreamReaderOptions options; + private final FloatConversion conversion; + + @FunctionalInterface + public interface FloatConversion { + float apply(float in); + + FloatConversion IDENTITY = (float a) -> a; + } + + public FloatChunkReader(StreamReaderOptions options) { + this(options, FloatConversion.IDENTITY); + } + + public FloatChunkReader(StreamReaderOptions options, FloatConversion conversion) { + this.options = options; + this.conversion = conversion; + } + + public ChunkReader transform(Function transform) { + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { + try (final WritableFloatChunk inner = FloatChunkReader.this.read( + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + + final WritableObjectChunk chunk = castOrCreateChunk( + 
outChunk, + Math.max(totalRows, inner.size()), + WritableObjectChunk::makeWritableChunk, + WritableChunk::asWritableObjectChunk); + + if (outChunk == null) { + // if we're not given an output chunk then we better be writing at the front of the new one + Assert.eqZero(outOffset, "outOffset"); + } + + for (int ii = 0; ii < inner.size(); ++ii) { + float value = inner.get(ii); + chunk.set(outOffset + ii, transform.apply(value)); + } + + return chunk; + } + }; + } + + @Override + public WritableFloatChunk read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long payloadBuffer = bufferInfoIter.nextLong(); + + final WritableFloatChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, nodeInfo.numElements), + WritableFloatChunk::makeWritableChunk, + WritableChunk::asWritableFloatChunk); + + if (nodeInfo.numElements == 0) { + return chunk; + } + + final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { + if (options.useDeephavenNulls() && validityBuffer != 0) { + throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); + } + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + final long payloadRead = (long) nodeInfo.numElements * Float.BYTES; + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); + + if (options.useDeephavenNulls()) { + useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); + } else { + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); + } + + final long overhangPayload = payloadBuffer - payloadRead; + if (overhangPayload > 0) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); + } + } + + return chunk; + } + + private static > T castOrCreateChunk( + final WritableChunk outChunk, + final int numRows, + final IntFunction chunkFactory, + final Function, T> castFunction) { + if (outChunk != null) { + return castFunction.apply(outChunk); + } + final T newChunk = chunkFactory.apply(numRows); + newChunk.setSize(numRows); + return newChunk; + } + + private static void useDeephavenNulls( + final FloatConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableFloatChunk chunk, + final int offset) throws IOException { + if (conversion == FloatConversion.IDENTITY) { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + chunk.set(offset + ii, is.readFloat()); + } + } else { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + final float in = is.readFloat(); + final float out = in == NULL_FLOAT ? 
in : conversion.apply(in); + chunk.set(offset + ii, out); + } + } + } + + private static void useValidityBuffer( + final FloatConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableFloatChunk chunk, + final int offset, + final WritableLongChunk isValid) throws IOException { + final int numElements = nodeInfo.numElements; + final int numValidityWords = (numElements + 63) / 64; + + int ei = 0; + int pendingSkips = 0; + + for (int vi = 0; vi < numValidityWords; ++vi) { + int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); + long validityWord = isValid.get(vi); + do { + if ((validityWord & 1) == 1) { + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Float.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + ei += pendingSkips; + pendingSkips = 0; + } + chunk.set(offset + ei++, conversion.apply(is.readFloat())); + validityWord >>= 1; + bitsLeftInThisWord--; + } else { + final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); + pendingSkips += skips; + validityWord >>= skips; + bitsLeftInThisWord -= skips; + } + } while (bitsLeftInThisWord > 0); + } + + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Float.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + } + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java index 91714f4dd43..87bc61b8c6d 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java @@ -9,10 +9,7 @@ import java.util.function.ToIntFunction; -import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.rowset.RowSet; @@ -22,17 +19,11 @@ import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.IntChunk; import io.deephaven.chunk.WritableIntChunk; -import io.deephaven.chunk.WritableLongChunk; import io.deephaven.util.type.TypeUtils; import org.jetbrains.annotations.Nullable; -import java.io.DataInput; import java.io.IOException; import java.io.OutputStream; -import java.util.Iterator; -import java.util.PrimitiveIterator; -import java.util.function.Function; -import java.util.function.IntFunction; import static io.deephaven.util.QueryConstants.*; @@ -168,192 +159,4 @@ public int drainTo(final OutputStream outputStream) throws IOException { return LongSizedDataStructure.intSize("IntChunkInputStreamGenerator", bytesWritten); } } - - @FunctionalInterface - public interface IntConversion { - int apply(int in); - - IntConversion IDENTITY = (int a) -> a; - } - - static WritableIntChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - return extractChunkFromInputStreamWithConversion( - options, IntConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows); - } - - static WritableObjectChunk 
extractChunkFromInputStreamWithTransform( - final StreamReaderOptions options, - final Function transform, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - try (final WritableIntChunk inner = extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - - final WritableObjectChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, inner.size()), - WritableObjectChunk::makeWritableChunk, - WritableChunk::asWritableObjectChunk); - - if (outChunk == null) { - // if we're not given an output chunk then we better be writing at the front of the new one - Assert.eqZero(outOffset, "outOffset"); - } - - for (int ii = 0; ii < inner.size(); ++ii) { - int value = inner.get(ii); - chunk.set(outOffset + ii, transform.apply(value)); - } - - return chunk; - } - } - - static WritableIntChunk extractChunkFromInputStreamWithConversion( - final StreamReaderOptions options, - final IntConversion conversion, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long payloadBuffer = bufferInfoIter.nextLong(); - - final WritableIntChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, nodeInfo.numElements), - WritableIntChunk::makeWritableChunk, - WritableChunk::asWritableIntChunk); - - if (nodeInfo.numElements == 0) { - return chunk; - } - - final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { - if (options.useDeephavenNulls() && validityBuffer != 0) { - throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); - } - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity buffer by here - - final long payloadRead = (long) nodeInfo.numElements * Integer.BYTES; - Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); - - if (options.useDeephavenNulls()) { - useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); - } else { - useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); - } - - final long overhangPayload = payloadBuffer - payloadRead; - if (overhangPayload > 0) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); - } - } - - return chunk; - } - - private static > T castOrCreateChunk( - final WritableChunk outChunk, - final int numRows, - final IntFunction chunkFactory, - final Function, T> castFunction) { - if (outChunk != null) { - return castFunction.apply(outChunk); - } - final T newChunk = chunkFactory.apply(numRows); - newChunk.setSize(numRows); - return newChunk; - } - - private static void useDeephavenNulls( - final IntConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, 
- final WritableIntChunk chunk, - final int offset) throws IOException { - if (conversion == IntConversion.IDENTITY) { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - chunk.set(offset + ii, is.readInt()); - } - } else { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - final int in = is.readInt(); - final int out = in == NULL_INT ? in : conversion.apply(in); - chunk.set(offset + ii, out); - } - } - } - - private static void useValidityBuffer( - final IntConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableIntChunk chunk, - final int offset, - final WritableLongChunk isValid) throws IOException { - final int numElements = nodeInfo.numElements; - final int numValidityWords = (numElements + 63) / 64; - - int ei = 0; - int pendingSkips = 0; - - for (int vi = 0; vi < numValidityWords; ++vi) { - int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); - long validityWord = isValid.get(vi); - do { - if ((validityWord & 1) == 1) { - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Integer.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - ei += pendingSkips; - pendingSkips = 0; - } - chunk.set(offset + ei++, conversion.apply(is.readInt())); - validityWord >>= 1; - bitsLeftInThisWord--; - } else { - final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); - pendingSkips += skips; - validityWord >>= skips; - bitsLeftInThisWord -= skips; - } - } while (bitsLeftInThisWord > 0); - } - - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Integer.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - } - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkReader.java new file mode 100644 index 00000000000..39bce48735c --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkReader.java @@ -0,0 +1,204 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +// ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY +// ****** Edit CharChunkReader and run "./gradlew replicateBarrageUtils" to regenerate +// +// @formatter:off +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.base.verify.Assert; +import io.deephaven.chunk.WritableIntChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; +import java.util.function.Function; +import java.util.function.IntFunction; + +import static io.deephaven.util.QueryConstants.NULL_INT; + +public class IntChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "IntChunkReader"; + private final StreamReaderOptions options; + private final IntConversion conversion; + + @FunctionalInterface + public interface IntConversion { + int apply(int in); + + IntConversion IDENTITY = (int a) -> a; + } + + public IntChunkReader(StreamReaderOptions options) { + this(options, IntConversion.IDENTITY); + } + + public IntChunkReader(StreamReaderOptions options, IntConversion conversion) { + this.options = options; + this.conversion = conversion; + } + + 
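[Editor's note] With the readers now concrete classes, the factory composes them in two ways (see the DefaultChunkReadingFactory hunk earlier in this patch): a primitive-level conversion applied while decoding, and transform(...), which wraps a reader and boxes each value into an object chunk. An illustrative usage sketch, not part of the patch; both lines are adapted nearly verbatim from the factory diff, with `options` and `factor` standing in for the caller's StreamReaderOptions and timestamp scale factor:

    // Rescale non-null longs while decoding (primitive-level conversion).
    ChunkReader scaled = new LongChunkReader(options,
            (long v) -> v == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : v * factor);

    // Box each non-null long into a LocalTime (object transform).
    ChunkReader times = new LongChunkReader(options)
            .transform(v -> v == QueryConstants.NULL_LONG ? null : LocalTime.ofNanoOfDay(v));

The design difference: a conversion stays in the primitive domain, so the identity case costs nothing, while transform(...) allocates an object chunk and is reserved for types like LocalDate/LocalTime that cannot be represented as primitives.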
public ChunkReader transform(Function transform) { + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { + try (final WritableIntChunk inner = IntChunkReader.this.read( + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + + final WritableObjectChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, inner.size()), + WritableObjectChunk::makeWritableChunk, + WritableChunk::asWritableObjectChunk); + + if (outChunk == null) { + // if we're not given an output chunk then we better be writing at the front of the new one + Assert.eqZero(outOffset, "outOffset"); + } + + for (int ii = 0; ii < inner.size(); ++ii) { + int value = inner.get(ii); + chunk.set(outOffset + ii, transform.apply(value)); + } + + return chunk; + } + }; + } + + @Override + public WritableIntChunk read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long payloadBuffer = bufferInfoIter.nextLong(); + + final WritableIntChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, nodeInfo.numElements), + WritableIntChunk::makeWritableChunk, + WritableChunk::asWritableIntChunk); + + if (nodeInfo.numElements == 0) { + return chunk; + } + + final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { + if (options.useDeephavenNulls() && validityBuffer != 0) { + throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); + } + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + final long payloadRead = (long) nodeInfo.numElements * Integer.BYTES; + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); + + if (options.useDeephavenNulls()) { + useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); + } else { + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); + } + + final long overhangPayload = payloadBuffer - payloadRead; + if (overhangPayload > 0) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); + } + } + + return chunk; + } + + private static > T castOrCreateChunk( + final WritableChunk outChunk, + final int numRows, + final IntFunction chunkFactory, + final Function, T> castFunction) { + if (outChunk != null) { + return castFunction.apply(outChunk); + } + final T newChunk = chunkFactory.apply(numRows); + newChunk.setSize(numRows); + return newChunk; + } + + private static void useDeephavenNulls( + final IntConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableIntChunk chunk, + final int offset) throws IOException { + if (conversion == IntConversion.IDENTITY) { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + chunk.set(offset + ii, is.readInt()); + } + } else { + for (int ii = 0; ii < nodeInfo.numElements; 
++ii) { + final int in = is.readInt(); + final int out = in == NULL_INT ? in : conversion.apply(in); + chunk.set(offset + ii, out); + } + } + } + + private static void useValidityBuffer( + final IntConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableIntChunk chunk, + final int offset, + final WritableLongChunk isValid) throws IOException { + final int numElements = nodeInfo.numElements; + final int numValidityWords = (numElements + 63) / 64; + + int ei = 0; + int pendingSkips = 0; + + for (int vi = 0; vi < numValidityWords; ++vi) { + int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); + long validityWord = isValid.get(vi); + do { + if ((validityWord & 1) == 1) { + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Integer.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + ei += pendingSkips; + pendingSkips = 0; + } + chunk.set(offset + ei++, conversion.apply(is.readInt())); + validityWord >>= 1; + bitsLeftInThisWord--; + } else { + final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); + pendingSkips += skips; + validityWord >>= skips; + bitsLeftInThisWord -= skips; + } + } while (bitsLeftInThisWord > 0); + } + + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Integer.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + } + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java index a28c4006d1d..671d972ccce 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java @@ -9,10 +9,7 @@ import java.util.function.ToLongFunction; -import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.rowset.RowSet; @@ -22,17 +19,11 @@ import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.LongChunk; import io.deephaven.chunk.WritableLongChunk; -import io.deephaven.chunk.WritableLongChunk; import io.deephaven.util.type.TypeUtils; import org.jetbrains.annotations.Nullable; -import java.io.DataInput; import java.io.IOException; import java.io.OutputStream; -import java.util.Iterator; -import java.util.PrimitiveIterator; -import java.util.function.Function; -import java.util.function.IntFunction; import static io.deephaven.util.QueryConstants.*; @@ -168,192 +159,4 @@ public int drainTo(final OutputStream outputStream) throws IOException { return LongSizedDataStructure.intSize("LongChunkInputStreamGenerator", bytesWritten); } } - - @FunctionalInterface - public interface LongConversion { - long apply(long in); - - LongConversion IDENTITY = (long a) -> a; - } - - static WritableLongChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - return extractChunkFromInputStreamWithConversion( - options, LongConversion.IDENTITY, fieldNodeIter, 
bufferInfoIter, is, outChunk, outOffset, - totalRows); - } - - static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final StreamReaderOptions options, - final Function transform, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - try (final WritableLongChunk inner = extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - - final WritableObjectChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, inner.size()), - WritableObjectChunk::makeWritableChunk, - WritableChunk::asWritableObjectChunk); - - if (outChunk == null) { - // if we're not given an output chunk then we better be writing at the front of the new one - Assert.eqZero(outOffset, "outOffset"); - } - - for (int ii = 0; ii < inner.size(); ++ii) { - long value = inner.get(ii); - chunk.set(outOffset + ii, transform.apply(value)); - } - - return chunk; - } - } - - static WritableLongChunk extractChunkFromInputStreamWithConversion( - final StreamReaderOptions options, - final LongConversion conversion, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long payloadBuffer = bufferInfoIter.nextLong(); - - final WritableLongChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, nodeInfo.numElements), - WritableLongChunk::makeWritableChunk, - WritableChunk::asWritableLongChunk); - - if (nodeInfo.numElements == 0) { - return chunk; - } - - final int numValidityLongs = options.useDeephavenNulls() ? 
0 : (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { - if (options.useDeephavenNulls() && validityBuffer != 0) { - throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); - } - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity buffer by here - - final long payloadRead = (long) nodeInfo.numElements * Long.BYTES; - Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); - - if (options.useDeephavenNulls()) { - useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); - } else { - useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); - } - - final long overhangPayload = payloadBuffer - payloadRead; - if (overhangPayload > 0) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); - } - } - - return chunk; - } - - private static > T castOrCreateChunk( - final WritableChunk outChunk, - final int numRows, - final IntFunction chunkFactory, - final Function, T> castFunction) { - if (outChunk != null) { - return castFunction.apply(outChunk); - } - final T newChunk = chunkFactory.apply(numRows); - newChunk.setSize(numRows); - return newChunk; - } - - private static void useDeephavenNulls( - final LongConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableLongChunk chunk, - final int offset) throws IOException { - if (conversion == LongConversion.IDENTITY) { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - chunk.set(offset + ii, is.readLong()); - } - } else { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - final long in = is.readLong(); - final long out = in == NULL_LONG ? 
in : conversion.apply(in); - chunk.set(offset + ii, out); - } - } - } - - private static void useValidityBuffer( - final LongConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableLongChunk chunk, - final int offset, - final WritableLongChunk isValid) throws IOException { - final int numElements = nodeInfo.numElements; - final int numValidityWords = (numElements + 63) / 64; - - int ei = 0; - int pendingSkips = 0; - - for (int vi = 0; vi < numValidityWords; ++vi) { - int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); - long validityWord = isValid.get(vi); - do { - if ((validityWord & 1) == 1) { - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Long.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - ei += pendingSkips; - pendingSkips = 0; - } - chunk.set(offset + ei++, conversion.apply(is.readLong())); - validityWord >>= 1; - bitsLeftInThisWord--; - } else { - final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); - pendingSkips += skips; - validityWord >>= skips; - bitsLeftInThisWord -= skips; - } - } while (bitsLeftInThisWord > 0); - } - - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Long.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - } - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkReader.java new file mode 100644 index 00000000000..743e0a37c8f --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkReader.java @@ -0,0 +1,204 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +// ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY +// ****** Edit CharChunkReader and run "./gradlew replicateBarrageUtils" to regenerate +// +// @formatter:off +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.base.verify.Assert; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; +import java.util.function.Function; +import java.util.function.IntFunction; + +import static io.deephaven.util.QueryConstants.NULL_LONG; + +public class LongChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "LongChunkReader"; + private final StreamReaderOptions options; + private final LongConversion conversion; + + @FunctionalInterface + public interface LongConversion { + long apply(long in); + + LongConversion IDENTITY = (long a) -> a; + } + + public LongChunkReader(StreamReaderOptions options) { + this(options, LongConversion.IDENTITY); + } + + public LongChunkReader(StreamReaderOptions options, LongConversion conversion) { + this.options = options; + this.conversion = conversion; + } + + public ChunkReader transform(Function transform) { + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { + try (final WritableLongChunk inner = LongChunkReader.this.read( + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + + final WritableObjectChunk chunk = castOrCreateChunk( + outChunk, + 
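// size the result to whichever is larger: the requested totalRows or the rows actually read
+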
Math.max(totalRows, inner.size()), + WritableObjectChunk::makeWritableChunk, + WritableChunk::asWritableObjectChunk); + + if (outChunk == null) { + // if we're not given an output chunk then we better be writing at the front of the new one + Assert.eqZero(outOffset, "outOffset"); + } + + for (int ii = 0; ii < inner.size(); ++ii) { + long value = inner.get(ii); + chunk.set(outOffset + ii, transform.apply(value)); + } + + return chunk; + } + }; + } + + @Override + public WritableLongChunk read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long payloadBuffer = bufferInfoIter.nextLong(); + + final WritableLongChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, nodeInfo.numElements), + WritableLongChunk::makeWritableChunk, + WritableChunk::asWritableLongChunk); + + if (nodeInfo.numElements == 0) { + return chunk; + } + + final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { + if (options.useDeephavenNulls() && validityBuffer != 0) { + throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); + } + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + final long payloadRead = (long) nodeInfo.numElements * Long.BYTES; + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); + + if (options.useDeephavenNulls()) { + useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); + } else { + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); + } + + final long overhangPayload = payloadBuffer - payloadRead; + if (overhangPayload > 0) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); + } + } + + return chunk; + } + + private static > T castOrCreateChunk( + final WritableChunk outChunk, + final int numRows, + final IntFunction chunkFactory, + final Function, T> castFunction) { + if (outChunk != null) { + return castFunction.apply(outChunk); + } + final T newChunk = chunkFactory.apply(numRows); + newChunk.setSize(numRows); + return newChunk; + } + + private static void useDeephavenNulls( + final LongConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableLongChunk chunk, + final int offset) throws IOException { + if (conversion == LongConversion.IDENTITY) { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + chunk.set(offset + ii, is.readLong()); + } + } else { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + final long in = is.readLong(); + final long out = in == NULL_LONG ? 
in : conversion.apply(in); + chunk.set(offset + ii, out); + } + } + } + + private static void useValidityBuffer( + final LongConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableLongChunk chunk, + final int offset, + final WritableLongChunk isValid) throws IOException { + final int numElements = nodeInfo.numElements; + final int numValidityWords = (numElements + 63) / 64; + + int ei = 0; + int pendingSkips = 0; + + for (int vi = 0; vi < numValidityWords; ++vi) { + int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); + long validityWord = isValid.get(vi); + do { + if ((validityWord & 1) == 1) { + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Long.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + ei += pendingSkips; + pendingSkips = 0; + } + chunk.set(offset + ei++, conversion.apply(is.readLong())); + validityWord >>= 1; + bitsLeftInThisWord--; + } else { + final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); + pendingSkips += skips; + validityWord >>= skips; + bitsLeftInThisWord -= skips; + } + } while (bitsLeftInThisWord > 0); + } + + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Long.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + } + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java index 68a2ecf86b1..4fd81b47d03 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java @@ -7,10 +7,7 @@ // @formatter:off package io.deephaven.extensions.barrage.chunk; -import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.primitive.function.ToShortFunction; @@ -21,17 +18,11 @@ import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.ShortChunk; import io.deephaven.chunk.WritableShortChunk; -import io.deephaven.chunk.WritableLongChunk; import io.deephaven.util.type.TypeUtils; import org.jetbrains.annotations.Nullable; -import java.io.DataInput; import java.io.IOException; import java.io.OutputStream; -import java.util.Iterator; -import java.util.PrimitiveIterator; -import java.util.function.Function; -import java.util.function.IntFunction; import static io.deephaven.util.QueryConstants.*; @@ -167,192 +158,4 @@ public int drainTo(final OutputStream outputStream) throws IOException { return LongSizedDataStructure.intSize("ShortChunkInputStreamGenerator", bytesWritten); } } - - @FunctionalInterface - public interface ShortConversion { - short apply(short in); - - ShortConversion IDENTITY = (short a) -> a; - } - - static WritableShortChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - return extractChunkFromInputStreamWithConversion( - options, ShortConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, 
outOffset, - totalRows); - } - - static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final StreamReaderOptions options, - final Function transform, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - try (final WritableShortChunk inner = extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - - final WritableObjectChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, inner.size()), - WritableObjectChunk::makeWritableChunk, - WritableChunk::asWritableObjectChunk); - - if (outChunk == null) { - // if we're not given an output chunk then we better be writing at the front of the new one - Assert.eqZero(outOffset, "outOffset"); - } - - for (int ii = 0; ii < inner.size(); ++ii) { - short value = inner.get(ii); - chunk.set(outOffset + ii, transform.apply(value)); - } - - return chunk; - } - } - - static WritableShortChunk extractChunkFromInputStreamWithConversion( - final StreamReaderOptions options, - final ShortConversion conversion, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long payloadBuffer = bufferInfoIter.nextLong(); - - final WritableShortChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, nodeInfo.numElements), - WritableShortChunk::makeWritableChunk, - WritableChunk::asWritableShortChunk); - - if (nodeInfo.numElements == 0) { - return chunk; - } - - final int numValidityLongs = options.useDeephavenNulls() ? 
0 : (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { - if (options.useDeephavenNulls() && validityBuffer != 0) { - throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); - } - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity buffer by here - - final long payloadRead = (long) nodeInfo.numElements * Short.BYTES; - Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); - - if (options.useDeephavenNulls()) { - useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); - } else { - useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); - } - - final long overhangPayload = payloadBuffer - payloadRead; - if (overhangPayload > 0) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); - } - } - - return chunk; - } - - private static > T castOrCreateChunk( - final WritableChunk outChunk, - final int numRows, - final IntFunction chunkFactory, - final Function, T> castFunction) { - if (outChunk != null) { - return castFunction.apply(outChunk); - } - final T newChunk = chunkFactory.apply(numRows); - newChunk.setSize(numRows); - return newChunk; - } - - private static void useDeephavenNulls( - final ShortConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableShortChunk chunk, - final int offset) throws IOException { - if (conversion == ShortConversion.IDENTITY) { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - chunk.set(offset + ii, is.readShort()); - } - } else { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - final short in = is.readShort(); - final short out = in == NULL_SHORT ? 
in : conversion.apply(in); - chunk.set(offset + ii, out); - } - } - } - - private static void useValidityBuffer( - final ShortConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableShortChunk chunk, - final int offset, - final WritableLongChunk isValid) throws IOException { - final int numElements = nodeInfo.numElements; - final int numValidityWords = (numElements + 63) / 64; - - int ei = 0; - int pendingSkips = 0; - - for (int vi = 0; vi < numValidityWords; ++vi) { - int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); - long validityWord = isValid.get(vi); - do { - if ((validityWord & 1) == 1) { - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Short.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - ei += pendingSkips; - pendingSkips = 0; - } - chunk.set(offset + ei++, conversion.apply(is.readShort())); - validityWord >>= 1; - bitsLeftInThisWord--; - } else { - final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); - pendingSkips += skips; - validityWord >>= skips; - bitsLeftInThisWord -= skips; - } - } while (bitsLeftInThisWord > 0); - } - - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Short.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - } - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkReader.java new file mode 100644 index 00000000000..56c17c2c11f --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkReader.java @@ -0,0 +1,204 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +// ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY +// ****** Edit CharChunkReader and run "./gradlew replicateBarrageUtils" to regenerate +// +// @formatter:off +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.base.verify.Assert; +import io.deephaven.chunk.WritableShortChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; +import java.util.function.Function; +import java.util.function.IntFunction; + +import static io.deephaven.util.QueryConstants.NULL_SHORT; + +public class ShortChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "ShortChunkReader"; + private final StreamReaderOptions options; + private final ShortConversion conversion; + + @FunctionalInterface + public interface ShortConversion { + short apply(short in); + + ShortConversion IDENTITY = (short a) -> a; + } + + public ShortChunkReader(StreamReaderOptions options) { + this(options, ShortConversion.IDENTITY); + } + + public ShortChunkReader(StreamReaderOptions options, ShortConversion conversion) { + this.options = options; + this.conversion = conversion; + } + + public ChunkReader transform(Function transform) { + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { + try (final WritableShortChunk inner = ShortChunkReader.this.read( + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + + final WritableObjectChunk chunk = castOrCreateChunk( + 
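// reuse the caller-supplied output chunk when present; otherwise allocate a fresh one
+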
outChunk, + Math.max(totalRows, inner.size()), + WritableObjectChunk::makeWritableChunk, + WritableChunk::asWritableObjectChunk); + + if (outChunk == null) { + // if we're not given an output chunk then we better be writing at the front of the new one + Assert.eqZero(outOffset, "outOffset"); + } + + for (int ii = 0; ii < inner.size(); ++ii) { + short value = inner.get(ii); + chunk.set(outOffset + ii, transform.apply(value)); + } + + return chunk; + } + }; + } + + @Override + public WritableShortChunk read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long payloadBuffer = bufferInfoIter.nextLong(); + + final WritableShortChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, nodeInfo.numElements), + WritableShortChunk::makeWritableChunk, + WritableChunk::asWritableShortChunk); + + if (nodeInfo.numElements == 0) { + return chunk; + } + + final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { + if (options.useDeephavenNulls() && validityBuffer != 0) { + throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); + } + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + final long payloadRead = (long) nodeInfo.numElements * Short.BYTES; + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); + + if (options.useDeephavenNulls()) { + useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); + } else { + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); + } + + final long overhangPayload = payloadBuffer - payloadRead; + if (overhangPayload > 0) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); + } + } + + return chunk; + } + + private static > T castOrCreateChunk( + final WritableChunk outChunk, + final int numRows, + final IntFunction chunkFactory, + final Function, T> castFunction) { + if (outChunk != null) { + return castFunction.apply(outChunk); + } + final T newChunk = chunkFactory.apply(numRows); + newChunk.setSize(numRows); + return newChunk; + } + + private static void useDeephavenNulls( + final ShortConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableShortChunk chunk, + final int offset) throws IOException { + if (conversion == ShortConversion.IDENTITY) { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + chunk.set(offset + ii, is.readShort()); + } + } else { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + final short in = is.readShort(); + final short out = in == NULL_SHORT ? 
in : conversion.apply(in); + chunk.set(offset + ii, out); + } + } + } + + private static void useValidityBuffer( + final ShortConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableShortChunk chunk, + final int offset, + final WritableLongChunk isValid) throws IOException { + final int numElements = nodeInfo.numElements; + final int numValidityWords = (numElements + 63) / 64; + + int ei = 0; + int pendingSkips = 0; + + for (int vi = 0; vi < numValidityWords; ++vi) { + int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); + long validityWord = isValid.get(vi); + do { + if ((validityWord & 1) == 1) { + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Short.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + ei += pendingSkips; + pendingSkips = 0; + } + chunk.set(offset + ei++, conversion.apply(is.readShort())); + validityWord >>= 1; + bitsLeftInThisWord--; + } else { + final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); + pendingSkips += skips; + validityWord >>= skips; + bitsLeftInThisWord -= skips; + } + } while (bitsLeftInThisWord > 0); + } + + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Short.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + } + } +} diff --git a/replication/static/src/main/java/io/deephaven/replicators/ReplicateBarrageUtils.java b/replication/static/src/main/java/io/deephaven/replicators/ReplicateBarrageUtils.java index 63670ad93d7..6824f8d91f9 100644 --- a/replication/static/src/main/java/io/deephaven/replicators/ReplicateBarrageUtils.java +++ b/replication/static/src/main/java/io/deephaven/replicators/ReplicateBarrageUtils.java @@ -25,6 +25,9 @@ public static void main(final String[] args) throws IOException { fixupChunkInputStreamGen(CHUNK_PACKAGE + "/LongChunkInputStreamGenerator.java", "Long"); fixupChunkInputStreamGen(CHUNK_PACKAGE + "/DoubleChunkInputStreamGenerator.java", "Double"); + ReplicatePrimitiveCode.charToAllButBoolean("replicateBarrageUtils", + CHUNK_PACKAGE + "/CharChunkReader.java"); + ReplicatePrimitiveCode.charToAllButBoolean("replicateBarrageUtils", CHUNK_PACKAGE + "/array/CharArrayExpansionKernel.java"); From db7a35ddd3e4f1cc1024a4c7f6f0fe275ae36486 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 25 Jun 2024 13:56:21 -0500 Subject: [PATCH 088/219] Commit #5, also boolean chunk reader --- .../BooleanChunkInputStreamGenerator.java | 126 ---------------- .../barrage/chunk/BooleanChunkReader.java | 141 ++++++++++++++++++ .../chunk/DefaultChunkReadingFactory.java | 4 +- 3 files changed, 142 insertions(+), 129 deletions(-) create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkReader.java diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkInputStreamGenerator.java index 8c1503b67f8..e1f478ef5ef 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkInputStreamGenerator.java @@ -153,130 +153,4 @@ public int drainTo(final OutputStream outputStream) throws IOException { return LongSizedDataStructure.intSize(DEBUG_NAME, bytesWritten); } } - - @FunctionalInterface - public interface ByteConversion { - byte apply(byte in); - - ByteConversion IDENTITY 
= (byte a) -> a; - } - - static WritableChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - return extractChunkFromInputStreamWithConversion( - options, ByteConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - - static WritableChunk extractChunkFromInputStreamWithConversion( - final StreamReaderOptions options, - final ByteConversion conversion, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long payloadBuffer = bufferInfoIter.nextLong(); - - final WritableByteChunk chunk; - if (outChunk != null) { - chunk = outChunk.asWritableByteChunk(); - } else { - final int numRows = Math.max(totalRows, nodeInfo.numElements); - chunk = WritableByteChunk.makeWritableChunk(numRows); - chunk.setSize(numRows); - } - - if (nodeInfo.numElements == 0) { - return chunk; - } - - final int numValidityLongs = (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity buffer by here - - final int numPayloadBytesNeeded = (int) ((nodeInfo.numElements + 7L) / 8L); - if (payloadBuffer < numPayloadBytesNeeded) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } - - // cannot use deephaven nulls as booleans are not nullable - useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); - - // flight requires that the payload buffer be padded to multiples of 8 bytes - final long payloadRead = getNumLongsForBitPackOfSize(nodeInfo.numElements) * 8L; - final long overhangPayload = payloadBuffer - payloadRead; - if (overhangPayload > 0) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); - } - } - - return chunk; - } - - private static void useValidityBuffer( - final ByteConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableByteChunk chunk, - final int offset, - final WritableLongChunk isValid) throws IOException { - final int numElements = nodeInfo.numElements; - final int numValidityWords = (numElements + 63) / 64; - - int ei = 0; - int pendingSkips = 0; - - for (int vi = 0; vi < numValidityWords; ++vi) { - int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); - long validityWord = isValid.get(vi); - long payloadWord = is.readLong(); - do { - if ((validityWord & 1) == 1) { - if (pendingSkips > 0) { - chunk.fillWithNullValue(offset + ei, pendingSkips); - ei += pendingSkips; - pendingSkips = 0; - } - final byte value = (payloadWord & 1) == 1 ? 
BooleanUtils.TRUE_BOOLEAN_AS_BYTE - : BooleanUtils.FALSE_BOOLEAN_AS_BYTE; - chunk.set(offset + ei++, conversion.apply(value)); - validityWord >>= 1; - payloadWord >>= 1; - bitsLeftInThisWord--; - } else { - final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); - pendingSkips += skips; - validityWord >>= skips; - payloadWord >>= skips; - bitsLeftInThisWord -= skips; - } - } while (bitsLeftInThisWord > 0); - } - - if (pendingSkips > 0) { - chunk.fillWithNullValue(offset + ei, pendingSkips); - } - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkReader.java new file mode 100644 index 00000000000..da0cc96cad4 --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkReader.java @@ -0,0 +1,141 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.chunk.WritableByteChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.util.BooleanUtils; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; + +import static io.deephaven.extensions.barrage.chunk.BaseChunkInputStreamGenerator.getNumLongsForBitPackOfSize; + +public class BooleanChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "BooleanChunkReader"; + + @FunctionalInterface + public interface ByteConversion { + byte apply(byte in); + + ByteConversion IDENTITY = (byte a) -> a; + } + + private final ByteConversion conversion; + + public BooleanChunkReader() { + this(ByteConversion.IDENTITY); + } + + public BooleanChunkReader(ByteConversion conversion) { + this.conversion = conversion; + } + + @Override + public WritableChunk read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long payloadBuffer = bufferInfoIter.nextLong(); + + final WritableByteChunk chunk; + if (outChunk != null) { + chunk = outChunk.asWritableByteChunk(); + } else { + final int numRows = Math.max(totalRows, nodeInfo.numElements); + chunk = WritableByteChunk.makeWritableChunk(numRows); + chunk.setSize(numRows); + } + + if (nodeInfo.numElements == 0) { + return chunk; + } + + final int numValidityLongs = (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + final int numPayloadBytesNeeded = (int) ((nodeInfo.numElements + 7L) / 8L); + if (payloadBuffer < numPayloadBytesNeeded) { + throw new 
IllegalStateException("payload buffer is too short for expected number of elements"); + } + + // cannot use deephaven nulls as booleans are not nullable + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); + + // flight requires that the payload buffer be padded to multiples of 8 bytes + final long payloadRead = getNumLongsForBitPackOfSize(nodeInfo.numElements) * 8L; + final long overhangPayload = payloadBuffer - payloadRead; + if (overhangPayload > 0) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); + } + } + + return chunk; + } + + + private static void useValidityBuffer( + final ByteConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableByteChunk chunk, + final int offset, + final WritableLongChunk isValid) throws IOException { + final int numElements = nodeInfo.numElements; + final int numValidityWords = (numElements + 63) / 64; + + int ei = 0; + int pendingSkips = 0; + + for (int vi = 0; vi < numValidityWords; ++vi) { + int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); + long validityWord = isValid.get(vi); + long payloadWord = is.readLong(); + do { + if ((validityWord & 1) == 1) { + if (pendingSkips > 0) { + chunk.fillWithNullValue(offset + ei, pendingSkips); + ei += pendingSkips; + pendingSkips = 0; + } + final byte value = (payloadWord & 1) == 1 ? BooleanUtils.TRUE_BOOLEAN_AS_BYTE + : BooleanUtils.FALSE_BOOLEAN_AS_BYTE; + chunk.set(offset + ei++, conversion.apply(value)); + validityWord >>= 1; + payloadWord >>= 1; + bitsLeftInThisWord--; + } else { + final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); + pendingSkips += skips; + validityWord >>= skips; + payloadWord >>= skips; + bitsLeftInThisWord -= skips; + } + } while (bitsLeftInThisWord > 0); + } + + if (pendingSkips > 0) { + chunk.fillWithNullValue(offset + ei, pendingSkips); + } + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java index bbf5b398fe7..d03a0758701 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java @@ -40,9 +40,7 @@ public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int return new CharChunkReader(options); case Byte: if (typeInfo.type() == Boolean.class || typeInfo.type() == boolean.class) { - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> BooleanChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new BooleanChunkReader(); } return new ByteChunkReader(options); case Short: From f81ad20f247739cecfffa92c2309624cfc92c10e Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Wed, 26 Jun 2024 12:07:09 -0500 Subject: [PATCH 089/219] Round two, commit #1, apply the same to chunkinputstreamgenerator --- .../barrage/BarrageStreamGeneratorImpl.java | 6 +- .../ChunkListInputStreamGenerator.java | 9 +- .../chunk/ChunkInputStreamGenerator.java | 160 ---------------- .../ChunkInputStreamGeneratorFactory.java | 30 +++ ...faultChunkInputStreamGeneratorFactory.java | 178 ++++++++++++++++++ .../VarListChunkInputStreamGenerator.java | 14 +- .../VectorChunkInputStreamGenerator.java | 13 
+- .../chunk/BarrageColumnRoundTripTest.java | 4 +- 8 files changed, 229 insertions(+), 185 deletions(-) create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGeneratorFactory.java create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkInputStreamGeneratorFactory.java diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java index 9aa0c9376c5..fdc161a7797 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java @@ -25,6 +25,7 @@ import io.deephaven.engine.rowset.impl.ExternalizableRowSetUtils; import io.deephaven.engine.table.impl.util.BarrageMessage; import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator; +import io.deephaven.extensions.barrage.chunk.DefaultChunkInputStreamGeneratorFactory; import io.deephaven.extensions.barrage.chunk.SingleElementListHeaderInputStreamGenerator; import io.deephaven.extensions.barrage.util.BarrageProtoUtil.ExposedByteArrayOutputStream; import io.deephaven.extensions.barrage.util.BarrageUtil; @@ -124,7 +125,8 @@ public static class ModColumnGenerator implements SafeCloseable { ModColumnGenerator(final BarrageMessage.ModColumnData col) throws IOException { rowsModified = new RowSetGenerator(col.rowsModified); - data = new ChunkListInputStreamGenerator(col.type, col.componentType, col.data, col.chunkType); + data = new ChunkListInputStreamGenerator(col.type, col.componentType, col.data, col.chunkType, + DefaultChunkInputStreamGeneratorFactory.INSTANCE); } @Override @@ -174,7 +176,7 @@ public BarrageStreamGeneratorImpl(final BarrageMessage message, for (int i = 0; i < message.addColumnData.length; ++i) { BarrageMessage.AddColumnData columnData = message.addColumnData[i]; addColumnData[i] = new ChunkListInputStreamGenerator(columnData.type, columnData.componentType, - columnData.data, columnData.chunkType); + columnData.data, columnData.chunkType, DefaultChunkInputStreamGeneratorFactory.INSTANCE); } modColumnData = new ModColumnGenerator[message.modColumnData.length]; diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/ChunkListInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/ChunkListInputStreamGenerator.java index f64be56149f..2e73541fce1 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/ChunkListInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/ChunkListInputStreamGenerator.java @@ -8,6 +8,7 @@ import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.rowset.RowSet; import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator; +import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGeneratorFactory; import io.deephaven.extensions.barrage.util.StreamReaderOptions; import io.deephaven.util.SafeCloseable; @@ -20,20 +21,18 @@ public class ChunkListInputStreamGenerator implements SafeCloseable { private final ChunkInputStreamGenerator emptyGenerator; public ChunkListInputStreamGenerator(Class type, Class componentType, List> data, - ChunkType chunkType) { + ChunkType chunkType, ChunkInputStreamGeneratorFactory factory) { // create an input stream generator for each chunk 
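// the supplied factory decides how each chunk type is encoded; rowOffset accumulates
        // across chunks so each generator knows where its rows begin in the overall message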
ChunkInputStreamGenerator[] generators = new ChunkInputStreamGenerator[data.size()]; long rowOffset = 0; for (int i = 0; i < data.size(); ++i) { final Chunk valuesChunk = data.get(i); - generators[i] = ChunkInputStreamGenerator.makeInputStreamGenerator(chunkType, type, componentType, - valuesChunk, rowOffset); + generators[i] = factory.makeInputStreamGenerator(chunkType, type, componentType, valuesChunk, rowOffset); rowOffset += valuesChunk.size(); } this.generators = Arrays.asList(generators); - emptyGenerator = ChunkInputStreamGenerator.makeInputStreamGenerator( - chunkType, type, componentType, chunkType.getEmptyChunk(), 0); + emptyGenerator = factory.makeInputStreamGenerator(chunkType, type, componentType, chunkType.getEmptyChunk(), 0); } public List generators() { diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java index a2ae09fb1d0..80ceccdaba5 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java @@ -3,32 +3,19 @@ // package io.deephaven.extensions.barrage.chunk; -import com.google.common.base.Charsets; -import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.WritableLongChunk; import io.deephaven.chunk.attributes.Values; -import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.rowset.RowSet; import io.deephaven.extensions.barrage.util.DefensiveDrainable; import io.deephaven.extensions.barrage.util.StreamReaderOptions; -import io.deephaven.time.DateTimeUtils; import io.deephaven.util.QueryConstants; import io.deephaven.util.datastructures.LongSizedDataStructure; -import io.deephaven.chunk.Chunk; import io.deephaven.chunk.ChunkType; import io.deephaven.util.SafeCloseable; -import io.deephaven.vector.Vector; import org.jetbrains.annotations.Nullable; import java.io.DataInput; import java.io.IOException; -import java.math.BigDecimal; -import java.math.BigInteger; -import java.time.Instant; -import java.time.LocalDate; -import java.time.LocalTime; -import java.time.ZonedDateTime; import java.util.Iterator; import java.util.PrimitiveIterator; @@ -37,153 +24,6 @@ public interface ChunkInputStreamGenerator extends SafeCloseable { long MIN_LOCAL_DATE_VALUE = QueryConstants.MIN_LONG / MS_PER_DAY; long MAX_LOCAL_DATE_VALUE = QueryConstants.MAX_LONG / MS_PER_DAY; - static ChunkInputStreamGenerator makeInputStreamGenerator( - final ChunkType chunkType, - final Class type, - final Class componentType, - final Chunk chunk, - final long rowOffset) { - // TODO (deephaven-core#5453): pass in ArrowType to enable ser/deser of single java class in multiple formats - switch (chunkType) { - case Boolean: - throw new UnsupportedOperationException("Booleans are reinterpreted as bytes"); - case Char: - return new CharChunkInputStreamGenerator(chunk.asCharChunk(), Character.BYTES, rowOffset); - case Byte: - if (type == Boolean.class || type == boolean.class) { - // internally we represent booleans as bytes, but the wire format respects arrow's specification - return new BooleanChunkInputStreamGenerator(chunk.asByteChunk(), rowOffset); - } - return new ByteChunkInputStreamGenerator(chunk.asByteChunk(), Byte.BYTES, rowOffset); - case Short: - return new ShortChunkInputStreamGenerator(chunk.asShortChunk(), 
Short.BYTES, rowOffset); - case Int: - return new IntChunkInputStreamGenerator(chunk.asIntChunk(), Integer.BYTES, rowOffset); - case Long: - return new LongChunkInputStreamGenerator(chunk.asLongChunk(), Long.BYTES, rowOffset); - case Float: - return new FloatChunkInputStreamGenerator(chunk.asFloatChunk(), Float.BYTES, rowOffset); - case Double: - return new DoubleChunkInputStreamGenerator(chunk.asDoubleChunk(), Double.BYTES, rowOffset); - case Object: - if (type.isArray()) { - if (componentType == byte.class) { - return new VarBinaryChunkInputStreamGenerator<>(chunk.asObjectChunk(), rowOffset, - (out, item) -> out.write((byte[]) item)); - } else { - return new VarListChunkInputStreamGenerator<>(type, chunk.asObjectChunk(), rowOffset); - } - } - if (Vector.class.isAssignableFrom(type)) { - // noinspection unchecked - return new VectorChunkInputStreamGenerator( - (Class>) type, componentType, chunk.asObjectChunk(), rowOffset); - } - if (type == String.class) { - return new VarBinaryChunkInputStreamGenerator(chunk.asObjectChunk(), rowOffset, - (out, str) -> out.write(str.getBytes(Charsets.UTF_8))); - } - if (type == BigInteger.class) { - return new VarBinaryChunkInputStreamGenerator(chunk.asObjectChunk(), rowOffset, - (out, item) -> out.write(item.toByteArray())); - } - if (type == BigDecimal.class) { - return new VarBinaryChunkInputStreamGenerator(chunk.asObjectChunk(), rowOffset, - (out, item) -> { - final BigDecimal normal = item.stripTrailingZeros(); - final int v = normal.scale(); - // Write as little endian, arrow endianness. - out.write(0xFF & v); - out.write(0xFF & (v >> 8)); - out.write(0xFF & (v >> 16)); - out.write(0xFF & (v >> 24)); - out.write(normal.unscaledValue().toByteArray()); - }); - } - if (type == Instant.class) { - // This code path is utilized for arrays and vectors of Instant, which cannot be reinterpreted. - ObjectChunk objChunk = chunk.asObjectChunk(); - WritableLongChunk outChunk = WritableLongChunk.makeWritableChunk(objChunk.size()); - for (int i = 0; i < objChunk.size(); ++i) { - outChunk.set(i, DateTimeUtils.epochNanos(objChunk.get(i))); - } - if (chunk instanceof PoolableChunk) { - ((PoolableChunk) chunk).close(); - } - return new LongChunkInputStreamGenerator(outChunk, Long.BYTES, rowOffset); - } - if (type == ZonedDateTime.class) { - // This code path is utilized for arrays and vectors of Instant, which cannot be reinterpreted. 
- ObjectChunk objChunk = chunk.asObjectChunk(); - WritableLongChunk outChunk = WritableLongChunk.makeWritableChunk(objChunk.size()); - for (int i = 0; i < objChunk.size(); ++i) { - outChunk.set(i, DateTimeUtils.epochNanos(objChunk.get(i))); - } - if (chunk instanceof PoolableChunk) { - ((PoolableChunk) chunk).close(); - } - return new LongChunkInputStreamGenerator(outChunk, Long.BYTES, rowOffset); - } - if (type == Boolean.class) { - return BooleanChunkInputStreamGenerator.convertBoxed(chunk.asObjectChunk(), rowOffset); - } - if (type == Byte.class) { - return ByteChunkInputStreamGenerator.convertBoxed(chunk.asObjectChunk(), rowOffset); - } - if (type == Character.class) { - return CharChunkInputStreamGenerator.convertBoxed(chunk.asObjectChunk(), rowOffset); - } - if (type == Double.class) { - return DoubleChunkInputStreamGenerator.convertBoxed(chunk.asObjectChunk(), rowOffset); - } - if (type == Float.class) { - return FloatChunkInputStreamGenerator.convertBoxed(chunk.asObjectChunk(), rowOffset); - } - if (type == Integer.class) { - return IntChunkInputStreamGenerator.convertBoxed(chunk.asObjectChunk(), rowOffset); - } - if (type == Long.class) { - return LongChunkInputStreamGenerator.convertBoxed(chunk.asObjectChunk(), rowOffset); - } - if (type == Short.class) { - return ShortChunkInputStreamGenerator.convertBoxed(chunk.asObjectChunk(), rowOffset); - } - if (type == LocalDate.class) { - return LongChunkInputStreamGenerator.convertWithTransform(chunk.asObjectChunk(), - rowOffset, date -> { - if (date == null) { - return QueryConstants.NULL_LONG; - } - final long epochDay = date.toEpochDay(); - if (epochDay < MIN_LOCAL_DATE_VALUE || epochDay > MAX_LOCAL_DATE_VALUE) { - throw new IllegalArgumentException("Date out of range: " + date + " (" + epochDay - + " not in [" + MIN_LOCAL_DATE_VALUE + ", " + MAX_LOCAL_DATE_VALUE + "])"); - } - return epochDay * MS_PER_DAY; - }); - } - if (type == LocalTime.class) { - return LongChunkInputStreamGenerator.convertWithTransform(chunk.asObjectChunk(), - rowOffset, time -> { - if (time == null) { - return QueryConstants.NULL_LONG; - } - final long nanoOfDay = time.toNanoOfDay(); - if (nanoOfDay < 0) { - throw new IllegalArgumentException("Time out of range: " + time); - } - return nanoOfDay; - }); - } - // TODO (core#936): support column conversion modes - - return new VarBinaryChunkInputStreamGenerator<>(chunk.asObjectChunk(), rowOffset, - (out, item) -> out.write(item.toString().getBytes(Charsets.UTF_8))); - default: - throw new UnsupportedOperationException(); - } - } - @Deprecated static WritableChunk extractChunkFromInputStream( final StreamReaderOptions options, diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGeneratorFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGeneratorFactory.java new file mode 100644 index 00000000000..88b65ad9af9 --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGeneratorFactory.java @@ -0,0 +1,30 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.chunk.Chunk; +import io.deephaven.chunk.ChunkType; +import io.deephaven.chunk.attributes.Values; + +/** + * + */ +public interface ChunkInputStreamGeneratorFactory { + /** + * + * @param chunkType + * @param type + * @param componentType + * @param chunk + * @param rowOffset + * @return + * @param + */ + 
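// builds the appropriate generator for the given chunk; rowOffset is the index of the
+    // chunk's first row within the overall logical message
+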
<T> ChunkInputStreamGenerator makeInputStreamGenerator(
+            final ChunkType chunkType,
+            final Class<T> type,
+            final Class<?> componentType,
+            final Chunk<Values> chunk,
+            final long rowOffset);
+}
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkInputStreamGeneratorFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkInputStreamGeneratorFactory.java
new file mode 100644
index 00000000000..04ce8208170
--- /dev/null
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkInputStreamGeneratorFactory.java
@@ -0,0 +1,178 @@
+//
+// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
+//
+package io.deephaven.extensions.barrage.chunk;
+
+import com.google.common.base.Charsets;
+import io.deephaven.chunk.Chunk;
+import io.deephaven.chunk.ChunkType;
+import io.deephaven.chunk.ObjectChunk;
+import io.deephaven.chunk.WritableLongChunk;
+import io.deephaven.chunk.attributes.Values;
+import io.deephaven.chunk.util.pools.PoolableChunk;
+import io.deephaven.time.DateTimeUtils;
+import io.deephaven.util.QueryConstants;
+import io.deephaven.vector.Vector;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.time.Instant;
+import java.time.LocalDate;
+import java.time.LocalTime;
+import java.time.ZonedDateTime;
+
+import static io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator.MAX_LOCAL_DATE_VALUE;
+import static io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator.MIN_LOCAL_DATE_VALUE;
+import static io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator.MS_PER_DAY;
+
+/**
+ * JVM implementation of {@link ChunkInputStreamGeneratorFactory}, suitable for use in Java clients and servers.
+ */
+public class DefaultChunkInputStreamGeneratorFactory implements ChunkInputStreamGeneratorFactory {
+    public static final DefaultChunkInputStreamGeneratorFactory INSTANCE =
+            new DefaultChunkInputStreamGeneratorFactory();
+
+    @Override
+    public <T> ChunkInputStreamGenerator makeInputStreamGenerator(ChunkType chunkType, Class<T> type,
+            Class<?> componentType, Chunk<Values> chunk, long rowOffset) {
+        // TODO (deephaven-core#5453): pass in ArrowType to enable ser/deser of single java class in multiple formats
+        switch (chunkType) {
+            case Boolean:
+                throw new UnsupportedOperationException("Booleans are reinterpreted as bytes");
+            case Char:
+                return new CharChunkInputStreamGenerator(chunk.asCharChunk(), Character.BYTES, rowOffset);
+            case Byte:
+                if (type == Boolean.class || type == boolean.class) {
+                    // internally we represent booleans as bytes, but the wire format respects arrow's specification
+                    return new BooleanChunkInputStreamGenerator(chunk.asByteChunk(), rowOffset);
+                }
+                return new ByteChunkInputStreamGenerator(chunk.asByteChunk(), Byte.BYTES, rowOffset);
+            case Short:
+                return new ShortChunkInputStreamGenerator(chunk.asShortChunk(), Short.BYTES, rowOffset);
+            case Int:
+                return new IntChunkInputStreamGenerator(chunk.asIntChunk(), Integer.BYTES, rowOffset);
+            case Long:
+                return new LongChunkInputStreamGenerator(chunk.asLongChunk(), Long.BYTES, rowOffset);
+            case Float:
+                return new FloatChunkInputStreamGenerator(chunk.asFloatChunk(), Float.BYTES, rowOffset);
+            case Double:
+                return new DoubleChunkInputStreamGenerator(chunk.asDoubleChunk(), Double.BYTES, rowOffset);
+            case Object:
+                if (type.isArray()) {
+                    if (componentType == byte.class) {
+                        return new VarBinaryChunkInputStreamGenerator<>(chunk.asObjectChunk(), rowOffset,
+                                (out, item) -> out.write((byte[]) item));
+                    } else {
+                        return new VarListChunkInputStreamGenerator<>(type, chunk.asObjectChunk(), rowOffset,
this); + } + } + if (Vector.class.isAssignableFrom(type)) { + // noinspection unchecked + return new VectorChunkInputStreamGenerator( + (Class>) type, componentType, chunk.asObjectChunk(), rowOffset, this); + } + if (type == String.class) { + return new VarBinaryChunkInputStreamGenerator(chunk.asObjectChunk(), rowOffset, + (out, str) -> out.write(str.getBytes(Charsets.UTF_8))); + } + if (type == BigInteger.class) { + return new VarBinaryChunkInputStreamGenerator(chunk.asObjectChunk(), rowOffset, + (out, item) -> out.write(item.toByteArray())); + } + if (type == BigDecimal.class) { + return new VarBinaryChunkInputStreamGenerator(chunk.asObjectChunk(), rowOffset, + (out, item) -> { + final BigDecimal normal = item.stripTrailingZeros(); + final int v = normal.scale(); + // Write as little endian, arrow endianness. + out.write(0xFF & v); + out.write(0xFF & (v >> 8)); + out.write(0xFF & (v >> 16)); + out.write(0xFF & (v >> 24)); + out.write(normal.unscaledValue().toByteArray()); + }); + } + if (type == Instant.class) { + // This code path is utilized for arrays and vectors of Instant, which cannot be reinterpreted. + ObjectChunk objChunk = chunk.asObjectChunk(); + WritableLongChunk outChunk = WritableLongChunk.makeWritableChunk(objChunk.size()); + for (int i = 0; i < objChunk.size(); ++i) { + outChunk.set(i, DateTimeUtils.epochNanos(objChunk.get(i))); + } + if (chunk instanceof PoolableChunk) { + ((PoolableChunk) chunk).close(); + } + return new LongChunkInputStreamGenerator(outChunk, Long.BYTES, rowOffset); + } + if (type == ZonedDateTime.class) { + // This code path is utilized for arrays and vectors of Instant, which cannot be reinterpreted. + ObjectChunk objChunk = chunk.asObjectChunk(); + WritableLongChunk outChunk = WritableLongChunk.makeWritableChunk(objChunk.size()); + for (int i = 0; i < objChunk.size(); ++i) { + outChunk.set(i, DateTimeUtils.epochNanos(objChunk.get(i))); + } + if (chunk instanceof PoolableChunk) { + ((PoolableChunk) chunk).close(); + } + return new LongChunkInputStreamGenerator(outChunk, Long.BYTES, rowOffset); + } + if (type == Boolean.class) { + return BooleanChunkInputStreamGenerator.convertBoxed(chunk.asObjectChunk(), rowOffset); + } + if (type == Byte.class) { + return ByteChunkInputStreamGenerator.convertBoxed(chunk.asObjectChunk(), rowOffset); + } + if (type == Character.class) { + return CharChunkInputStreamGenerator.convertBoxed(chunk.asObjectChunk(), rowOffset); + } + if (type == Double.class) { + return DoubleChunkInputStreamGenerator.convertBoxed(chunk.asObjectChunk(), rowOffset); + } + if (type == Float.class) { + return FloatChunkInputStreamGenerator.convertBoxed(chunk.asObjectChunk(), rowOffset); + } + if (type == Integer.class) { + return IntChunkInputStreamGenerator.convertBoxed(chunk.asObjectChunk(), rowOffset); + } + if (type == Long.class) { + return LongChunkInputStreamGenerator.convertBoxed(chunk.asObjectChunk(), rowOffset); + } + if (type == Short.class) { + return ShortChunkInputStreamGenerator.convertBoxed(chunk.asObjectChunk(), rowOffset); + } + if (type == LocalDate.class) { + return LongChunkInputStreamGenerator.convertWithTransform(chunk.asObjectChunk(), + rowOffset, date -> { + if (date == null) { + return QueryConstants.NULL_LONG; + } + final long epochDay = date.toEpochDay(); + if (epochDay < MIN_LOCAL_DATE_VALUE || epochDay > MAX_LOCAL_DATE_VALUE) { + throw new IllegalArgumentException("Date out of range: " + date + " (" + epochDay + + " not in [" + MIN_LOCAL_DATE_VALUE + ", " + MAX_LOCAL_DATE_VALUE + "])"); + } + return epochDay * 
MS_PER_DAY; + }); + } + if (type == LocalTime.class) { + return LongChunkInputStreamGenerator.convertWithTransform(chunk.asObjectChunk(), + rowOffset, time -> { + if (time == null) { + return QueryConstants.NULL_LONG; + } + final long nanoOfDay = time.toNanoOfDay(); + if (nanoOfDay < 0) { + throw new IllegalArgumentException("Time out of range: " + time); + } + return nanoOfDay; + }); + } + // TODO (core#936): support column conversion modes + + return new VarBinaryChunkInputStreamGenerator<>(chunk.asObjectChunk(), rowOffset, + (out, item) -> out.write(item.toString().getBytes(Charsets.UTF_8))); + default: + throw new UnsupportedOperationException(); + } + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java index 470b0b87291..dc96451369d 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java @@ -13,8 +13,6 @@ import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.WritableIntChunk; -import io.deephaven.chunk.WritableLongChunk; -import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetBuilderSequential; @@ -23,11 +21,8 @@ import io.deephaven.util.mutable.MutableInt; import org.jetbrains.annotations.Nullable; -import java.io.DataInput; import java.io.IOException; import java.io.OutputStream; -import java.util.Iterator; -import java.util.PrimitiveIterator; public class VarListChunkInputStreamGenerator extends BaseChunkInputStreamGenerator> { private static final String DEBUG_NAME = "VarListChunkInputStreamGenerator"; @@ -37,9 +32,13 @@ public class VarListChunkInputStreamGenerator extends BaseChunkInputStreamGen private WritableIntChunk offsets; private ChunkInputStreamGenerator innerGenerator; - VarListChunkInputStreamGenerator(final Class type, final ObjectChunk chunk, final long rowOffset) { + private final ChunkInputStreamGeneratorFactory factory; + + VarListChunkInputStreamGenerator(final Class type, final ObjectChunk chunk, final long rowOffset, + ChunkInputStreamGeneratorFactory factory) { super(chunk, 0, rowOffset); this.type = type; + this.factory = factory; } private synchronized void computePayload() { @@ -58,8 +57,7 @@ private synchronized void computePayload() { offsets = WritableIntChunk.makeWritableChunk(chunk.size() + 1); final WritableChunk innerChunk = kernel.expand(chunk, offsets); - innerGenerator = ChunkInputStreamGenerator.makeInputStreamGenerator( - chunkType, myType, myComponentType, innerChunk, 0); + innerGenerator = factory.makeInputStreamGenerator(chunkType, myType, myComponentType, innerChunk, 0); } @Override diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java index 620ae84541f..164aa6d0a96 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java @@ -9,8 +9,6 @@ import 
io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.WritableIntChunk; -import io.deephaven.chunk.WritableLongChunk; -import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.ChunkPositions; import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; @@ -24,11 +22,8 @@ import io.deephaven.util.mutable.MutableInt; import org.jetbrains.annotations.Nullable; -import java.io.DataInput; import java.io.IOException; import java.io.OutputStream; -import java.util.Iterator; -import java.util.PrimitiveIterator; public class VectorChunkInputStreamGenerator extends BaseChunkInputStreamGenerator, Values>> { private static final String DEBUG_NAME = "VarListChunkInputStreamGenerator"; @@ -38,12 +33,15 @@ public class VectorChunkInputStreamGenerator extends BaseChunkInputStreamGenerat private WritableIntChunk offsets; private ChunkInputStreamGenerator innerGenerator; + private final ChunkInputStreamGeneratorFactory factory; + VectorChunkInputStreamGenerator( final Class> type, final Class componentType, final ObjectChunk, Values> chunk, - final long rowOffset) { + final long rowOffset, ChunkInputStreamGeneratorFactory factory) { super(chunk, 0, rowOffset); + this.factory = factory; this.componentType = VectorExpansionKernel.getComponentType(type, componentType); } @@ -58,8 +56,7 @@ private synchronized void computePayload() { offsets = WritableIntChunk.makeWritableChunk(chunk.size() + 1); final WritableChunk innerChunk = kernel.expand(chunk, offsets); - innerGenerator = ChunkInputStreamGenerator.makeInputStreamGenerator( - chunkType, componentType, innerComponentType, innerChunk, 0); + innerGenerator = factory.makeInputStreamGenerator(chunkType, componentType, innerComponentType, innerChunk, 0); } @Override diff --git a/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java b/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java index fdda13a40bf..e4311441890 100644 --- a/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java +++ b/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java @@ -646,8 +646,8 @@ private static void testRoundTripSerialization( data.copyFromChunk(srcData, 0, 0, srcData.size()); try (SafeCloseable ignored = data; - ChunkInputStreamGenerator generator = ChunkInputStreamGenerator.makeInputStreamGenerator( - chunkType, type, type.getComponentType(), srcData, 0)) { + ChunkInputStreamGenerator generator = DefaultChunkInputStreamGeneratorFactory.INSTANCE + .makeInputStreamGenerator(chunkType, type, type.getComponentType(), srcData, 0)) { // full sub logic try (final BarrageProtoUtil.ExposedByteArrayOutputStream baos = new BarrageProtoUtil.ExposedByteArrayOutputStream(); From 86a5ffb7544e61d3edb3ef931cf5f6272607d331 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 18 Jun 2024 15:21:25 -0500 Subject: [PATCH 090/219] Remove dead class --- .../util/BarrageChunkAppendingMarshaller.java | 206 ------------------ .../test/FlightMessageRoundTripTest.java | 48 ---- 2 files changed, 254 deletions(-) delete mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageChunkAppendingMarshaller.java diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageChunkAppendingMarshaller.java 
b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageChunkAppendingMarshaller.java deleted file mode 100644 index edbf60d72a1..00000000000 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageChunkAppendingMarshaller.java +++ /dev/null @@ -1,206 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.extensions.barrage.util; - -import com.google.common.io.LittleEndianDataInputStream; -import com.google.protobuf.CodedInputStream; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.attributes.Values; -import io.deephaven.extensions.barrage.BarrageSnapshotOptions; -import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator; -import io.deephaven.util.datastructures.LongSizedDataStructure; -import io.deephaven.chunk.ChunkType; -import io.deephaven.internal.log.LoggerFactory; -import io.deephaven.io.logger.Logger; -import io.grpc.MethodDescriptor; -import io.grpc.protobuf.ProtoUtils; -import org.apache.arrow.flatbuf.Message; -import org.apache.arrow.flatbuf.MessageHeader; -import org.apache.arrow.flatbuf.RecordBatch; -import org.apache.arrow.flight.impl.Flight; -import org.apache.arrow.flight.impl.FlightServiceGrpc; - -import java.io.InputStream; -import java.nio.ByteBuffer; -import java.util.Arrays; -import java.util.Iterator; -import java.util.PrimitiveIterator; - -/** - * This class is used to append the results of a DoGet directly into destination {@link WritableChunk}. - *
- * <p>
- * It will append the results of a DoGet into the destination chunks, and notify the listener of the number of rows - * appended to the record batch in total. The user will typically want to wait for OnCompletion to be called before - * assuming they have received all the data. - */ -public class BarrageChunkAppendingMarshaller implements MethodDescriptor.Marshaller { - - /** - * Fetch the client side descriptor for a specific DoGet invocation. - *
- * <p>
- * Instead of providing BarrageMessage as the response type, this custom marshaller will return the number of rows - * appended after each RecordBatch. This is informative yet hands-off process reading data into the chunks. - * - * @param columnChunkTypes the chunk types per column - * @param columnTypes the class type per column - * @param componentTypes the component class type per column - * @param destChunks the destination chunks - * @return the client side method descriptor - */ - public static MethodDescriptor getClientDoGetDescriptor( - final ChunkType[] columnChunkTypes, - final Class[] columnTypes, - final Class[] componentTypes, - final WritableChunk[] destChunks) { - final MethodDescriptor.Marshaller requestMarshaller = - ProtoUtils.marshaller(Flight.Ticket.getDefaultInstance()); - final MethodDescriptor descriptor = FlightServiceGrpc.getDoGetMethod(); - - return MethodDescriptor.newBuilder() - .setType(MethodDescriptor.MethodType.SERVER_STREAMING) - .setFullMethodName(descriptor.getFullMethodName()) - .setSampledToLocalTracing(false) - .setRequestMarshaller(requestMarshaller) - .setResponseMarshaller(new BarrageChunkAppendingMarshaller( - BARRAGE_OPTIONS, columnChunkTypes, columnTypes, componentTypes, destChunks)) - .setSchemaDescriptor(descriptor.getSchemaDescriptor()) - .build(); - } - - // DoGet does not get to set any options - private static final BarrageSnapshotOptions BARRAGE_OPTIONS = BarrageSnapshotOptions.builder().build(); - - private static final Logger log = LoggerFactory.getLogger(BarrageChunkAppendingMarshaller.class); - - private final BarrageSnapshotOptions options; - - private final ChunkType[] columnChunkTypes; - private final Class[] columnTypes; - private final Class[] componentTypes; - - private final WritableChunk[] destChunks; - private long numRowsRead = 0; - - public BarrageChunkAppendingMarshaller( - final BarrageSnapshotOptions options, - final ChunkType[] columnChunkTypes, - final Class[] columnTypes, - final Class[] componentTypes, - final WritableChunk[] destChunks) { - this.options = options; - this.columnChunkTypes = columnChunkTypes; - this.columnTypes = columnTypes; - this.componentTypes = componentTypes; - this.destChunks = destChunks; - } - - @Override - public InputStream stream(final Integer value) { - throw new UnsupportedOperationException( - "BarrageDataMarshaller unexpectedly used to directly convert BarrageMessage to InputStream"); - } - - @Override - public Integer parse(final InputStream stream) { - Message header = null; - try { - boolean bodyParsed = false; - - final CodedInputStream decoder = CodedInputStream.newInstance(stream); - - for (int tag = decoder.readTag(); tag != 0; tag = decoder.readTag()) { - if (tag == BarrageProtoUtil.DATA_HEADER_TAG) { - final int size = decoder.readRawVarint32(); - header = Message.getRootAsMessage(ByteBuffer.wrap(decoder.readRawBytes(size))); - continue; - } else if (tag != BarrageProtoUtil.BODY_TAG) { - decoder.skipField(tag); - continue; - } - - if (bodyParsed) { - // although not an error for protobuf, arrow payloads should consider it one - throw new IllegalStateException("Unexpected duplicate body tag"); - } - - if (header == null) { - throw new IllegalStateException("Missing metadata header; cannot decode body"); - } - - if (header.headerType() != org.apache.arrow.flatbuf.MessageHeader.RecordBatch) { - throw new IllegalStateException("Only know how to decode Schema/BarrageRecordBatch messages"); - } - - bodyParsed = true; - final int size = decoder.readRawVarint32(); - final RecordBatch 
batch = (RecordBatch) header.header(new RecordBatch()); - - // noinspection UnstableApiUsage - try (final LittleEndianDataInputStream ois = - new LittleEndianDataInputStream(new BarrageProtoUtil.ObjectInputStreamAdapter(decoder, size))) { - final Iterator fieldNodeIter = - new FlatBufferIteratorAdapter<>(batch.nodesLength(), - i -> new ChunkInputStreamGenerator.FieldNodeInfo(batch.nodes(i))); - - final long[] bufferInfo = new long[batch.buffersLength()]; - for (int bi = 0; bi < batch.buffersLength(); ++bi) { - int offset = LongSizedDataStructure.intSize("BufferInfo", batch.buffers(bi).offset()); - int length = LongSizedDataStructure.intSize("BufferInfo", batch.buffers(bi).length()); - if (bi < batch.buffersLength() - 1) { - final int nextOffset = - LongSizedDataStructure.intSize("BufferInfo", batch.buffers(bi + 1).offset()); - // our parsers handle overhanging buffers - length += Math.max(0, nextOffset - offset - length); - } - bufferInfo[bi] = length; - } - final PrimitiveIterator.OfLong bufferInfoIter = Arrays.stream(bufferInfo).iterator(); - - for (int ci = 0; ci < destChunks.length; ++ci) { - final WritableChunk dest = destChunks[ci]; - - final long remaining = dest.capacity() - dest.size(); - if (batch.length() > remaining) { - throw new BarrageMarshallingException(String.format("Received RecordBatch length (%d) " + - "exceeds the remaining capacity (%d) of the destination Chunk.", batch.length(), - remaining)); - } - - // Barrage should return the provided chunk since there was enough room to append the data - final WritableChunk retChunk = ChunkInputStreamGenerator.extractChunkFromInputStream( - options, columnChunkTypes[ci], columnTypes[ci], componentTypes[ci], fieldNodeIter, - bufferInfoIter, ois, dest, dest.size(), (int) batch.length()); - - if (retChunk != dest) { - throw new BarrageMarshallingException("Unexpected chunk returned from " + - "ChunkInputStreamGenerator.extractChunkFromInputStream"); - } - - // barrage does not alter the destination chunk size, so let's set it ourselves - dest.setSize(dest.size() + (int) batch.length()); - } - numRowsRead += batch.length(); - } - } - - if (header != null && header.headerType() == MessageHeader.Schema) { - // getting started, but no rows yet; schemas do not have body tags - return 0; - } - - if (!bodyParsed) { - throw new IllegalStateException("Missing body tag"); - } - - // we're appending directly to the chunk, but courteously let our user know how many rows were read - return (int) numRowsRead; - } catch (final Exception e) { - log.error().append("Unable to parse a received DoGet: ").append(e).endl(); - if (e instanceof BarrageMarshallingException) { - throw (BarrageMarshallingException) e; - } - throw new GrpcMarshallingException("Unable to parse DoGet", e); - } - } -} diff --git a/server/test-utils/src/main/java/io/deephaven/server/test/FlightMessageRoundTripTest.java b/server/test-utils/src/main/java/io/deephaven/server/test/FlightMessageRoundTripTest.java index 95646536910..bbc47c6f1d3 100644 --- a/server/test-utils/src/main/java/io/deephaven/server/test/FlightMessageRoundTripTest.java +++ b/server/test-utils/src/main/java/io/deephaven/server/test/FlightMessageRoundTripTest.java @@ -38,7 +38,6 @@ import io.deephaven.engine.util.TableDiff; import io.deephaven.engine.util.TableTools; import io.deephaven.extensions.barrage.BarrageSubscriptionOptions; -import io.deephaven.extensions.barrage.util.BarrageChunkAppendingMarshaller; import io.deephaven.extensions.barrage.util.BarrageUtil; import io.deephaven.io.logger.LogBuffer; 
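// The parse() implementation removed above pads each Arrow buffer's length out to the
// start of the next buffer, since the chunk parsers tolerate overhanging buffers. In
// isolation, that bookkeeping is just the following arithmetic (the values here are
// illustrative, not taken from a real RecordBatch):
//
//     long offset = 0;     // this buffer's start
//     long length = 5;     // this buffer's reported payload length
//     long nextOffset = 8; // the next buffer begins on an alignment boundary
//     length += Math.max(0, nextOffset - offset - length); // -> 8, absorbing the padding gap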
import io.deephaven.io.logger.LogBufferGlobal; @@ -1065,53 +1064,6 @@ private void assertRoundTripDataEqual(Table deephavenTable, Consumer[] wireTypes = convertedSchema.computeWireTypes(); - final Class[] wireComponentTypes = convertedSchema.computeWireComponentTypes(); - - // noinspection unchecked - final WritableChunk[] destChunks = Arrays.stream(wireChunkTypes) - .map(chunkType -> chunkType.makeWritableChunk(size)).toArray(WritableChunk[]::new); - // zero out the chunks as the marshaller will append to them. - Arrays.stream(destChunks).forEach(dest -> dest.setSize(0)); - - final MethodDescriptor methodDescriptor = BarrageChunkAppendingMarshaller - .getClientDoGetDescriptor(wireChunkTypes, wireTypes, wireComponentTypes, destChunks); - - final Ticket ticket = new Ticket("s/test".getBytes(StandardCharsets.UTF_8)); - final Iterator msgIter = ClientCalls.blockingServerStreamingCall( - clientChannel, methodDescriptor, CallOptions.DEFAULT, - Flight.Ticket.newBuilder().setTicket(ByteString.copyFrom(ticket.getBytes())).build()); - - long totalRows = 0; - while (msgIter.hasNext()) { - totalRows += msgIter.next(); - } - Assert.eq(totalRows, "totalRows", size, "size"); - final LongChunk col_i = destChunks[0].asLongChunk(); - final ObjectChunk col_j = destChunks[1].asObjectChunk(); - Assert.eq(col_i.size(), "col_i.size()", size, "size"); - Assert.eq(col_j.size(), "col_j.size()", size, "size"); - for (int i = 0; i < size; ++i) { - Assert.eq(col_i.get(i), "col_i.get(i)", i, "i"); - Assert.equals(col_j.get(i), "col_j.get(i)", "str_" + i, "str_" + i); - } - } - @Test public void testColumnsAsListFeature() throws Exception { // bind the table in the session From 8eea3a395ab4b553244c3d941683f196f1ab872e Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 18 Jun 2024 15:25:46 -0500 Subject: [PATCH 091/219] Make assertion provide more info --- .../barrage/chunk/ByteChunkInputStreamGenerator.java | 4 +--- .../barrage/chunk/CharChunkInputStreamGenerator.java | 4 +--- .../barrage/chunk/DoubleChunkInputStreamGenerator.java | 4 +--- .../barrage/chunk/FloatChunkInputStreamGenerator.java | 4 +--- .../barrage/chunk/IntChunkInputStreamGenerator.java | 4 +--- .../barrage/chunk/LongChunkInputStreamGenerator.java | 4 +--- .../barrage/chunk/ShortChunkInputStreamGenerator.java | 4 +--- 7 files changed, 7 insertions(+), 21 deletions(-) diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java index 59efe7bfb33..194e9982d95 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java @@ -268,9 +268,7 @@ static WritableByteChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java 
b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java index 212ef4c33bd..14fd3f8f79b 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java @@ -264,9 +264,7 @@ static WritableCharChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java index e99ee922cac..a4108804f34 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java @@ -269,9 +269,7 @@ static WritableDoubleChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java index 0daee6eb829..82046928670 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java @@ -268,9 +268,7 @@ static WritableFloatChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java index 667c1921c76..4646cc5bdc6 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java @@ -269,9 +269,7 @@ static WritableIntChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) 
nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java index d8da1b1807d..c0a76261d61 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java @@ -269,9 +269,7 @@ static WritableLongChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java index 4bd912e646b..364b04d0d5f 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java @@ -268,9 +268,7 @@ static WritableShortChunk extractChunkFromInputStreamWithConversion( // consumed entire validity buffer by here final long payloadRead = (long) nodeInfo.numElements * elementSize; - if (payloadBuffer < payloadRead) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); From 1d36724242677ea5e4eae3bf62789bfb9e57c5bb Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 18 Jun 2024 15:44:05 -0500 Subject: [PATCH 092/219] Remove unused BitSet param --- .../extensions/barrage/util/BarrageStreamReader.java | 1 - .../deephaven/extensions/barrage/util/StreamReader.java | 2 -- .../io/deephaven/client/impl/BarrageSnapshotImpl.java | 8 +------- .../io/deephaven/client/impl/BarrageSubscriptionImpl.java | 2 +- 4 files changed, 2 insertions(+), 11 deletions(-) diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java index 3d749adee21..d535ffd0254 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java @@ -59,7 +59,6 @@ public BarrageStreamReader(final LongConsumer deserializeTmConsumer) { @Override public BarrageMessage safelyParseFrom(final StreamReaderOptions options, - final BitSet expectedColumns, final ChunkType[] columnChunkTypes, final Class[] columnTypes, 
final Class[] componentTypes, diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/StreamReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/StreamReader.java index 93b35af0863..be389e894b6 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/StreamReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/StreamReader.java @@ -18,7 +18,6 @@ public interface StreamReader { * Converts an InputStream to a BarrageMessage in the context of the provided parameters. * * @param options the options related to parsing this message - * @param expectedColumns the columns expected to appear in the stream, null implies all columns * @param columnChunkTypes the types to use for each column chunk * @param columnTypes the actual type for the column * @param componentTypes the actual component type for the column @@ -26,7 +25,6 @@ public interface StreamReader { * @return a BarrageMessage filled out by the stream's payload */ BarrageMessage safelyParseFrom(final StreamReaderOptions options, - BitSet expectedColumns, ChunkType[] columnChunkTypes, Class[] columnTypes, Class[] componentTypes, diff --git a/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java b/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java index 6310f2cab1f..e0e0b1f7741 100644 --- a/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java +++ b/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java @@ -63,8 +63,6 @@ public class BarrageSnapshotImpl extends ReferenceCountedLivenessNode implements private final BarrageTable resultTable; private final CompletableFuture
<Table>
future; - private volatile BitSet expectedColumns; - private volatile int connected = 1; private static final AtomicIntegerFieldUpdater CONNECTED_UPDATER = AtomicIntegerFieldUpdater.newUpdater(BarrageSnapshotImpl.class, "connected"); @@ -209,9 +207,6 @@ public Future
partialTable( alreadyUsed = true; } - // store this for streamreader parser - expectedColumns = columns; - // Send the snapshot request: observer.onNext(FlightData.newBuilder() .setAppMetadata(ByteStringAccess.wrap(makeRequestInternal(viewport, columns, reverseViewport, options))) @@ -355,8 +350,7 @@ public InputStream stream(final BarrageMessage value) { @Override public BarrageMessage parse(final InputStream stream) { - return streamReader.safelyParseFrom(options, expectedColumns, columnChunkTypes, columnTypes, componentTypes, - stream); + return streamReader.safelyParseFrom(options, columnChunkTypes, columnTypes, componentTypes, stream); } } diff --git a/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSubscriptionImpl.java b/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSubscriptionImpl.java index 28d844780c4..0ddf9b9e87b 100644 --- a/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSubscriptionImpl.java +++ b/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSubscriptionImpl.java @@ -429,7 +429,7 @@ public InputStream stream(final BarrageMessage value) { @Override public BarrageMessage parse(final InputStream stream) { - return streamReader.safelyParseFrom(options, null, columnChunkTypes, columnTypes, componentTypes, stream); + return streamReader.safelyParseFrom(options, columnChunkTypes, columnTypes, componentTypes, stream); } } From 8e2eb966e966ce1b655753681a0ed734b67e7cc8 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 25 Jun 2024 12:19:47 -0500 Subject: [PATCH 093/219] Move BYTES constant into each impl --- .../chunk/ByteChunkInputStreamGenerator.java | 16 +++++--------- .../chunk/CharChunkInputStreamGenerator.java | 16 +++++--------- .../chunk/ChunkInputStreamGenerator.java | 22 +++++++++---------- .../DoubleChunkInputStreamGenerator.java | 16 +++++--------- .../chunk/FloatChunkInputStreamGenerator.java | 16 +++++--------- .../chunk/IntChunkInputStreamGenerator.java | 16 +++++--------- .../chunk/LongChunkInputStreamGenerator.java | 16 +++++--------- .../chunk/ShortChunkInputStreamGenerator.java | 16 +++++--------- 8 files changed, 52 insertions(+), 82 deletions(-) diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java index 194e9982d95..c2cc7f3e453 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java @@ -176,7 +176,6 @@ public interface ByteConversion { } static WritableByteChunk extractChunkFromInputStream( - final int elementSize, final StreamReaderOptions options, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, @@ -185,12 +184,11 @@ static WritableByteChunk extractChunkFromInputStream( final int outOffset, final int totalRows) throws IOException { return extractChunkFromInputStreamWithConversion( - elementSize, options, ByteConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + options, ByteConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final int elementSize, final StreamReaderOptions options, final Function transform, final Iterator fieldNodeIter, @@ -201,7 +199,7 @@ static 
WritableObjectChunk extractChunkFromInputStreamWithTransfo final int totalRows) throws IOException { try (final WritableByteChunk inner = extractChunkFromInputStream( - elementSize, options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( outChunk, @@ -224,7 +222,6 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo } static WritableByteChunk extractChunkFromInputStreamWithConversion( - final int elementSize, final StreamReaderOptions options, final ByteConversion conversion, final Iterator fieldNodeIter, @@ -267,13 +264,13 @@ static WritableByteChunk extractChunkFromInputStreamWithConversion( } // consumed entire validity buffer by here - final long payloadRead = (long) nodeInfo.numElements * elementSize; + final long payloadRead = (long) nodeInfo.numElements * Byte.BYTES; Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); } else { - useValidityBuffer(elementSize, conversion, is, nodeInfo, chunk, outOffset, isValid); + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); } final long overhangPayload = payloadBuffer - payloadRead; @@ -318,7 +315,6 @@ private static void useDeephavenNulls( } private static void useValidityBuffer( - final int elementSize, final ByteConversion conversion, final DataInput is, final FieldNodeInfo nodeInfo, @@ -337,7 +333,7 @@ private static void useValidityBuffer( do { if ((validityWord & 1) == 1) { if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Byte.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); ei += pendingSkips; pendingSkips = 0; @@ -355,7 +351,7 @@ private static void useValidityBuffer( } if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Byte.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java index 14fd3f8f79b..878bc0a6cd6 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java @@ -172,7 +172,6 @@ public interface CharConversion { } static WritableCharChunk extractChunkFromInputStream( - final int elementSize, final StreamReaderOptions options, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, @@ -181,12 +180,11 @@ static WritableCharChunk extractChunkFromInputStream( final int outOffset, final int totalRows) throws IOException { return extractChunkFromInputStreamWithConversion( - elementSize, options, CharConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + options, CharConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final int elementSize, final StreamReaderOptions options, final Function transform, final Iterator fieldNodeIter, @@ -197,7 +195,7 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo final int totalRows) throws IOException { try (final WritableCharChunk inner = 
extractChunkFromInputStream( - elementSize, options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( outChunk, @@ -220,7 +218,6 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo } static WritableCharChunk extractChunkFromInputStreamWithConversion( - final int elementSize, final StreamReaderOptions options, final CharConversion conversion, final Iterator fieldNodeIter, @@ -263,13 +260,13 @@ static WritableCharChunk extractChunkFromInputStreamWithConversion( } // consumed entire validity buffer by here - final long payloadRead = (long) nodeInfo.numElements * elementSize; + final long payloadRead = (long) nodeInfo.numElements * Character.BYTES; Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); } else { - useValidityBuffer(elementSize, conversion, is, nodeInfo, chunk, outOffset, isValid); + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); } final long overhangPayload = payloadBuffer - payloadRead; @@ -314,7 +311,6 @@ private static void useDeephavenNulls( } private static void useValidityBuffer( - final int elementSize, final CharConversion conversion, final DataInput is, final FieldNodeInfo nodeInfo, @@ -333,7 +329,7 @@ private static void useValidityBuffer( do { if ((validityWord & 1) == 1) { if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Character.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); ei += pendingSkips; pendingSkips = 0; @@ -351,7 +347,7 @@ private static void useValidityBuffer( } if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Character.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java index 92c62dd1d00..4a3b01b48fb 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java @@ -213,36 +213,34 @@ static WritableChunk extractChunkFromInputStream( throw new UnsupportedOperationException("Booleans are reinterpreted as bytes"); case Char: return CharChunkInputStreamGenerator.extractChunkFromInputStream( - Character.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Byte: if (type == Boolean.class || type == boolean.class) { return BooleanChunkInputStreamGenerator.extractChunkFromInputStream( options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } return ByteChunkInputStreamGenerator.extractChunkFromInputStream( - Byte.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Short: return ShortChunkInputStreamGenerator.extractChunkFromInputStream( - Short.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Int: return 
IntChunkInputStreamGenerator.extractChunkFromInputStream( - Integer.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Long: if (factor == 1) { return LongChunkInputStreamGenerator.extractChunkFromInputStream( - Long.BYTES, options, - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithConversion( - Long.BYTES, options, - (long v) -> v == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : (v * factor), + options, (long v) -> v == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : (v * factor), fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Float: return FloatChunkInputStreamGenerator.extractChunkFromInputStream( - Float.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Double: return DoubleChunkInputStreamGenerator.extractChunkFromInputStream( - Double.BYTES, options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Object: if (type.isArray()) { if (componentType == byte.class) { @@ -347,7 +345,7 @@ static WritableChunk extractChunkFromInputStream( } if (type == LocalDate.class) { return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( - Long.BYTES, options, + options, value -> value == QueryConstants.NULL_LONG ? null : LocalDate.ofEpochDay(value / MS_PER_DAY), @@ -355,7 +353,7 @@ static WritableChunk extractChunkFromInputStream( } if (type == LocalTime.class) { return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( - Long.BYTES, options, + options, value -> value == QueryConstants.NULL_LONG ? 
null : LocalTime.ofNanoOfDay(value), fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java index a4108804f34..c5283a02364 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java @@ -177,7 +177,6 @@ public interface DoubleConversion { } static WritableDoubleChunk extractChunkFromInputStream( - final int elementSize, final StreamReaderOptions options, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, @@ -186,12 +185,11 @@ static WritableDoubleChunk extractChunkFromInputStream( final int outOffset, final int totalRows) throws IOException { return extractChunkFromInputStreamWithConversion( - elementSize, options, DoubleConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + options, DoubleConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final int elementSize, final StreamReaderOptions options, final Function transform, final Iterator fieldNodeIter, @@ -202,7 +200,7 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo final int totalRows) throws IOException { try (final WritableDoubleChunk inner = extractChunkFromInputStream( - elementSize, options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( outChunk, @@ -225,7 +223,6 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo } static WritableDoubleChunk extractChunkFromInputStreamWithConversion( - final int elementSize, final StreamReaderOptions options, final DoubleConversion conversion, final Iterator fieldNodeIter, @@ -268,13 +265,13 @@ static WritableDoubleChunk extractChunkFromInputStreamWithConversion( } // consumed entire validity buffer by here - final long payloadRead = (long) nodeInfo.numElements * elementSize; + final long payloadRead = (long) nodeInfo.numElements * Double.BYTES; Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); } else { - useValidityBuffer(elementSize, conversion, is, nodeInfo, chunk, outOffset, isValid); + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); } final long overhangPayload = payloadBuffer - payloadRead; @@ -319,7 +316,6 @@ private static void useDeephavenNulls( } private static void useValidityBuffer( - final int elementSize, final DoubleConversion conversion, final DataInput is, final FieldNodeInfo nodeInfo, @@ -338,7 +334,7 @@ private static void useValidityBuffer( do { if ((validityWord & 1) == 1) { if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Double.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); ei += pendingSkips; pendingSkips = 0; @@ -356,7 +352,7 @@ private static void useValidityBuffer( } if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Double.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); } } diff --git 
a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java index 82046928670..19b52593bff 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java @@ -176,7 +176,6 @@ public interface FloatConversion { } static WritableFloatChunk extractChunkFromInputStream( - final int elementSize, final StreamReaderOptions options, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, @@ -185,12 +184,11 @@ static WritableFloatChunk extractChunkFromInputStream( final int outOffset, final int totalRows) throws IOException { return extractChunkFromInputStreamWithConversion( - elementSize, options, FloatConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + options, FloatConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final int elementSize, final StreamReaderOptions options, final Function transform, final Iterator fieldNodeIter, @@ -201,7 +199,7 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo final int totalRows) throws IOException { try (final WritableFloatChunk inner = extractChunkFromInputStream( - elementSize, options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( outChunk, @@ -224,7 +222,6 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo } static WritableFloatChunk extractChunkFromInputStreamWithConversion( - final int elementSize, final StreamReaderOptions options, final FloatConversion conversion, final Iterator fieldNodeIter, @@ -267,13 +264,13 @@ static WritableFloatChunk extractChunkFromInputStreamWithConversion( } // consumed entire validity buffer by here - final long payloadRead = (long) nodeInfo.numElements * elementSize; + final long payloadRead = (long) nodeInfo.numElements * Float.BYTES; Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); } else { - useValidityBuffer(elementSize, conversion, is, nodeInfo, chunk, outOffset, isValid); + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); } final long overhangPayload = payloadBuffer - payloadRead; @@ -318,7 +315,6 @@ private static void useDeephavenNulls( } private static void useValidityBuffer( - final int elementSize, final FloatConversion conversion, final DataInput is, final FieldNodeInfo nodeInfo, @@ -337,7 +333,7 @@ private static void useValidityBuffer( do { if ((validityWord & 1) == 1) { if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Float.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); ei += pendingSkips; pendingSkips = 0; @@ -355,7 +351,7 @@ private static void useValidityBuffer( } if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Float.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java 
b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java index 4646cc5bdc6..91714f4dd43 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java @@ -177,7 +177,6 @@ public interface IntConversion { } static WritableIntChunk extractChunkFromInputStream( - final int elementSize, final StreamReaderOptions options, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, @@ -186,12 +185,11 @@ static WritableIntChunk extractChunkFromInputStream( final int outOffset, final int totalRows) throws IOException { return extractChunkFromInputStreamWithConversion( - elementSize, options, IntConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + options, IntConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final int elementSize, final StreamReaderOptions options, final Function transform, final Iterator fieldNodeIter, @@ -202,7 +200,7 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo final int totalRows) throws IOException { try (final WritableIntChunk inner = extractChunkFromInputStream( - elementSize, options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( outChunk, @@ -225,7 +223,6 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo } static WritableIntChunk extractChunkFromInputStreamWithConversion( - final int elementSize, final StreamReaderOptions options, final IntConversion conversion, final Iterator fieldNodeIter, @@ -268,13 +265,13 @@ static WritableIntChunk extractChunkFromInputStreamWithConversion( } // consumed entire validity buffer by here - final long payloadRead = (long) nodeInfo.numElements * elementSize; + final long payloadRead = (long) nodeInfo.numElements * Integer.BYTES; Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); } else { - useValidityBuffer(elementSize, conversion, is, nodeInfo, chunk, outOffset, isValid); + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); } final long overhangPayload = payloadBuffer - payloadRead; @@ -319,7 +316,6 @@ private static void useDeephavenNulls( } private static void useValidityBuffer( - final int elementSize, final IntConversion conversion, final DataInput is, final FieldNodeInfo nodeInfo, @@ -338,7 +334,7 @@ private static void useValidityBuffer( do { if ((validityWord & 1) == 1) { if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Integer.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); ei += pendingSkips; pendingSkips = 0; @@ -356,7 +352,7 @@ private static void useValidityBuffer( } if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Integer.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java index c0a76261d61..a28c4006d1d 100644 --- 
a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java @@ -177,7 +177,6 @@ public interface LongConversion { } static WritableLongChunk extractChunkFromInputStream( - final int elementSize, final StreamReaderOptions options, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, @@ -186,12 +185,11 @@ static WritableLongChunk extractChunkFromInputStream( final int outOffset, final int totalRows) throws IOException { return extractChunkFromInputStreamWithConversion( - elementSize, options, LongConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + options, LongConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final int elementSize, final StreamReaderOptions options, final Function transform, final Iterator fieldNodeIter, @@ -202,7 +200,7 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo final int totalRows) throws IOException { try (final WritableLongChunk inner = extractChunkFromInputStream( - elementSize, options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( outChunk, @@ -225,7 +223,6 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo } static WritableLongChunk extractChunkFromInputStreamWithConversion( - final int elementSize, final StreamReaderOptions options, final LongConversion conversion, final Iterator fieldNodeIter, @@ -268,13 +265,13 @@ static WritableLongChunk extractChunkFromInputStreamWithConversion( } // consumed entire validity buffer by here - final long payloadRead = (long) nodeInfo.numElements * elementSize; + final long payloadRead = (long) nodeInfo.numElements * Long.BYTES; Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); } else { - useValidityBuffer(elementSize, conversion, is, nodeInfo, chunk, outOffset, isValid); + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); } final long overhangPayload = payloadBuffer - payloadRead; @@ -319,7 +316,6 @@ private static void useDeephavenNulls( } private static void useValidityBuffer( - final int elementSize, final LongConversion conversion, final DataInput is, final FieldNodeInfo nodeInfo, @@ -338,7 +334,7 @@ private static void useValidityBuffer( do { if ((validityWord & 1) == 1) { if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Long.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); ei += pendingSkips; pendingSkips = 0; @@ -356,7 +352,7 @@ private static void useValidityBuffer( } if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Long.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java index 364b04d0d5f..68a2ecf86b1 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java +++ 
b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java @@ -176,7 +176,6 @@ public interface ShortConversion { } static WritableShortChunk extractChunkFromInputStream( - final int elementSize, final StreamReaderOptions options, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, @@ -185,12 +184,11 @@ static WritableShortChunk extractChunkFromInputStream( final int outOffset, final int totalRows) throws IOException { return extractChunkFromInputStreamWithConversion( - elementSize, options, ShortConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + options, ShortConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final int elementSize, final StreamReaderOptions options, final Function transform, final Iterator fieldNodeIter, @@ -201,7 +199,7 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo final int totalRows) throws IOException { try (final WritableShortChunk inner = extractChunkFromInputStream( - elementSize, options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( outChunk, @@ -224,7 +222,6 @@ static WritableObjectChunk extractChunkFromInputStreamWithTransfo } static WritableShortChunk extractChunkFromInputStreamWithConversion( - final int elementSize, final StreamReaderOptions options, final ShortConversion conversion, final Iterator fieldNodeIter, @@ -267,13 +264,13 @@ static WritableShortChunk extractChunkFromInputStreamWithConversion( } // consumed entire validity buffer by here - final long payloadRead = (long) nodeInfo.numElements * elementSize; + final long payloadRead = (long) nodeInfo.numElements * Short.BYTES; Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); if (options.useDeephavenNulls()) { useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); } else { - useValidityBuffer(elementSize, conversion, is, nodeInfo, chunk, outOffset, isValid); + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); } final long overhangPayload = payloadBuffer - payloadRead; @@ -318,7 +315,6 @@ private static void useDeephavenNulls( } private static void useValidityBuffer( - final int elementSize, final ShortConversion conversion, final DataInput is, final FieldNodeInfo nodeInfo, @@ -337,7 +333,7 @@ private static void useValidityBuffer( do { if ((validityWord & 1) == 1) { if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Short.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); ei += pendingSkips; pendingSkips = 0; @@ -355,7 +351,7 @@ private static void useValidityBuffer( } if (pendingSkips > 0) { - is.skipBytes(pendingSkips * elementSize); + is.skipBytes(pendingSkips * Short.BYTES); chunk.fillWithNullValue(offset + ei, pendingSkips); } } From 6fc2c6c9abfa29bff93aded38180b37958f4272d Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Wed, 26 Jun 2024 08:46:20 -0500 Subject: [PATCH 094/219] Make two reader methods public so they can be accessed from web --- .../barrage/chunk/FixedWidthChunkInputStreamGenerator.java | 2 +- .../barrage/chunk/VarBinaryChunkInputStreamGenerator.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git 
a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FixedWidthChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FixedWidthChunkInputStreamGenerator.java index 46541c8ec1d..7b77b00911b 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FixedWidthChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FixedWidthChunkInputStreamGenerator.java @@ -40,7 +40,7 @@ public interface TypeConversion { * @param the result type * @return the resulting chunk of the buffer that is read */ - static WritableObjectChunk extractChunkFromInputStreamWithTypeConversion( + public static WritableObjectChunk extractChunkFromInputStreamWithTypeConversion( final int elementSize, final StreamReaderOptions options, final TypeConversion conversion, diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarBinaryChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarBinaryChunkInputStreamGenerator.java index e84184be19c..34a460c31d0 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarBinaryChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarBinaryChunkInputStreamGenerator.java @@ -396,7 +396,7 @@ public int drainTo(final OutputStream outputStream) throws IOException { } } - static WritableObjectChunk extractChunkFromInputStream( + public static WritableObjectChunk extractChunkFromInputStream( final DataInput is, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, From 7eb176c69ad5dca47a7cbbff8d30c73d6bd20101 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Mon, 24 Jun 2024 20:36:48 -0500 Subject: [PATCH 095/219] Commit #1 reading chunks, checkpoint to talk to nate, next will try reading schema at beginning of stream --- .../chunk/ChunkInputStreamGenerator.java | 173 +-------------- .../barrage/chunk/ChunkReadingFactory.java | 116 ++++++++++ .../chunk/DefaultChunkReadingFactory.java | 207 ++++++++++++++++++ .../VarListChunkInputStreamGenerator.java | 20 +- .../VectorChunkInputStreamGenerator.java | 23 +- .../barrage/util/ArrowToTableConverter.java | 10 +- .../barrage/util/BarrageStreamReader.java | 19 +- .../client/impl/BarrageSnapshotImpl.java | 3 +- 8 files changed, 381 insertions(+), 190 deletions(-) create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java index 4a3b01b48fb..98cf03908e8 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java @@ -10,7 +10,6 @@ import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.rowset.RowSet; -import io.deephaven.extensions.barrage.ColumnConversionMode; import io.deephaven.extensions.barrage.util.DefensiveDrainable; import io.deephaven.extensions.barrage.util.StreamReaderOptions; import 
io.deephaven.time.DateTimeUtils; @@ -19,7 +18,6 @@ import io.deephaven.chunk.Chunk; import io.deephaven.chunk.ChunkType; import io.deephaven.util.SafeCloseable; -import io.deephaven.util.type.TypeUtils; import io.deephaven.vector.Vector; import org.jetbrains.annotations.Nullable; @@ -31,7 +29,6 @@ import java.time.LocalDate; import java.time.LocalTime; import java.time.ZonedDateTime; -import java.util.Arrays; import java.util.Iterator; import java.util.PrimitiveIterator; @@ -187,6 +184,7 @@ static ChunkInputStreamGenerator makeInputStreamGenerator( } } + @Deprecated static WritableChunk extractChunkFromInputStream( final StreamReaderOptions options, final ChunkType chunkType, final Class type, final Class componentType, @@ -195,10 +193,10 @@ static WritableChunk extractChunkFromInputStream( final DataInput is, final WritableChunk outChunk, final int offset, final int totalRows) throws IOException { return extractChunkFromInputStream(options, 1, chunkType, type, componentType, fieldNodeIter, bufferInfoIter, - is, - outChunk, offset, totalRows); + is, outChunk, offset, totalRows); } + @Deprecated static WritableChunk extractChunkFromInputStream( final StreamReaderOptions options, final int factor, @@ -207,168 +205,9 @@ static WritableChunk extractChunkFromInputStream( final PrimitiveIterator.OfLong bufferInfoIter, final DataInput is, final WritableChunk outChunk, final int outOffset, final int totalRows) throws IOException { - // TODO (deephaven-core#5453): pass in ArrowType to enable ser/deser of single java class in multiple formats - switch (chunkType) { - case Boolean: - throw new UnsupportedOperationException("Booleans are reinterpreted as bytes"); - case Char: - return CharChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Byte: - if (type == Boolean.class || type == boolean.class) { - return BooleanChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - return ByteChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Short: - return ShortChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Int: - return IntChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Long: - if (factor == 1) { - return LongChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithConversion( - options, (long v) -> v == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_LONG : (v * factor), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Float: - return FloatChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Double: - return DoubleChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - case Object: - if (type.isArray()) { - if (componentType == byte.class) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - is, - fieldNodeIter, - bufferInfoIter, - (buf, off, len) -> Arrays.copyOfRange(buf, off, off + len), - outChunk, outOffset, totalRows); - } else { - return VarListChunkInputStreamGenerator.extractChunkFromInputStream( - options, type, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - } - if (Vector.class.isAssignableFrom(type)) { - // noinspection unchecked - return VectorChunkInputStreamGenerator.extractChunkFromInputStream( - options, (Class>) type, componentType, fieldNodeIter, bufferInfoIter, is, - outChunk, outOffset, totalRows); - } - if (type == BigInteger.class) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - is, - fieldNodeIter, - bufferInfoIter, - BigInteger::new, - outChunk, outOffset, totalRows); - } - if (type == BigDecimal.class) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - is, - fieldNodeIter, - bufferInfoIter, - (final byte[] buf, final int offset, final int length) -> { - // read the int scale value as little endian, arrow's endianness. - final byte b1 = buf[offset]; - final byte b2 = buf[offset + 1]; - final byte b3 = buf[offset + 2]; - final byte b4 = buf[offset + 3]; - final int scale = b4 << 24 | (b3 & 0xFF) << 16 | (b2 & 0xFF) << 8 | (b1 & 0xFF); - return new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale); - }, - outChunk, outOffset, totalRows); - } - if (type == Instant.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Long.BYTES, options, io -> { - final long value = io.readLong(); - if (value == QueryConstants.NULL_LONG) { - return null; - } - return DateTimeUtils.epochNanosToInstant(value * factor); - }, - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == ZonedDateTime.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Long.BYTES, options, io -> { - final long value = io.readLong(); - if (value == QueryConstants.NULL_LONG) { - return null; - } - return DateTimeUtils.epochNanosToZonedDateTime( - value * factor, DateTimeUtils.timeZone()); - }, - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == Byte.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Byte.BYTES, options, io -> TypeUtils.box(io.readByte()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == Character.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Character.BYTES, options, io -> TypeUtils.box(io.readChar()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == Double.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Double.BYTES, options, io -> TypeUtils.box(io.readDouble()), - fieldNodeIter, bufferInfoIter, is, outChunk, 
outOffset, totalRows); - } - if (type == Float.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Float.BYTES, options, io -> TypeUtils.box(io.readFloat()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == Integer.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Integer.BYTES, options, io -> TypeUtils.box(io.readInt()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == Long.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Long.BYTES, options, io -> TypeUtils.box(io.readLong()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == Short.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Short.BYTES, options, io -> TypeUtils.box(io.readShort()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == LocalDate.class) { - return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( - options, - value -> value == QueryConstants.NULL_LONG - ? null - : LocalDate.ofEpochDay(value / MS_PER_DAY), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == LocalTime.class) { - return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( - options, - value -> value == QueryConstants.NULL_LONG ? null : LocalTime.ofNanoOfDay(value), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - if (type == String.class || - options.columnConversionMode().equals(ColumnConversionMode.Stringify)) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream(is, fieldNodeIter, - bufferInfoIter, - (buf, off, len) -> new String(buf, off, len, Charsets.UTF_8), outChunk, outOffset, - totalRows); - } - throw new UnsupportedOperationException( - "Do not yet support column conversion mode: " + options.columnConversionMode()); - default: - throw new UnsupportedOperationException(); - } + return DefaultChunkReadingFactory.INSTANCE.extractChunkFromInputStream(options, factor, + new ChunkReadingFactory.ChunkTypeInfo(chunkType, type, componentType, null), fieldNodeIter, + bufferInfoIter, is, outChunk, outOffset, totalRows); } /** diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java new file mode 100644 index 00000000000..d7cfb18db00 --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java @@ -0,0 +1,116 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.chunk.ChunkType; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import org.apache.arrow.flatbuf.Field; +import org.apache.arrow.flatbuf.Type; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; + +/** + * + */ +public interface ChunkReadingFactory { + /** + * + */ + class ChunkTypeInfo { + private final ChunkType chunkType; + private final Class type; + private final Class componentType; + private final Field arrowField; + + public 
ChunkTypeInfo(ChunkType chunkType, Class type, Class componentType, Field arrowField) {
+            this.chunkType = chunkType;
+            this.type = type;
+            this.componentType = componentType;
+            this.arrowField = arrowField;
+        }
+
+        public ChunkType chunkType() {
+            return chunkType;
+        }
+
+        public Class type() {
+            return type;
+        }
+
+        public Class componentType() {
+            return componentType;
+        }
+
+        public Field arrowField() {
+            return arrowField;
+        }
+
+        public Field componentArrowField() {
+            if (arrowField.typeType() != Type.List) {
+                throw new IllegalStateException("Not a flight List");
+            }
+            if (arrowField.childrenLength() != 1) {
+                throw new IllegalStateException("Incorrect number of child Fields");
+            }
+            return arrowField.children(0);
+        }
+    }
+
+    /**
+     *
+     * @param options options for reading the stream
+     * @param factor a multiplicative factor to apply to long values as they are read, e.g. for time unit conversion
+     * @param typeInfo the type of data to read into a chunk
+     * @param fieldNodeIter iterator over the field nodes of the record batch
+     * @param bufferInfoIter iterator over the buffer lengths of the record batch
+     * @param is the stream to read the chunk data from
+     * @param outChunk the chunk to populate, or null to allocate a new one
+     * @param outOffset the offset into outChunk at which to begin writing
+     * @param totalRows the total number of rows to read
+     * @return
+     * @throws IOException if an error occurred while reading the stream
+     */
+    WritableChunk extractChunkFromInputStream(
+            final StreamReaderOptions options,
+            final int factor,
+            final ChunkTypeInfo typeInfo,
+            final Iterator fieldNodeIter,
+            final PrimitiveIterator.OfLong bufferInfoIter,
+            final DataInput is,
+            final WritableChunk outChunk,
+            final int outOffset,
+            final int totalRows) throws IOException;
+
+    /**
+     *
+     * @param options options for reading the stream
+     * @param typeInfo the type of data to read into a chunk
+     * @param fieldNodeIter iterator over the field nodes of the record batch
+     * @param bufferInfoIter iterator over the buffer lengths of the record batch
+     * @param is the stream to read the chunk data from
+     * @param outChunk the chunk to populate, or null to allocate a new one
+     * @param offset the offset into outChunk at which to begin writing
+     * @param totalRows the total number of rows to read
+     * @return
+     * @throws IOException if an error occurred while reading the stream
+     */
+    default WritableChunk extractChunkFromInputStream(
+            final StreamReaderOptions options,
+            final ChunkTypeInfo typeInfo,
+            final Iterator fieldNodeIter,
+            final PrimitiveIterator.OfLong bufferInfoIter,
+            final DataInput is,
+            final WritableChunk outChunk,
+            final int offset,
+            final int totalRows) throws IOException {
+        return extractChunkFromInputStream(options, 1, typeInfo, fieldNodeIter, bufferInfoIter, is, outChunk, offset,
+                totalRows);
+    }
+
+}
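A minimal sketch of the call pattern this factory enables (illustrative only; `options`, the type metadata, and the record-batch iterators are assumed to come from already-parsed schema and message data, as in the ArrowToTableConverter changes below):

    // Read one column of a record batch into a chunk via the factory.
    // Everything here other than DefaultChunkReadingFactory.INSTANCE and
    // ChunkTypeInfo is an assumed input from surrounding parsing code.
    WritableChunk<Values> chunk = DefaultChunkReadingFactory.INSTANCE.extractChunkFromInputStream(
            options,
            new ChunkReadingFactory.ChunkTypeInfo(chunkType, type, componentType, field),
            fieldNodeIter, bufferInfoIter, inputStream, null, 0, 0);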
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java
new file mode 100644
index 00000000000..e7a5bcd665c
--- /dev/null
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java
@@ -0,0 +1,207 @@
+//
+// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
+//
+package io.deephaven.extensions.barrage.chunk;
+
+import com.google.common.base.Charsets;
+import io.deephaven.chunk.WritableChunk;
+import io.deephaven.chunk.attributes.Values;
+import io.deephaven.extensions.barrage.ColumnConversionMode;
+import io.deephaven.extensions.barrage.util.StreamReaderOptions;
+import io.deephaven.time.DateTimeUtils;
+import io.deephaven.util.QueryConstants;
+import io.deephaven.util.type.TypeUtils;
+import io.deephaven.vector.Vector;
+
+import java.io.DataInput;
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.time.Instant;
+import java.time.LocalDate;
+import java.time.LocalTime;
+import java.time.ZonedDateTime;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.PrimitiveIterator;
+
+import static io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator.MS_PER_DAY;
+
+/**
+ * JVM implementation of ChunkReadingFactory, suitable for use in Java clients and servers. This default implementation
+ * may not round trip Flight types correctly, but will round trip Deephaven table definitions and table data. Neither
+ * of these is a required/expected property of a Flight/Barrage/Deephaven client.
+ */
+public final class DefaultChunkReadingFactory implements ChunkReadingFactory {
+    public static final ChunkReadingFactory INSTANCE = new DefaultChunkReadingFactory();
+
+    @Override
+    public WritableChunk extractChunkFromInputStream(StreamReaderOptions options, int factor,
+            ChunkTypeInfo typeInfo, Iterator fieldNodeIter,
+            PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset,
+            int totalRows) throws IOException {
+        // TODO (deephaven-core#5453): pass in ArrowType to enable ser/deser of single java class in multiple formats
+        switch (typeInfo.chunkType()) {
+            case Boolean:
+                throw new UnsupportedOperationException("Booleans are reinterpreted as bytes");
+            case Char:
+                return CharChunkInputStreamGenerator.extractChunkFromInputStream(
+                        options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
+            case Byte:
+                if (typeInfo.type() == Boolean.class || typeInfo.type() == boolean.class) {
+                    return BooleanChunkInputStreamGenerator.extractChunkFromInputStream(
+                            options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
+                }
+                return ByteChunkInputStreamGenerator.extractChunkFromInputStream(
+                        options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
+            case Short:
+                return ShortChunkInputStreamGenerator.extractChunkFromInputStream(
+                        options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
+            case Int:
+                return IntChunkInputStreamGenerator.extractChunkFromInputStream(
+                        options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
+            case Long:
+                if (factor == 1) {
+                    return LongChunkInputStreamGenerator.extractChunkFromInputStream(
+                            options,
+                            fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows);
+                }
+                return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithConversion(
+                        options,
+                        (long v) -> v == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_LONG : (v * factor), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + case Float: + return FloatChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + case Double: + return DoubleChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + case Object: + if (typeInfo.type().isArray()) { + if (typeInfo.componentType() == byte.class) { + return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + is, + fieldNodeIter, + bufferInfoIter, + (buf, off, len) -> Arrays.copyOfRange(buf, off, off + len), + outChunk, outOffset, totalRows); + } else { + return VarListChunkInputStreamGenerator.extractChunkFromInputStream(options, typeInfo, + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows, this); + } + } + if (Vector.class.isAssignableFrom(typeInfo.type())) { + return VectorChunkInputStreamGenerator.extractChunkFromInputStream(options, + typeInfo, fieldNodeIter, bufferInfoIter, + is, outChunk, outOffset, totalRows, this); + } + if (typeInfo.type() == BigInteger.class) { + return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + is, + fieldNodeIter, + bufferInfoIter, + BigInteger::new, + outChunk, outOffset, totalRows); + } + if (typeInfo.type() == BigDecimal.class) { + return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + is, + fieldNodeIter, + bufferInfoIter, + (final byte[] buf, final int offset, final int length) -> { + // read the int scale value as little endian, arrow's endianness. + final byte b1 = buf[offset]; + final byte b2 = buf[offset + 1]; + final byte b3 = buf[offset + 2]; + final byte b4 = buf[offset + 3]; + final int scale = b4 << 24 | (b3 & 0xFF) << 16 | (b2 & 0xFF) << 8 | (b1 & 0xFF); + return new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale); + }, + outChunk, outOffset, totalRows); + } + if (typeInfo.type() == Instant.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Long.BYTES, options, io -> { + final long value = io.readLong(); + if (value == QueryConstants.NULL_LONG) { + return null; + } + return DateTimeUtils.epochNanosToInstant(value * factor); + }, + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == ZonedDateTime.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Long.BYTES, options, io -> { + final long value = io.readLong(); + if (value == QueryConstants.NULL_LONG) { + return null; + } + return DateTimeUtils.epochNanosToZonedDateTime( + value * factor, DateTimeUtils.timeZone()); + }, + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == Byte.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Byte.BYTES, options, io -> TypeUtils.box(io.readByte()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == Character.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Character.BYTES, options, io -> TypeUtils.box(io.readChar()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == Double.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Double.BYTES, options, io -> 
TypeUtils.box(io.readDouble()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == Float.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Float.BYTES, options, io -> TypeUtils.box(io.readFloat()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == Integer.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Integer.BYTES, options, io -> TypeUtils.box(io.readInt()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == Long.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Long.BYTES, options, io -> TypeUtils.box(io.readLong()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == Short.class) { + return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( + Short.BYTES, options, io -> TypeUtils.box(io.readShort()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == LocalDate.class) { + return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( + options, + value -> value == QueryConstants.NULL_LONG + ? null + : LocalDate.ofEpochDay(value / MS_PER_DAY), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == LocalTime.class) { + return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( + options, + value -> value == QueryConstants.NULL_LONG ? null : LocalTime.ofNanoOfDay(value), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + if (typeInfo.type() == String.class || + options.columnConversionMode().equals(ColumnConversionMode.Stringify)) { + return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream(is, fieldNodeIter, + bufferInfoIter, + (buf, off, len) -> new String(buf, off, len, Charsets.UTF_8), outChunk, outOffset, + totalRows); + } + throw new UnsupportedOperationException( + "Do not yet support column conversion mode: " + options.columnConversionMode()); + default: + throw new UnsupportedOperationException(); + } + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java index 0a109230ca6..e1075e7dcf3 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java @@ -235,19 +235,20 @@ public int drainTo(final OutputStream outputStream) throws IOException { static WritableObjectChunk extractChunkFromInputStream( final StreamReaderOptions options, - final Class type, + final ChunkReadingFactory.ChunkTypeInfo typeInfo, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, final DataInput is, final WritableChunk outChunk, final int outOffset, - final int totalRows) throws IOException { + final int totalRows, + ChunkReadingFactory chunkReadingFactory) throws IOException { final FieldNodeInfo nodeInfo = fieldNodeIter.next(); final long validityBuffer = bufferInfoIter.nextLong(); final long offsetsBuffer = bufferInfoIter.nextLong(); - final Class componentType = type.getComponentType(); + 
final Class componentType = typeInfo.type().getComponentType(); final Class innerComponentType = componentType != null ? componentType.getComponentType() : null; final ChunkType chunkType; @@ -259,8 +260,11 @@ static WritableObjectChunk extractChunkFromInputStream( } if (nodeInfo.numElements == 0) { - try (final WritableChunk ignored = ChunkInputStreamGenerator.extractChunkFromInputStream( - options, chunkType, componentType, innerComponentType, fieldNodeIter, + try (final WritableChunk ignored = chunkReadingFactory.extractChunkFromInputStream( + options, + new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, innerComponentType, + typeInfo.componentArrowField()), + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { return WritableObjectChunk.makeWritableChunk(nodeInfo.numElements); } @@ -299,8 +303,10 @@ static WritableObjectChunk extractChunkFromInputStream( } final ArrayExpansionKernel kernel = ArrayExpansionKernel.makeExpansionKernel(chunkType, componentType); - try (final WritableChunk inner = ChunkInputStreamGenerator.extractChunkFromInputStream( - options, chunkType, componentType, innerComponentType, + try (final WritableChunk inner = chunkReadingFactory.extractChunkFromInputStream( + options, + new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, innerComponentType, + typeInfo.componentArrowField()), fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { chunk = kernel.contract(inner, offsets, outChunk, outOffset, totalRows); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java index 35245b11631..b7bb8cee6a4 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java @@ -235,25 +235,29 @@ public int drainTo(final OutputStream outputStream) throws IOException { static WritableObjectChunk, Values> extractChunkFromInputStream( final StreamReaderOptions options, - final Class> type, - final Class inComponentType, + final ChunkReadingFactory.ChunkTypeInfo typeInfo, final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, final DataInput is, final WritableChunk outChunk, final int outOffset, - final int totalRows) throws IOException { + final int totalRows, + ChunkReadingFactory chunkReadingFactory) throws IOException { final FieldNodeInfo nodeInfo = fieldNodeIter.next(); final long validityBuffer = bufferInfoIter.nextLong(); final long offsetsBuffer = bufferInfoIter.nextLong(); - final Class componentType = VectorExpansionKernel.getComponentType(type, inComponentType); + final Class componentType = + VectorExpansionKernel.getComponentType(typeInfo.type(), typeInfo.componentType()); final ChunkType chunkType = ChunkType.fromElementType(componentType); if (nodeInfo.numElements == 0) { - try (final WritableChunk ignored = ChunkInputStreamGenerator.extractChunkFromInputStream( - options, chunkType, componentType, componentType.getComponentType(), fieldNodeIter, bufferInfoIter, + try (final WritableChunk ignored = chunkReadingFactory.extractChunkFromInputStream( + options, + new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, componentType.getComponentType(), + typeInfo.componentArrowField()), + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { if (outChunk != null) { @@ -296,8 +300,11 @@ static 
WritableObjectChunk, Values> extractChunkFromInputStream( } final VectorExpansionKernel kernel = VectorExpansionKernel.makeExpansionKernel(chunkType, componentType); - try (final WritableChunk inner = ChunkInputStreamGenerator.extractChunkFromInputStream( - options, chunkType, componentType, componentType.getComponentType(), fieldNodeIter, bufferInfoIter, + try (final WritableChunk inner = chunkReadingFactory.extractChunkFromInputStream( + options, + new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, componentType.getComponentType(), + typeInfo.componentArrowField()), + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { chunk = kernel.contract(inner, offsets, outChunk, outOffset, totalRows); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java index 71f8a81b0fe..b0c62c652c1 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java @@ -13,6 +13,8 @@ import io.deephaven.engine.table.impl.util.BarrageMessage; import io.deephaven.extensions.barrage.BarrageSubscriptionOptions; import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator; +import io.deephaven.extensions.barrage.chunk.ChunkReadingFactory; +import io.deephaven.extensions.barrage.chunk.DefaultChunkReadingFactory; import io.deephaven.extensions.barrage.table.BarrageTable; import io.deephaven.io.streams.ByteBufferInputStream; import io.deephaven.proto.util.Exceptions; @@ -45,6 +47,7 @@ public class ArrowToTableConverter { private Class[] columnTypes; private Class[] componentTypes; protected BarrageSubscriptionOptions options = DEFAULT_SER_OPTIONS; + private Schema schema; private volatile boolean completed = false; @@ -136,6 +139,7 @@ public synchronized void onCompleted() throws InterruptedException { } protected void parseSchema(final Schema header) { + this.schema = header; // The Schema instance (especially originated from Python) can't be assumed to be valid after the return // of this method. Until https://github.com/jpy-consortium/jpy/issues/126 is resolved, we need to make a copy of // the header to use after the return of this method. @@ -194,8 +198,10 @@ protected BarrageMessage createBarrageMessage(BarrageProtoUtil.MessageInfo mi, i msg.addColumnData[ci].data = new ArrayList<>(); final int factor = (columnConversionFactors == null) ? 
1 : columnConversionFactors[ci]; try { - acd.data.add(ChunkInputStreamGenerator.extractChunkFromInputStream(options, factor, - columnChunkTypes[ci], columnTypes[ci], componentTypes[ci], fieldNodeIter, + acd.data.add(DefaultChunkReadingFactory.INSTANCE.extractChunkFromInputStream(options, factor, + new ChunkReadingFactory.ChunkTypeInfo(columnChunkTypes[ci], columnTypes[ci], componentTypes[ci], + schema.fields(ci)), + fieldNodeIter, bufferInfoIter, mi.inputStream, null, 0, 0)); } catch (final IOException unexpected) { throw new UncheckedDeephavenException(unexpected); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java index d535ffd0254..571082227db 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java @@ -19,6 +19,8 @@ import io.deephaven.engine.rowset.RowSetShiftData; import io.deephaven.engine.table.impl.util.*; import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator; +import io.deephaven.extensions.barrage.chunk.ChunkReadingFactory; +import io.deephaven.extensions.barrage.chunk.DefaultChunkReadingFactory; import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.ChunkType; import io.deephaven.internal.log.LoggerFactory; @@ -26,6 +28,7 @@ import org.apache.arrow.flatbuf.Message; import org.apache.arrow.flatbuf.MessageHeader; import org.apache.arrow.flatbuf.RecordBatch; +import org.apache.arrow.flatbuf.Schema; import java.io.IOException; import java.io.InputStream; @@ -51,8 +54,11 @@ public class BarrageStreamReader implements StreamReader { private long numModRowsRead = 0; private long numModRowsTotal = 0; + private Schema schema; private BarrageMessage msg = null; + private final ChunkReadingFactory chunkReadingFactory = DefaultChunkReadingFactory.INSTANCE; + public BarrageStreamReader(final LongConsumer deserializeTmConsumer) { this.deserializeTmConsumer = deserializeTmConsumer; } @@ -239,8 +245,10 @@ public BarrageMessage safelyParseFrom(final StreamReaderOptions options, // fill the chunk with data and assign back into the array acd.data.set(lastChunkIndex, - ChunkInputStreamGenerator.extractChunkFromInputStream(options, columnChunkTypes[ci], - columnTypes[ci], componentTypes[ci], fieldNodeIter, bufferInfoIter, ois, + chunkReadingFactory.extractChunkFromInputStream(options, + new ChunkReadingFactory.ChunkTypeInfo(columnChunkTypes[ci], + columnTypes[ci], componentTypes[ci], schema.fields(ci)), + fieldNodeIter, bufferInfoIter, ois, chunk, chunk.size(), (int) batch.length())); chunk.setSize(chunk.size() + (int) batch.length()); } @@ -270,8 +278,10 @@ public BarrageMessage safelyParseFrom(final StreamReaderOptions options, // fill the chunk with data and assign back into the array mcd.data.set(lastChunkIndex, - ChunkInputStreamGenerator.extractChunkFromInputStream(options, columnChunkTypes[ci], - columnTypes[ci], componentTypes[ci], fieldNodeIter, bufferInfoIter, ois, + chunkReadingFactory.extractChunkFromInputStream(options, + new ChunkReadingFactory.ChunkTypeInfo(columnChunkTypes[ci], + columnTypes[ci], componentTypes[ci], null), + fieldNodeIter, bufferInfoIter, ois, chunk, chunk.size(), numRowsToRead)); chunk.setSize(chunk.size() + numRowsToRead); } @@ -282,6 +292,7 @@ public BarrageMessage safelyParseFrom(final StreamReaderOptions 
options, if (header != null && header.headerType() == MessageHeader.Schema) { // there is no body and our clients do not want to see schema messages + this.schema = (Schema) header.header(new Schema()); return null; } diff --git a/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java b/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java index e0e0b1f7741..8ff73e27d93 100644 --- a/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java +++ b/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java @@ -322,7 +322,7 @@ public MethodDescriptor getClientDoExchangeDescripto .build(); } - private class BarrageDataMarshaller implements MethodDescriptor.Marshaller { + private static class BarrageDataMarshaller implements MethodDescriptor.Marshaller { private final BarrageSnapshotOptions options; private final ChunkType[] columnChunkTypes; private final Class[] columnTypes; @@ -366,7 +366,6 @@ public void onError(@NotNull final Throwable t) { } } - /** * The Completable Future is used to encapsulate the concept that the table is filled with requested data. */ From 574b9fc5f5757d084758f25bbaa403c5af1f309b Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 25 Jun 2024 06:59:52 -0500 Subject: [PATCH 096/219] Commit #2, mostly mechanical changes, splitting creation and reading --- .../chunk/ChunkInputStreamGenerator.java | 6 +- .../extensions/barrage/chunk/ChunkReader.java | 34 +++ .../barrage/chunk/ChunkReadingFactory.java | 36 +-- .../chunk/DefaultChunkReadingFactory.java | 256 ++++++++++-------- .../VarListChunkInputStreamGenerator.java | 28 +- .../VectorChunkInputStreamGenerator.java | 29 +- .../barrage/util/ArrowToTableConverter.java | 26 +- .../barrage/util/BarrageStreamReader.java | 33 ++- 8 files changed, 245 insertions(+), 203 deletions(-) create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReader.java diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java index 98cf03908e8..a2ae09fb1d0 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java @@ -197,7 +197,7 @@ static WritableChunk extractChunkFromInputStream( } @Deprecated - static WritableChunk extractChunkFromInputStream( + private static WritableChunk extractChunkFromInputStream( final StreamReaderOptions options, final int factor, final ChunkType chunkType, final Class type, final Class componentType, @@ -206,8 +206,8 @@ static WritableChunk extractChunkFromInputStream( final DataInput is, final WritableChunk outChunk, final int outOffset, final int totalRows) throws IOException { return DefaultChunkReadingFactory.INSTANCE.extractChunkFromInputStream(options, factor, - new ChunkReadingFactory.ChunkTypeInfo(chunkType, type, componentType, null), fieldNodeIter, - bufferInfoIter, is, outChunk, outOffset, totalRows); + new ChunkReadingFactory.ChunkTypeInfo(chunkType, type, componentType, null)) + .read(fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } /** diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReader.java 
b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReader.java
new file mode 100644
index 00000000000..de90744fc0d
--- /dev/null
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReader.java
@@ -0,0 +1,34 @@
+//
+// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
+//
+package io.deephaven.extensions.barrage.chunk;
+
+import io.deephaven.chunk.WritableChunk;
+import io.deephaven.chunk.attributes.Values;
+
+import java.io.DataInput;
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.PrimitiveIterator;
+
+/**
+ * Consumes Flight/Barrage streams and transforms them into WritableChunks.
+ */
+public interface ChunkReader {
+    /**
+     * Reads the next chunk of data from the stream.
+     *
+     * @param fieldNodeIter iterator over the field nodes of the record batch
+     * @param bufferInfoIter iterator over the buffer lengths of the record batch
+     * @param is the stream to read the chunk data from
+     * @param outChunk the chunk to populate, or null to allocate a new one
+     * @param outOffset the offset into outChunk at which to begin writing
+     * @param totalRows the total number of rows to read
+     * @return a chunk populated with the data read from the stream
+     */
+    WritableChunk read(final Iterator fieldNodeIter,
+            final PrimitiveIterator.OfLong bufferInfoIter,
+            final DataInput is,
+            final WritableChunk outChunk,
+            final int outOffset,
+            final int totalRows) throws IOException;
+}
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java
index d7cfb18db00..d624f5bc736 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java
@@ -67,50 +67,24 @@ public Field componentArrowField() {
      * @param options options for reading the stream
      * @param factor a multiplicative factor to apply to long values as they are read, e.g. for time unit conversion
      * @param typeInfo the type of data to read into a chunk
-     * @param fieldNodeIter iterator over the field nodes of the record batch
-     * @param bufferInfoIter iterator over the buffer lengths of the record batch
-     * @param is the stream to read the chunk data from
-     * @param outChunk the chunk to populate, or null to allocate a new one
-     * @param outOffset the offset into outChunk at which to begin writing
-     * @param totalRows the total number of rows to read
      * @return
      * @throws IOException if an error occurred while reading the stream
      */
-    WritableChunk extractChunkFromInputStream(
+    ChunkReader extractChunkFromInputStream(
             final StreamReaderOptions options,
             final int factor,
-            final ChunkTypeInfo typeInfo,
-            final Iterator fieldNodeIter,
-            final PrimitiveIterator.OfLong bufferInfoIter,
-            final DataInput is,
-            final WritableChunk outChunk,
-            final int outOffset,
-            final int totalRows) throws IOException;
+            final ChunkTypeInfo typeInfo) throws IOException;

     /**
      *
      * @param options options for reading the stream
      * @param typeInfo the type of data to read into a chunk
-     * @param fieldNodeIter iterator over the field nodes of the record batch
-     * @param bufferInfoIter iterator over the buffer lengths of the record batch
-     * @param is the stream to read the chunk data from
-     * @param outChunk the chunk to populate, or null to allocate a new one
-     * @param offset the offset into outChunk at which to begin writing
-     * @param totalRows the total number of rows to read
      * @return
      * @throws IOException if an error occurred while reading the stream
      */
-    default WritableChunk extractChunkFromInputStream(
-            final StreamReaderOptions options,
-            final ChunkTypeInfo typeInfo,
-            final Iterator fieldNodeIter,
-            final PrimitiveIterator.OfLong bufferInfoIter,
-            final DataInput is,
-            final WritableChunk outChunk,
-            final int offset,
-            final int totalRows) throws IOException {
-        return extractChunkFromInputStream(options, 1, typeInfo, fieldNodeIter, bufferInfoIter, is, outChunk, offset,
-                totalRows);
+    default ChunkReader extractChunkFromInputStream(final StreamReaderOptions options, final ChunkTypeInfo typeInfo)
+            throws IOException {
+        return extractChunkFromInputStream(options, 1, typeInfo);
     }

 }
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java
index e7a5bcd665c..6a7ceead2d2 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java
+++ 
b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java @@ -36,167 +36,201 @@ public final class DefaultChunkReadingFactory implements ChunkReadingFactory { public static final ChunkReadingFactory INSTANCE = new DefaultChunkReadingFactory(); @Override - public WritableChunk extractChunkFromInputStream(StreamReaderOptions options, int factor, - ChunkTypeInfo typeInfo, Iterator fieldNodeIter, - PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, - int totalRows) throws IOException { + public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int factor, + ChunkTypeInfo typeInfo) throws IOException { // TODO (deephaven-core#5453): pass in ArrowType to enable ser/deser of single java class in multiple formats switch (typeInfo.chunkType()) { case Boolean: throw new UnsupportedOperationException("Booleans are reinterpreted as bytes"); case Char: - return CharChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> CharChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Byte: if (typeInfo.type() == Boolean.class || typeInfo.type() == boolean.class) { - return BooleanChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> BooleanChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } - return ByteChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> ByteChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Short: - return ShortChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> ShortChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Int: - return IntChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> IntChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Long: if (factor == 1) { - return LongChunkInputStreamGenerator.extractChunkFromInputStream( - options, - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithConversion( - options, - (long v) -> v == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_LONG : (v * factor), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> LongChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + } + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> LongChunkInputStreamGenerator.extractChunkFromInputStreamWithConversion( + options, + (long v) -> v == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : (v * factor), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Float: - return FloatChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> FloatChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Double: - return DoubleChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> DoubleChunkInputStreamGenerator.extractChunkFromInputStream( + options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); case Object: if (typeInfo.type().isArray()) { if (typeInfo.componentType() == byte.class) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - is, - fieldNodeIter, - bufferInfoIter, - (buf, off, len) -> Arrays.copyOfRange(buf, off, off + len), - outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + is, + fieldNodeIter, + bufferInfoIter, + (buf, off, len) -> Arrays.copyOfRange(buf, off, off + len), + outChunk, outOffset, totalRows); } else { - return VarListChunkInputStreamGenerator.extractChunkFromInputStream(options, typeInfo, - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows, this); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> VarListChunkInputStreamGenerator.extractChunkFromInputStream(options, + typeInfo, + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows, this); } } if (Vector.class.isAssignableFrom(typeInfo.type())) { - return VectorChunkInputStreamGenerator.extractChunkFromInputStream(options, - typeInfo, fieldNodeIter, bufferInfoIter, - is, outChunk, outOffset, totalRows, this); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> VectorChunkInputStreamGenerator.extractChunkFromInputStream(options, + typeInfo, fieldNodeIter, bufferInfoIter, + is, outChunk, outOffset, totalRows, this); } if (typeInfo.type() == BigInteger.class) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - is, - fieldNodeIter, - bufferInfoIter, - BigInteger::new, - outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + is, + fieldNodeIter, + bufferInfoIter, + BigInteger::new, + outChunk, outOffset, totalRows); } if (typeInfo.type() == BigDecimal.class) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( - is, - fieldNodeIter, - bufferInfoIter, - (final byte[] buf, final int offset, final int 
length) -> { - // read the int scale value as little endian, arrow's endianness. - final byte b1 = buf[offset]; - final byte b2 = buf[offset + 1]; - final byte b3 = buf[offset + 2]; - final byte b4 = buf[offset + 3]; - final int scale = b4 << 24 | (b3 & 0xFF) << 16 | (b2 & 0xFF) << 8 | (b1 & 0xFF); - return new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale); - }, - outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( + is, + fieldNodeIter, + bufferInfoIter, + (final byte[] buf, final int offset, final int length) -> { + // read the int scale value as little endian, arrow's endianness. + final byte b1 = buf[offset]; + final byte b2 = buf[offset + 1]; + final byte b3 = buf[offset + 2]; + final byte b4 = buf[offset + 3]; + final int scale = b4 << 24 | (b3 & 0xFF) << 16 | (b2 & 0xFF) << 8 | (b1 & 0xFF); + return new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale); + }, + outChunk, outOffset, totalRows); } if (typeInfo.type() == Instant.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Long.BYTES, options, io -> { - final long value = io.readLong(); - if (value == QueryConstants.NULL_LONG) { - return null; - } - return DateTimeUtils.epochNanosToInstant(value * factor); - }, - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> FixedWidthChunkInputStreamGenerator + .extractChunkFromInputStreamWithTypeConversion( + Long.BYTES, options, io -> { + final long value = io.readLong(); + if (value == QueryConstants.NULL_LONG) { + return null; + } + return DateTimeUtils.epochNanosToInstant(value * factor); + }, + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } if (typeInfo.type() == ZonedDateTime.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Long.BYTES, options, io -> { - final long value = io.readLong(); - if (value == QueryConstants.NULL_LONG) { - return null; - } - return DateTimeUtils.epochNanosToZonedDateTime( - value * factor, DateTimeUtils.timeZone()); - }, - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> FixedWidthChunkInputStreamGenerator + .extractChunkFromInputStreamWithTypeConversion( + Long.BYTES, options, io -> { + final long value = io.readLong(); + if (value == QueryConstants.NULL_LONG) { + return null; + } + return DateTimeUtils.epochNanosToZonedDateTime( + value * factor, DateTimeUtils.timeZone()); + }, + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } if (typeInfo.type() == Byte.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Byte.BYTES, options, io -> TypeUtils.box(io.readByte()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> FixedWidthChunkInputStreamGenerator + .extractChunkFromInputStreamWithTypeConversion( + Byte.BYTES, options, io -> TypeUtils.box(io.readByte()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } if (typeInfo.type() == Character.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Character.BYTES, options, io -> 
TypeUtils.box(io.readChar()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> FixedWidthChunkInputStreamGenerator + .extractChunkFromInputStreamWithTypeConversion( + Character.BYTES, options, io -> TypeUtils.box(io.readChar()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } if (typeInfo.type() == Double.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Double.BYTES, options, io -> TypeUtils.box(io.readDouble()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> FixedWidthChunkInputStreamGenerator + .extractChunkFromInputStreamWithTypeConversion( + Double.BYTES, options, io -> TypeUtils.box(io.readDouble()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } if (typeInfo.type() == Float.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Float.BYTES, options, io -> TypeUtils.box(io.readFloat()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> FixedWidthChunkInputStreamGenerator + .extractChunkFromInputStreamWithTypeConversion( + Float.BYTES, options, io -> TypeUtils.box(io.readFloat()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } if (typeInfo.type() == Integer.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Integer.BYTES, options, io -> TypeUtils.box(io.readInt()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> FixedWidthChunkInputStreamGenerator + .extractChunkFromInputStreamWithTypeConversion( + Integer.BYTES, options, io -> TypeUtils.box(io.readInt()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } if (typeInfo.type() == Long.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Long.BYTES, options, io -> TypeUtils.box(io.readLong()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> FixedWidthChunkInputStreamGenerator + .extractChunkFromInputStreamWithTypeConversion( + Long.BYTES, options, io -> TypeUtils.box(io.readLong()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } if (typeInfo.type() == Short.class) { - return FixedWidthChunkInputStreamGenerator.extractChunkFromInputStreamWithTypeConversion( - Short.BYTES, options, io -> TypeUtils.box(io.readShort()), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> FixedWidthChunkInputStreamGenerator + .extractChunkFromInputStreamWithTypeConversion( + Short.BYTES, options, io -> TypeUtils.box(io.readShort()), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } if (typeInfo.type() == LocalDate.class) { - return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( - options, - value -> value == QueryConstants.NULL_LONG - ? 
null - : LocalDate.ofEpochDay(value / MS_PER_DAY), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( + options, + value -> value == QueryConstants.NULL_LONG + ? null + : LocalDate.ofEpochDay(value / MS_PER_DAY), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } if (typeInfo.type() == LocalTime.class) { - return LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( - options, - value -> value == QueryConstants.NULL_LONG ? null : LocalTime.ofNanoOfDay(value), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( + options, + value -> value == QueryConstants.NULL_LONG ? null : LocalTime.ofNanoOfDay(value), + fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } if (typeInfo.type() == String.class || options.columnConversionMode().equals(ColumnConversionMode.Stringify)) { - return VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream(is, fieldNodeIter, - bufferInfoIter, - (buf, off, len) -> new String(buf, off, len, Charsets.UTF_8), outChunk, outOffset, - totalRows); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, + totalRows) -> VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream(is, + fieldNodeIter, + bufferInfoIter, + (buf, off, len) -> new String(buf, off, len, Charsets.UTF_8), outChunk, outOffset, + totalRows); } throw new UnsupportedOperationException( "Do not yet support column conversion mode: " + options.columnConversionMode()); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java index e1075e7dcf3..a1de362f385 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java @@ -244,10 +244,6 @@ static WritableObjectChunk extractChunkFromInputStream( final int totalRows, ChunkReadingFactory chunkReadingFactory) throws IOException { - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long offsetsBuffer = bufferInfoIter.nextLong(); - final Class componentType = typeInfo.type().getComponentType(); final Class innerComponentType = componentType != null ? 
componentType.getComponentType() : null; @@ -259,13 +255,18 @@ static WritableObjectChunk extractChunkFromInputStream( chunkType = ChunkType.fromElementType(componentType); } + ChunkReader componentReader = chunkReadingFactory.extractChunkFromInputStream( + options, + new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, innerComponentType, + typeInfo.componentArrowField())); + + final FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long offsetsBuffer = bufferInfoIter.nextLong(); + if (nodeInfo.numElements == 0) { - try (final WritableChunk ignored = chunkReadingFactory.extractChunkFromInputStream( - options, - new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, innerComponentType, - typeInfo.componentArrowField()), - fieldNodeIter, - bufferInfoIter, is, null, 0, 0)) { + try (final WritableChunk ignored = + componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { return WritableObjectChunk.makeWritableChunk(nodeInfo.numElements); } } @@ -303,11 +304,8 @@ static WritableObjectChunk extractChunkFromInputStream( } final ArrayExpansionKernel kernel = ArrayExpansionKernel.makeExpansionKernel(chunkType, componentType); - try (final WritableChunk inner = chunkReadingFactory.extractChunkFromInputStream( - options, - new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, innerComponentType, - typeInfo.componentArrowField()), - fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + try (final WritableChunk inner = + componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { chunk = kernel.contract(inner, offsets, outChunk, outOffset, totalRows); long nextValid = 0; diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java index b7bb8cee6a4..5ee91971c5f 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java @@ -244,22 +244,22 @@ static WritableObjectChunk, Values> extractChunkFromInputStream( final int totalRows, ChunkReadingFactory chunkReadingFactory) throws IOException { + final Class componentType = + VectorExpansionKernel.getComponentType(typeInfo.type(), typeInfo.componentType()); + final ChunkType chunkType = ChunkType.fromElementType(componentType); + ChunkReader componentReader = chunkReadingFactory.extractChunkFromInputStream( + options, + new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, componentType.getComponentType(), + typeInfo.componentArrowField())); + final FieldNodeInfo nodeInfo = fieldNodeIter.next(); final long validityBuffer = bufferInfoIter.nextLong(); final long offsetsBuffer = bufferInfoIter.nextLong(); - final Class componentType = - VectorExpansionKernel.getComponentType(typeInfo.type(), typeInfo.componentType()); - final ChunkType chunkType = ChunkType.fromElementType(componentType); if (nodeInfo.numElements == 0) { - try (final WritableChunk ignored = chunkReadingFactory.extractChunkFromInputStream( - options, - new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, componentType.getComponentType(), - typeInfo.componentArrowField()), - fieldNodeIter, bufferInfoIter, - is, - null, 0, 0)) { + try (final WritableChunk ignored = + componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 
0, 0)) { if (outChunk != null) { return outChunk.asWritableObjectChunk(); } @@ -300,13 +300,8 @@ static WritableObjectChunk, Values> extractChunkFromInputStream( } final VectorExpansionKernel kernel = VectorExpansionKernel.makeExpansionKernel(chunkType, componentType); - try (final WritableChunk inner = chunkReadingFactory.extractChunkFromInputStream( - options, - new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, componentType.getComponentType(), - typeInfo.componentArrowField()), - fieldNodeIter, bufferInfoIter, - is, - null, 0, 0)) { + try (final WritableChunk inner = + componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { chunk = kernel.contract(inner, offsets, outChunk, outOffset, totalRows); long nextValid = 0; diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java index b0c62c652c1..04e257263a9 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java @@ -13,6 +13,7 @@ import io.deephaven.engine.table.impl.util.BarrageMessage; import io.deephaven.extensions.barrage.BarrageSubscriptionOptions; import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator; +import io.deephaven.extensions.barrage.chunk.ChunkReader; import io.deephaven.extensions.barrage.chunk.ChunkReadingFactory; import io.deephaven.extensions.barrage.chunk.DefaultChunkReadingFactory; import io.deephaven.extensions.barrage.table.BarrageTable; @@ -31,6 +32,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; +import java.util.List; import java.util.PrimitiveIterator; import static io.deephaven.extensions.barrage.util.BarrageProtoUtil.DEFAULT_SER_OPTIONS; @@ -42,12 +44,10 @@ public class ArrowToTableConverter { protected long totalRowsRead = 0; protected BarrageTable resultTable; - private ChunkType[] columnChunkTypes; - private int[] columnConversionFactors; private Class[] columnTypes; private Class[] componentTypes; protected BarrageSubscriptionOptions options = DEFAULT_SER_OPTIONS; - private Schema schema; + private final List readers = new ArrayList<>(); private volatile boolean completed = false; @@ -139,7 +139,6 @@ public synchronized void onCompleted() throws InterruptedException { } protected void parseSchema(final Schema header) { - this.schema = header; // The Schema instance (especially originated from Python) can't be assumed to be valid after the return // of this method. Until https://github.com/jpy-consortium/jpy/issues/126 is resolved, we need to make a copy of // the header to use after the return of this method. @@ -151,10 +150,18 @@ protected void parseSchema(final Schema header) { resultTable = BarrageTable.make(null, result.tableDef, result.attributes, null); resultTable.setFlat(); - columnConversionFactors = result.conversionFactors; - columnChunkTypes = result.computeWireChunkTypes(); + ChunkType[] columnChunkTypes = result.computeWireChunkTypes(); columnTypes = result.computeWireTypes(); componentTypes = result.computeWireComponentTypes(); + // TODO see the note above, this is not safe since the buffer originated in python - we need to copy the schema + // before doing this + for (int i = 0; i < header.fieldsLength(); i++) { + final int factor = (result.conversionFactors == null) ? 
1 : result.conversionFactors[i]; + ChunkReader reader = DefaultChunkReadingFactory.INSTANCE.extractChunkFromInputStream(options, factor, + new ChunkReadingFactory.ChunkTypeInfo(columnChunkTypes[i], columnTypes[i], componentTypes[i], + header.fields(i))); + readers.add(reader); + } // retain reference until the resultTable can be sealed resultTable.retainReference(); @@ -196,13 +203,8 @@ protected BarrageMessage createBarrageMessage(BarrageProtoUtil.MessageInfo mi, i final BarrageMessage.AddColumnData acd = new BarrageMessage.AddColumnData(); msg.addColumnData[ci] = acd; msg.addColumnData[ci].data = new ArrayList<>(); - final int factor = (columnConversionFactors == null) ? 1 : columnConversionFactors[ci]; try { - acd.data.add(DefaultChunkReadingFactory.INSTANCE.extractChunkFromInputStream(options, factor, - new ChunkReadingFactory.ChunkTypeInfo(columnChunkTypes[ci], columnTypes[ci], componentTypes[ci], - schema.fields(ci)), - fieldNodeIter, - bufferInfoIter, mi.inputStream, null, 0, 0)); + acd.data.add(readers.get(ci).read(fieldNodeIter, bufferInfoIter, mi.inputStream, null, 0, 0)); } catch (final IOException unexpected) { throw new UncheckedDeephavenException(unexpected); } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java index 571082227db..b38b1eedd57 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java @@ -19,12 +19,14 @@ import io.deephaven.engine.rowset.RowSetShiftData; import io.deephaven.engine.table.impl.util.*; import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator; +import io.deephaven.extensions.barrage.chunk.ChunkReader; import io.deephaven.extensions.barrage.chunk.ChunkReadingFactory; import io.deephaven.extensions.barrage.chunk.DefaultChunkReadingFactory; import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.ChunkType; import io.deephaven.internal.log.LoggerFactory; import io.deephaven.io.logger.Logger; +import org.apache.arrow.flatbuf.Field; import org.apache.arrow.flatbuf.Message; import org.apache.arrow.flatbuf.MessageHeader; import org.apache.arrow.flatbuf.RecordBatch; @@ -37,6 +39,7 @@ import java.util.Arrays; import java.util.BitSet; import java.util.Iterator; +import java.util.List; import java.util.PrimitiveIterator; import java.util.function.LongConsumer; @@ -54,10 +57,10 @@ public class BarrageStreamReader implements StreamReader { private long numModRowsRead = 0; private long numModRowsTotal = 0; - private Schema schema; private BarrageMessage msg = null; private final ChunkReadingFactory chunkReadingFactory = DefaultChunkReadingFactory.INSTANCE; + private final List readers = new ArrayList<>(); public BarrageStreamReader(final LongConsumer deserializeTmConsumer) { this.deserializeTmConsumer = deserializeTmConsumer; @@ -244,12 +247,8 @@ public BarrageMessage safelyParseFrom(final StreamReaderOptions options, } // fill the chunk with data and assign back into the array - acd.data.set(lastChunkIndex, - chunkReadingFactory.extractChunkFromInputStream(options, - new ChunkReadingFactory.ChunkTypeInfo(columnChunkTypes[ci], - columnTypes[ci], componentTypes[ci], schema.fields(ci)), - fieldNodeIter, bufferInfoIter, ois, - chunk, chunk.size(), (int) batch.length())); + acd.data.set(lastChunkIndex, 
readers.get(ci).read(fieldNodeIter, bufferInfoIter, ois, chunk, + chunk.size(), (int) batch.length())); chunk.setSize(chunk.size() + (int) batch.length()); } numAddRowsRead += batch.length(); @@ -277,12 +276,8 @@ public BarrageMessage safelyParseFrom(final StreamReaderOptions options, } // fill the chunk with data and assign back into the array - mcd.data.set(lastChunkIndex, - chunkReadingFactory.extractChunkFromInputStream(options, - new ChunkReadingFactory.ChunkTypeInfo(columnChunkTypes[ci], - columnTypes[ci], componentTypes[ci], null), - fieldNodeIter, bufferInfoIter, ois, - chunk, chunk.size(), numRowsToRead)); + mcd.data.set(lastChunkIndex, readers.get(ci).read(fieldNodeIter, bufferInfoIter, ois, chunk, + chunk.size(), numRowsToRead)); chunk.setSize(chunk.size() + numRowsToRead); } numModRowsRead += batch.length(); @@ -292,7 +287,17 @@ public BarrageMessage safelyParseFrom(final StreamReaderOptions options, if (header != null && header.headerType() == MessageHeader.Schema) { // there is no body and our clients do not want to see schema messages - this.schema = (Schema) header.header(new Schema()); + Schema schema = new Schema(); + header.header(schema); + for (int i = 0; i < schema.fieldsLength(); i++) { + // TODO as with ArrowToTableConverter, see about copying the bytebuffer so we control the payload + // ourselves + Field field = schema.fields(i); + ChunkReader chunkReader = chunkReadingFactory.extractChunkFromInputStream(options, + new ChunkReadingFactory.ChunkTypeInfo(columnChunkTypes[i], columnTypes[i], + componentTypes[i], field)); + readers.add(chunkReader); + } return null; } From 117c94f21f834100bac61c20285ae7b01c23f3b7 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 25 Jun 2024 08:06:43 -0500 Subject: [PATCH 097/219] Commit #3, create vector/array chunk readers to do type lookups once --- .../barrage/chunk/ChunkReadingFactory.java | 11 +- .../chunk/DefaultChunkReadingFactory.java | 18 +-- .../VarListChunkInputStreamGenerator.java | 90 -------------- .../barrage/chunk/VarListChunkReader.java | 114 ++++++++++++++++++ .../VectorChunkInputStreamGenerator.java | 87 ------------- .../barrage/chunk/VectorChunkReader.java | 110 +++++++++++++++++ 6 files changed, 230 insertions(+), 200 deletions(-) create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkReader.java create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkReader.java diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java index d624f5bc736..d3f8ba84a95 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java @@ -68,22 +68,17 @@ public Field componentArrowField() { * @param factor * @param typeInfo * @return - * @throws IOException */ - ChunkReader extractChunkFromInputStream( - final StreamReaderOptions options, - final int factor, - final ChunkTypeInfo typeInfo) throws IOException; + ChunkReader extractChunkFromInputStream(final StreamReaderOptions options, final int factor, + final ChunkTypeInfo typeInfo); /** * * @param options * @param typeInfo * @return - * @throws IOException */ - default ChunkReader extractChunkFromInputStream(final StreamReaderOptions options, final ChunkTypeInfo typeInfo) - throws IOException { + 
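[Note, hedged: the ChunkReader type consumed by these factory methods is introduced in an earlier commit of this series and is not shown in this patch. A minimal sketch of the contract implied by the call sites — e.g. readers.get(ci).read(fieldNodeIter, bufferInfoIter, ois, chunk, chunk.size(), n) — and by the implementations added below; names and generics are reconstructed, not authoritative.]

import io.deephaven.chunk.WritableChunk;
import io.deephaven.chunk.attributes.Values;

import java.io.DataInput;
import java.io.IOException;
import java.util.Iterator;
import java.util.PrimitiveIterator;

// Sketch: one reader per column, resolved once from the schema, then invoked
// for each record batch to deserialize that column's next chunk of rows.
// Assumes the same package as ChunkInputStreamGenerator.
public interface ChunkReader {
    WritableChunk<Values> read(
            Iterator<ChunkInputStreamGenerator.FieldNodeInfo> fieldNodeIter,
            PrimitiveIterator.OfLong bufferInfoIter,
            DataInput is,
            WritableChunk<Values> outChunk,
            int outOffset,
            int totalRows) throws IOException;
}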
default ChunkReader extractChunkFromInputStream(final StreamReaderOptions options, final ChunkTypeInfo typeInfo) { return extractChunkFromInputStream(options, 1, typeInfo); } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java index 6a7ceead2d2..df41a1ae7ca 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java @@ -4,8 +4,6 @@ package io.deephaven.extensions.barrage.chunk; import com.google.common.base.Charsets; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.attributes.Values; import io.deephaven.extensions.barrage.ColumnConversionMode; import io.deephaven.extensions.barrage.util.StreamReaderOptions; import io.deephaven.time.DateTimeUtils; @@ -13,8 +11,6 @@ import io.deephaven.util.type.TypeUtils; import io.deephaven.vector.Vector; -import java.io.DataInput; -import java.io.IOException; import java.math.BigDecimal; import java.math.BigInteger; import java.time.Instant; @@ -22,8 +18,6 @@ import java.time.LocalTime; import java.time.ZonedDateTime; import java.util.Arrays; -import java.util.Iterator; -import java.util.PrimitiveIterator; import static io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator.MS_PER_DAY; @@ -37,7 +31,7 @@ public final class DefaultChunkReadingFactory implements ChunkReadingFactory { @Override public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int factor, - ChunkTypeInfo typeInfo) throws IOException { + ChunkTypeInfo typeInfo) { // TODO (deephaven-core#5453): pass in ArrowType to enable ser/deser of single java class in multiple formats switch (typeInfo.chunkType()) { case Boolean: @@ -93,17 +87,11 @@ public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int (buf, off, len) -> Arrays.copyOfRange(buf, off, off + len), outChunk, outOffset, totalRows); } else { - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> VarListChunkInputStreamGenerator.extractChunkFromInputStream(options, - typeInfo, - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows, this); + return new VarListChunkReader<>(options, typeInfo, this); } } if (Vector.class.isAssignableFrom(typeInfo.type())) { - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> VectorChunkInputStreamGenerator.extractChunkFromInputStream(options, - typeInfo, fieldNodeIter, bufferInfoIter, - is, outChunk, outOffset, totalRows, this); + return new VectorChunkReader(options, typeInfo, this); } if (typeInfo.type() == BigInteger.class) { return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java index a1de362f385..470b0b87291 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java @@ -233,95 +233,5 @@ public int drainTo(final OutputStream outputStream) throws IOException { } } - static WritableObjectChunk extractChunkFromInputStream( - 
final StreamReaderOptions options, - final ChunkReadingFactory.ChunkTypeInfo typeInfo, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows, - ChunkReadingFactory chunkReadingFactory) throws IOException { - - final Class componentType = typeInfo.type().getComponentType(); - final Class innerComponentType = componentType != null ? componentType.getComponentType() : null; - - final ChunkType chunkType; - if (componentType == boolean.class || componentType == Boolean.class) { - // Note: Internally booleans are passed around as bytes, but the wire format is packed bits. - chunkType = ChunkType.Byte; - } else { - chunkType = ChunkType.fromElementType(componentType); - } - - ChunkReader componentReader = chunkReadingFactory.extractChunkFromInputStream( - options, - new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, innerComponentType, - typeInfo.componentArrowField())); - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long offsetsBuffer = bufferInfoIter.nextLong(); - - if (nodeInfo.numElements == 0) { - try (final WritableChunk ignored = - componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - return WritableObjectChunk.makeWritableChunk(nodeInfo.numElements); - } - } - - final WritableObjectChunk chunk; - final int numValidityLongs = (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs); - final WritableIntChunk offsets = - WritableIntChunk.makeWritableChunk(nodeInfo.numElements + 1)) { - // Read validity buffer: - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity buffer by here - - // Read offsets: - final long offBufRead = (nodeInfo.numElements + 1L) * Integer.BYTES; - if (offsetsBuffer < offBufRead) { - throw new IllegalStateException("offset buffer is too short for the expected number of elements"); - } - for (int i = 0; i < nodeInfo.numElements + 1; ++i) { - offsets.set(i, is.readInt()); - } - if (offBufRead < offsetsBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, offsetsBuffer - offBufRead)); - } - - final ArrayExpansionKernel kernel = ArrayExpansionKernel.makeExpansionKernel(chunkType, componentType); - try (final WritableChunk inner = - componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - chunk = kernel.contract(inner, offsets, outChunk, outOffset, totalRows); - - long nextValid = 0; - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - if ((ii % 64) == 0) { - nextValid = isValid.get(ii / 64); - } - if ((nextValid & 0x1) == 0x0) { - chunk.set(outOffset + ii, null); - } - nextValid >>= 1; - } - } - } - - return chunk; - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkReader.java new file mode 100644 index 00000000000..71c294d6387 --- /dev/null +++ 
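[Note, hedged: a usage sketch of the resulting flow; the factory and reader calls are copied from the hunks above, while the surrounding variables are assumed. The type lookup now happens once per column when the schema arrives, and the cached reader is reused for every record batch.]

// Once, at schema time (cf. parseSchema / the MessageHeader.Schema branch above):
ChunkReader reader = DefaultChunkReadingFactory.INSTANCE.extractChunkFromInputStream(
        options,
        new ChunkReadingFactory.ChunkTypeInfo(
                columnChunkTypes[i], columnTypes[i], componentTypes[i], schema.fields(i)));
readers.add(reader);

// Per record batch (cf. createBarrageMessage / safelyParseFrom above):
acd.data.add(readers.get(ci).read(fieldNodeIter, bufferInfoIter, is, null, 0, 0));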
b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkReader.java @@ -0,0 +1,114 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.chunk.ChunkType; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableIntChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.ChunkPositions; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.chunk.array.ArrayExpansionKernel; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; + +public class VarListChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "VarListChunkReader"; + + private final ArrayExpansionKernel kernel; + private final ChunkReader componentReader; + + public VarListChunkReader(final StreamReaderOptions options, final ChunkReadingFactory.ChunkTypeInfo typeInfo, + ChunkReadingFactory chunkReadingFactory) { + final Class componentType = typeInfo.type().getComponentType(); + final Class innerComponentType = componentType != null ? componentType.getComponentType() : null; + + final ChunkType chunkType; + if (componentType == boolean.class || componentType == Boolean.class) { + // Note: Internally booleans are passed around as bytes, but the wire format is packed bits. + chunkType = ChunkType.Byte; + } else { + chunkType = ChunkType.fromElementType(componentType); + } + kernel = ArrayExpansionKernel.makeExpansionKernel(chunkType, componentType); + + componentReader = chunkReadingFactory.extractChunkFromInputStream( + options, + new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, innerComponentType, + typeInfo.componentArrowField())); + } + + @Override + public WritableObjectChunk read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long offsetsBuffer = bufferInfoIter.nextLong(); + + if (nodeInfo.numElements == 0) { + try (final WritableChunk ignored = + componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + return WritableObjectChunk.makeWritableChunk(nodeInfo.numElements); + } + } + + final WritableObjectChunk chunk; + final int numValidityLongs = (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs); + final WritableIntChunk offsets = + WritableIntChunk.makeWritableChunk(nodeInfo.numElements + 1)) { + // Read validity buffer: + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + // Read offsets: + final long offBufRead = (nodeInfo.numElements + 1L) * Integer.BYTES; + if 
(offsetsBuffer < offBufRead) { + throw new IllegalStateException("offset buffer is too short for the expected number of elements"); + } + for (int i = 0; i < nodeInfo.numElements + 1; ++i) { + offsets.set(i, is.readInt()); + } + if (offBufRead < offsetsBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, offsetsBuffer - offBufRead)); + } + + try (final WritableChunk inner = + componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + chunk = kernel.contract(inner, offsets, outChunk, outOffset, totalRows); + + long nextValid = 0; + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + if ((ii % 64) == 0) { + nextValid = isValid.get(ii / 64); + } + if ((nextValid & 0x1) == 0x0) { + chunk.set(outOffset + ii, null); + } + nextValid >>= 1; + } + } + } + + return chunk; + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java index 5ee91971c5f..620ae84541f 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java @@ -232,91 +232,4 @@ public int drainTo(final OutputStream outputStream) throws IOException { return LongSizedDataStructure.intSize(DEBUG_NAME, bytesWritten); } } - - static WritableObjectChunk, Values> extractChunkFromInputStream( - final StreamReaderOptions options, - final ChunkReadingFactory.ChunkTypeInfo typeInfo, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows, - ChunkReadingFactory chunkReadingFactory) throws IOException { - - final Class componentType = - VectorExpansionKernel.getComponentType(typeInfo.type(), typeInfo.componentType()); - final ChunkType chunkType = ChunkType.fromElementType(componentType); - ChunkReader componentReader = chunkReadingFactory.extractChunkFromInputStream( - options, - new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, componentType.getComponentType(), - typeInfo.componentArrowField())); - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long offsetsBuffer = bufferInfoIter.nextLong(); - - - if (nodeInfo.numElements == 0) { - try (final WritableChunk ignored = - componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - if (outChunk != null) { - return outChunk.asWritableObjectChunk(); - } - return WritableObjectChunk.makeWritableChunk(totalRows); - } - } - - final WritableObjectChunk, Values> chunk; - final int numValidityLongs = (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs); - final WritableIntChunk offsets = - WritableIntChunk.makeWritableChunk(nodeInfo.numElements + 1)) { - // Read validity buffer: - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity 
buffer by here - - // Read offsets: - final long offBufRead = (nodeInfo.numElements + 1L) * Integer.BYTES; - if (offsetsBuffer < offBufRead) { - throw new IllegalStateException("offset buffer is too short for the expected number of elements"); - } - for (int i = 0; i < nodeInfo.numElements + 1; ++i) { - offsets.set(i, is.readInt()); - } - if (offBufRead < offsetsBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, offsetsBuffer - offBufRead)); - } - - final VectorExpansionKernel kernel = VectorExpansionKernel.makeExpansionKernel(chunkType, componentType); - try (final WritableChunk inner = - componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - chunk = kernel.contract(inner, offsets, outChunk, outOffset, totalRows); - - long nextValid = 0; - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - if ((ii % 64) == 0) { - nextValid = isValid.get(ii / 64); - } - if ((nextValid & 0x1) == 0x0) { - chunk.set(outOffset + ii, null); - } - nextValid >>= 1; - } - } - } - - return chunk; - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkReader.java new file mode 100644 index 00000000000..decf9419d9d --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkReader.java @@ -0,0 +1,110 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.chunk.ChunkType; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableIntChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.ChunkPositions; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.chunk.vector.VectorExpansionKernel; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.datastructures.LongSizedDataStructure; +import io.deephaven.vector.Vector; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; + +public class VectorChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "VectorChunkReader"; + private final ChunkReader componentReader; + private final VectorExpansionKernel kernel; + + public VectorChunkReader(final StreamReaderOptions options, final ChunkReadingFactory.ChunkTypeInfo typeInfo, + ChunkReadingFactory chunkReadingFactory) { + + final Class componentType = + VectorExpansionKernel.getComponentType(typeInfo.type(), typeInfo.componentType()); + final ChunkType chunkType = ChunkType.fromElementType(componentType); + componentReader = chunkReadingFactory.extractChunkFromInputStream( + options, + new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, componentType.getComponentType(), + typeInfo.componentArrowField())); + kernel = VectorExpansionKernel.makeExpansionKernel(chunkType, componentType); + } + + @Override + public WritableObjectChunk, Values> read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long offsetsBuffer = bufferInfoIter.nextLong(); + + if (nodeInfo.numElements == 0) { + try (final 
WritableChunk ignored = + componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + if (outChunk != null) { + return outChunk.asWritableObjectChunk(); + } + return WritableObjectChunk.makeWritableChunk(totalRows); + } + } + + final WritableObjectChunk, Values> chunk; + final int numValidityLongs = (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs); + final WritableIntChunk offsets = + WritableIntChunk.makeWritableChunk(nodeInfo.numElements + 1)) { + // Read validity buffer: + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + // Read offsets: + final long offBufRead = (nodeInfo.numElements + 1L) * Integer.BYTES; + if (offsetsBuffer < offBufRead) { + throw new IllegalStateException("offset buffer is too short for the expected number of elements"); + } + for (int i = 0; i < nodeInfo.numElements + 1; ++i) { + offsets.set(i, is.readInt()); + } + if (offBufRead < offsetsBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, offsetsBuffer - offBufRead)); + } + + try (final WritableChunk inner = + componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + chunk = kernel.contract(inner, offsets, outChunk, outOffset, totalRows); + + long nextValid = 0; + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + if ((ii % 64) == 0) { + nextValid = isValid.get(ii / 64); + } + if ((nextValid & 0x1) == 0x0) { + chunk.set(outOffset + ii, null); + } + nextValid >>= 1; + } + } + } + + return chunk; + } +} From cd2039f439eb511a3e66de63b88c74867d6471f4 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 25 Jun 2024 13:44:40 -0500 Subject: [PATCH 098/219] Commit #4, replicate new chunk readers for primitives --- .../chunk/ByteChunkInputStreamGenerator.java | 197 ----------------- .../barrage/chunk/ByteChunkReader.java | 204 ++++++++++++++++++ .../chunk/CharChunkInputStreamGenerator.java | 197 ----------------- .../barrage/chunk/CharChunkReader.java | 200 +++++++++++++++++ .../chunk/DefaultChunkReadingFactory.java | 51 ++--- .../DoubleChunkInputStreamGenerator.java | 197 ----------------- .../barrage/chunk/DoubleChunkReader.java | 204 ++++++++++++++++++ .../chunk/FloatChunkInputStreamGenerator.java | 197 ----------------- .../barrage/chunk/FloatChunkReader.java | 204 ++++++++++++++++++ .../chunk/IntChunkInputStreamGenerator.java | 197 ----------------- .../barrage/chunk/IntChunkReader.java | 204 ++++++++++++++++++ .../chunk/LongChunkInputStreamGenerator.java | 197 ----------------- .../barrage/chunk/LongChunkReader.java | 204 ++++++++++++++++++ .../chunk/ShortChunkInputStreamGenerator.java | 197 ----------------- .../barrage/chunk/ShortChunkReader.java | 204 ++++++++++++++++++ .../replicators/ReplicateBarrageUtils.java | 3 + 16 files changed, 1440 insertions(+), 1417 deletions(-) create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkReader.java create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkReader.java create mode 100644 
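[Note, hedged: the primitive readers added in this commit are replicated from CharChunkReader via "./gradlew replicateBarrageUtils", per the auto-generated headers below. The sketch assumes LongChunkReader mirrors the ByteChunkReader shown below, including a LongConversion constructor argument and a transform(...) hook; it shows how the old static helpers are replaced.]

// Old: LongChunkInputStreamGenerator.extractChunkFromInputStreamWithConversion(
//         options, v -> v == NULL_LONG ? NULL_LONG : v * factor, ...)
// New (sketch): the conversion is captured once, at construction time.
ChunkReader scaled = new LongChunkReader(options,
        (long v) -> v == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : v * factor);

// Boxing/transforming variants hang off transform(...), mirroring the LocalDate
// case handled by the factory earlier in this series:
ChunkReader dates = new LongChunkReader(options).transform(
        v -> v == QueryConstants.NULL_LONG ? null : LocalDate.ofEpochDay(v / MS_PER_DAY));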
extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkReader.java create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkReader.java create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkReader.java create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkReader.java diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java index c2cc7f3e453..d334e031bed 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkInputStreamGenerator.java @@ -7,10 +7,7 @@ // @formatter:off package io.deephaven.extensions.barrage.chunk; -import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.primitive.function.ToByteFunction; @@ -21,17 +18,11 @@ import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.ByteChunk; import io.deephaven.chunk.WritableByteChunk; -import io.deephaven.chunk.WritableLongChunk; import io.deephaven.util.type.TypeUtils; import org.jetbrains.annotations.Nullable; -import java.io.DataInput; import java.io.IOException; import java.io.OutputStream; -import java.util.Iterator; -import java.util.PrimitiveIterator; -import java.util.function.Function; -import java.util.function.IntFunction; import static io.deephaven.util.QueryConstants.*; @@ -167,192 +158,4 @@ public int drainTo(final OutputStream outputStream) throws IOException { return LongSizedDataStructure.intSize("ByteChunkInputStreamGenerator", bytesWritten); } } - - @FunctionalInterface - public interface ByteConversion { - byte apply(byte in); - - ByteConversion IDENTITY = (byte a) -> a; - } - - static WritableByteChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - return extractChunkFromInputStreamWithConversion( - options, ByteConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows); - } - - static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final StreamReaderOptions options, - final Function transform, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - try (final WritableByteChunk inner = extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - - final WritableObjectChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, inner.size()), - WritableObjectChunk::makeWritableChunk, - WritableChunk::asWritableObjectChunk); - - if (outChunk == null) { - // if we're not given an output chunk then we better be writing at the front 
of the new one - Assert.eqZero(outOffset, "outOffset"); - } - - for (int ii = 0; ii < inner.size(); ++ii) { - byte value = inner.get(ii); - chunk.set(outOffset + ii, transform.apply(value)); - } - - return chunk; - } - } - - static WritableByteChunk extractChunkFromInputStreamWithConversion( - final StreamReaderOptions options, - final ByteConversion conversion, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long payloadBuffer = bufferInfoIter.nextLong(); - - final WritableByteChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, nodeInfo.numElements), - WritableByteChunk::makeWritableChunk, - WritableChunk::asWritableByteChunk); - - if (nodeInfo.numElements == 0) { - return chunk; - } - - final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { - if (options.useDeephavenNulls() && validityBuffer != 0) { - throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); - } - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity buffer by here - - final long payloadRead = (long) nodeInfo.numElements * Byte.BYTES; - Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); - - if (options.useDeephavenNulls()) { - useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); - } else { - useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); - } - - final long overhangPayload = payloadBuffer - payloadRead; - if (overhangPayload > 0) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); - } - } - - return chunk; - } - - private static > T castOrCreateChunk( - final WritableChunk outChunk, - final int numRows, - final IntFunction chunkFactory, - final Function, T> castFunction) { - if (outChunk != null) { - return castFunction.apply(outChunk); - } - final T newChunk = chunkFactory.apply(numRows); - newChunk.setSize(numRows); - return newChunk; - } - - private static void useDeephavenNulls( - final ByteConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableByteChunk chunk, - final int offset) throws IOException { - if (conversion == ByteConversion.IDENTITY) { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - chunk.set(offset + ii, is.readByte()); - } - } else { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - final byte in = is.readByte(); - final byte out = in == NULL_BYTE ? 
in : conversion.apply(in); - chunk.set(offset + ii, out); - } - } - } - - private static void useValidityBuffer( - final ByteConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableByteChunk chunk, - final int offset, - final WritableLongChunk isValid) throws IOException { - final int numElements = nodeInfo.numElements; - final int numValidityWords = (numElements + 63) / 64; - - int ei = 0; - int pendingSkips = 0; - - for (int vi = 0; vi < numValidityWords; ++vi) { - int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); - long validityWord = isValid.get(vi); - do { - if ((validityWord & 1) == 1) { - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Byte.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - ei += pendingSkips; - pendingSkips = 0; - } - chunk.set(offset + ei++, conversion.apply(is.readByte())); - validityWord >>= 1; - bitsLeftInThisWord--; - } else { - final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); - pendingSkips += skips; - validityWord >>= skips; - bitsLeftInThisWord -= skips; - } - } while (bitsLeftInThisWord > 0); - } - - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Byte.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - } - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkReader.java new file mode 100644 index 00000000000..29bee0fea05 --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkReader.java @@ -0,0 +1,204 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +// ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY +// ****** Edit CharChunkReader and run "./gradlew replicateBarrageUtils" to regenerate +// +// @formatter:off +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.base.verify.Assert; +import io.deephaven.chunk.WritableByteChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; +import java.util.function.Function; +import java.util.function.IntFunction; + +import static io.deephaven.util.QueryConstants.NULL_BYTE; + +public class ByteChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "ByteChunkReader"; + private final StreamReaderOptions options; + private final ByteConversion conversion; + + @FunctionalInterface + public interface ByteConversion { + byte apply(byte in); + + ByteConversion IDENTITY = (byte a) -> a; + } + + public ByteChunkReader(StreamReaderOptions options) { + this(options, ByteConversion.IDENTITY); + } + + public ByteChunkReader(StreamReaderOptions options, ByteConversion conversion) { + this.options = options; + this.conversion = conversion; + } + + public ChunkReader transform(Function transform) { + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { + try (final WritableByteChunk inner = ByteChunkReader.this.read( + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + + final WritableObjectChunk chunk = castOrCreateChunk( + outChunk, + 
Math.max(totalRows, inner.size()), + WritableObjectChunk::makeWritableChunk, + WritableChunk::asWritableObjectChunk); + + if (outChunk == null) { + // if we're not given an output chunk then we better be writing at the front of the new one + Assert.eqZero(outOffset, "outOffset"); + } + + for (int ii = 0; ii < inner.size(); ++ii) { + byte value = inner.get(ii); + chunk.set(outOffset + ii, transform.apply(value)); + } + + return chunk; + } + }; + } + + @Override + public WritableByteChunk read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long payloadBuffer = bufferInfoIter.nextLong(); + + final WritableByteChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, nodeInfo.numElements), + WritableByteChunk::makeWritableChunk, + WritableChunk::asWritableByteChunk); + + if (nodeInfo.numElements == 0) { + return chunk; + } + + final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { + if (options.useDeephavenNulls() && validityBuffer != 0) { + throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); + } + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + final long payloadRead = (long) nodeInfo.numElements * Byte.BYTES; + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); + + if (options.useDeephavenNulls()) { + useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); + } else { + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); + } + + final long overhangPayload = payloadBuffer - payloadRead; + if (overhangPayload > 0) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); + } + } + + return chunk; + } + + private static > T castOrCreateChunk( + final WritableChunk outChunk, + final int numRows, + final IntFunction chunkFactory, + final Function, T> castFunction) { + if (outChunk != null) { + return castFunction.apply(outChunk); + } + final T newChunk = chunkFactory.apply(numRows); + newChunk.setSize(numRows); + return newChunk; + } + + private static void useDeephavenNulls( + final ByteConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableByteChunk chunk, + final int offset) throws IOException { + if (conversion == ByteConversion.IDENTITY) { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + chunk.set(offset + ii, is.readByte()); + } + } else { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + final byte in = is.readByte(); + final byte out = in == NULL_BYTE ? 
in : conversion.apply(in); + chunk.set(offset + ii, out); + } + } + } + + private static void useValidityBuffer( + final ByteConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableByteChunk chunk, + final int offset, + final WritableLongChunk isValid) throws IOException { + final int numElements = nodeInfo.numElements; + final int numValidityWords = (numElements + 63) / 64; + + int ei = 0; + int pendingSkips = 0; + + for (int vi = 0; vi < numValidityWords; ++vi) { + int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); + long validityWord = isValid.get(vi); + do { + if ((validityWord & 1) == 1) { + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Byte.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + ei += pendingSkips; + pendingSkips = 0; + } + chunk.set(offset + ei++, conversion.apply(is.readByte())); + validityWord >>= 1; + bitsLeftInThisWord--; + } else { + final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); + pendingSkips += skips; + validityWord >>= skips; + bitsLeftInThisWord -= skips; + } + } while (bitsLeftInThisWord > 0); + } + + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Byte.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + } + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java index 878bc0a6cd6..83b1f2f72f1 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkInputStreamGenerator.java @@ -3,10 +3,7 @@ // package io.deephaven.extensions.barrage.chunk; -import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.primitive.function.ToCharFunction; @@ -17,17 +14,11 @@ import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.CharChunk; import io.deephaven.chunk.WritableCharChunk; -import io.deephaven.chunk.WritableLongChunk; import io.deephaven.util.type.TypeUtils; import org.jetbrains.annotations.Nullable; -import java.io.DataInput; import java.io.IOException; import java.io.OutputStream; -import java.util.Iterator; -import java.util.PrimitiveIterator; -import java.util.function.Function; -import java.util.function.IntFunction; import static io.deephaven.util.QueryConstants.*; @@ -163,192 +154,4 @@ public int drainTo(final OutputStream outputStream) throws IOException { return LongSizedDataStructure.intSize("CharChunkInputStreamGenerator", bytesWritten); } } - - @FunctionalInterface - public interface CharConversion { - char apply(char in); - - CharConversion IDENTITY = (char a) -> a; - } - - static WritableCharChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - return extractChunkFromInputStreamWithConversion( - options, CharConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows); - } - - 
static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final StreamReaderOptions options, - final Function transform, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - try (final WritableCharChunk inner = extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - - final WritableObjectChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, inner.size()), - WritableObjectChunk::makeWritableChunk, - WritableChunk::asWritableObjectChunk); - - if (outChunk == null) { - // if we're not given an output chunk then we better be writing at the front of the new one - Assert.eqZero(outOffset, "outOffset"); - } - - for (int ii = 0; ii < inner.size(); ++ii) { - char value = inner.get(ii); - chunk.set(outOffset + ii, transform.apply(value)); - } - - return chunk; - } - } - - static WritableCharChunk extractChunkFromInputStreamWithConversion( - final StreamReaderOptions options, - final CharConversion conversion, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long payloadBuffer = bufferInfoIter.nextLong(); - - final WritableCharChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, nodeInfo.numElements), - WritableCharChunk::makeWritableChunk, - WritableChunk::asWritableCharChunk); - - if (nodeInfo.numElements == 0) { - return chunk; - } - - final int numValidityLongs = options.useDeephavenNulls() ? 
0 : (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { - if (options.useDeephavenNulls() && validityBuffer != 0) { - throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); - } - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity buffer by here - - final long payloadRead = (long) nodeInfo.numElements * Character.BYTES; - Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); - - if (options.useDeephavenNulls()) { - useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); - } else { - useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); - } - - final long overhangPayload = payloadBuffer - payloadRead; - if (overhangPayload > 0) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); - } - } - - return chunk; - } - - private static > T castOrCreateChunk( - final WritableChunk outChunk, - final int numRows, - final IntFunction chunkFactory, - final Function, T> castFunction) { - if (outChunk != null) { - return castFunction.apply(outChunk); - } - final T newChunk = chunkFactory.apply(numRows); - newChunk.setSize(numRows); - return newChunk; - } - - private static void useDeephavenNulls( - final CharConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableCharChunk chunk, - final int offset) throws IOException { - if (conversion == CharConversion.IDENTITY) { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - chunk.set(offset + ii, is.readChar()); - } - } else { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - final char in = is.readChar(); - final char out = in == NULL_CHAR ? 
in : conversion.apply(in); - chunk.set(offset + ii, out); - } - } - } - - private static void useValidityBuffer( - final CharConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableCharChunk chunk, - final int offset, - final WritableLongChunk isValid) throws IOException { - final int numElements = nodeInfo.numElements; - final int numValidityWords = (numElements + 63) / 64; - - int ei = 0; - int pendingSkips = 0; - - for (int vi = 0; vi < numValidityWords; ++vi) { - int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); - long validityWord = isValid.get(vi); - do { - if ((validityWord & 1) == 1) { - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Character.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - ei += pendingSkips; - pendingSkips = 0; - } - chunk.set(offset + ei++, conversion.apply(is.readChar())); - validityWord >>= 1; - bitsLeftInThisWord--; - } else { - final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); - pendingSkips += skips; - validityWord >>= skips; - bitsLeftInThisWord -= skips; - } - } while (bitsLeftInThisWord > 0); - } - - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Character.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - } - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkReader.java new file mode 100644 index 00000000000..b6fce96ffbf --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkReader.java @@ -0,0 +1,200 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.base.verify.Assert; +import io.deephaven.chunk.WritableCharChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; +import java.util.function.Function; +import java.util.function.IntFunction; + +import static io.deephaven.util.QueryConstants.NULL_CHAR; + +public class CharChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "CharChunkReader"; + private final StreamReaderOptions options; + private final CharConversion conversion; + + @FunctionalInterface + public interface CharConversion { + char apply(char in); + + CharConversion IDENTITY = (char a) -> a; + } + + public CharChunkReader(StreamReaderOptions options) { + this(options, CharConversion.IDENTITY); + } + + public CharChunkReader(StreamReaderOptions options, CharConversion conversion) { + this.options = options; + this.conversion = conversion; + } + + public ChunkReader transform(Function transform) { + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { + try (final WritableCharChunk inner = CharChunkReader.this.read( + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + + final WritableObjectChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, inner.size()), + WritableObjectChunk::makeWritableChunk, + WritableChunk::asWritableObjectChunk); + + if (outChunk == null) { + // if we're not given 
an output chunk then we better be writing at the front of the new one + Assert.eqZero(outOffset, "outOffset"); + } + + for (int ii = 0; ii < inner.size(); ++ii) { + char value = inner.get(ii); + chunk.set(outOffset + ii, transform.apply(value)); + } + + return chunk; + } + }; + } + + @Override + public WritableCharChunk read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long payloadBuffer = bufferInfoIter.nextLong(); + + final WritableCharChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, nodeInfo.numElements), + WritableCharChunk::makeWritableChunk, + WritableChunk::asWritableCharChunk); + + if (nodeInfo.numElements == 0) { + return chunk; + } + + final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { + if (options.useDeephavenNulls() && validityBuffer != 0) { + throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); + } + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + final long payloadRead = (long) nodeInfo.numElements * Character.BYTES; + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); + + if (options.useDeephavenNulls()) { + useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); + } else { + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); + } + + final long overhangPayload = payloadBuffer - payloadRead; + if (overhangPayload > 0) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); + } + } + + return chunk; + } + + private static > T castOrCreateChunk( + final WritableChunk outChunk, + final int numRows, + final IntFunction chunkFactory, + final Function, T> castFunction) { + if (outChunk != null) { + return castFunction.apply(outChunk); + } + final T newChunk = chunkFactory.apply(numRows); + newChunk.setSize(numRows); + return newChunk; + } + + private static void useDeephavenNulls( + final CharConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableCharChunk chunk, + final int offset) throws IOException { + if (conversion == CharConversion.IDENTITY) { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + chunk.set(offset + ii, is.readChar()); + } + } else { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + final char in = is.readChar(); + final char out = in == NULL_CHAR ? 
in : conversion.apply(in); + chunk.set(offset + ii, out); + } + } + } + + private static void useValidityBuffer( + final CharConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableCharChunk chunk, + final int offset, + final WritableLongChunk isValid) throws IOException { + final int numElements = nodeInfo.numElements; + final int numValidityWords = (numElements + 63) / 64; + + int ei = 0; + int pendingSkips = 0; + + for (int vi = 0; vi < numValidityWords; ++vi) { + int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); + long validityWord = isValid.get(vi); + do { + if ((validityWord & 1) == 1) { + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Character.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + ei += pendingSkips; + pendingSkips = 0; + } + chunk.set(offset + ei++, conversion.apply(is.readChar())); + validityWord >>= 1; + bitsLeftInThisWord--; + } else { + final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); + pendingSkips += skips; + validityWord >>= skips; + bitsLeftInThisWord -= skips; + } + } while (bitsLeftInThisWord > 0); + } + + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Character.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + } + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java index df41a1ae7ca..bbf5b398fe7 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java @@ -37,45 +37,28 @@ public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int case Boolean: throw new UnsupportedOperationException("Booleans are reinterpreted as bytes"); case Char: - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> CharChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new CharChunkReader(options); case Byte: if (typeInfo.type() == Boolean.class || typeInfo.type() == boolean.class) { return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> BooleanChunkInputStreamGenerator.extractChunkFromInputStream( options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> ByteChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new ByteChunkReader(options); case Short: - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> ShortChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new ShortChunkReader(options); case Int: - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> IntChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new IntChunkReader(options); case Long: if (factor == 1) { - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> LongChunkInputStreamGenerator.extractChunkFromInputStream( - options, 
fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new LongChunkReader(options); } - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> LongChunkInputStreamGenerator.extractChunkFromInputStreamWithConversion( - options, - (long v) -> v == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : (v * factor), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new LongChunkReader(options, + (long v) -> v == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : (v * factor)); case Float: - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> FloatChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new FloatChunkReader(options); case Double: - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> DoubleChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new DoubleChunkReader(options); case Object: if (typeInfo.type().isArray()) { if (typeInfo.componentType() == byte.class) { @@ -196,20 +179,12 @@ public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } if (typeInfo.type() == LocalDate.class) { - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( - options, - value -> value == QueryConstants.NULL_LONG - ? null - : LocalDate.ofEpochDay(value / MS_PER_DAY), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new LongChunkReader(options).transform(value -> value == QueryConstants.NULL_LONG ? null + : LocalDate.ofEpochDay(value / MS_PER_DAY)); } if (typeInfo.type() == LocalTime.class) { - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> LongChunkInputStreamGenerator.extractChunkFromInputStreamWithTransform( - options, - value -> value == QueryConstants.NULL_LONG ? null : LocalTime.ofNanoOfDay(value), - fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new LongChunkReader(options).transform( + value -> value == QueryConstants.NULL_LONG ? 
null : LocalTime.ofNanoOfDay(value)); } if (typeInfo.type() == String.class || options.columnConversionMode().equals(ColumnConversionMode.Stringify)) { diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java index c5283a02364..a0046b67edb 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkInputStreamGenerator.java @@ -9,10 +9,7 @@ import java.util.function.ToDoubleFunction; -import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.rowset.RowSet; @@ -22,17 +19,11 @@ import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.DoubleChunk; import io.deephaven.chunk.WritableDoubleChunk; -import io.deephaven.chunk.WritableLongChunk; import io.deephaven.util.type.TypeUtils; import org.jetbrains.annotations.Nullable; -import java.io.DataInput; import java.io.IOException; import java.io.OutputStream; -import java.util.Iterator; -import java.util.PrimitiveIterator; -import java.util.function.Function; -import java.util.function.IntFunction; import static io.deephaven.util.QueryConstants.*; @@ -168,192 +159,4 @@ public int drainTo(final OutputStream outputStream) throws IOException { return LongSizedDataStructure.intSize("DoubleChunkInputStreamGenerator", bytesWritten); } } - - @FunctionalInterface - public interface DoubleConversion { - double apply(double in); - - DoubleConversion IDENTITY = (double a) -> a; - } - - static WritableDoubleChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - return extractChunkFromInputStreamWithConversion( - options, DoubleConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows); - } - - static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final StreamReaderOptions options, - final Function transform, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - try (final WritableDoubleChunk inner = extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - - final WritableObjectChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, inner.size()), - WritableObjectChunk::makeWritableChunk, - WritableChunk::asWritableObjectChunk); - - if (outChunk == null) { - // if we're not given an output chunk then we better be writing at the front of the new one - Assert.eqZero(outOffset, "outOffset"); - } - - for (int ii = 0; ii < inner.size(); ++ii) { - double value = inner.get(ii); - chunk.set(outOffset + ii, transform.apply(value)); - } - - return chunk; - } - } - - static WritableDoubleChunk extractChunkFromInputStreamWithConversion( - final StreamReaderOptions options, - final DoubleConversion conversion, - final Iterator 
fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long payloadBuffer = bufferInfoIter.nextLong(); - - final WritableDoubleChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, nodeInfo.numElements), - WritableDoubleChunk::makeWritableChunk, - WritableChunk::asWritableDoubleChunk); - - if (nodeInfo.numElements == 0) { - return chunk; - } - - final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { - if (options.useDeephavenNulls() && validityBuffer != 0) { - throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); - } - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity buffer by here - - final long payloadRead = (long) nodeInfo.numElements * Double.BYTES; - Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); - - if (options.useDeephavenNulls()) { - useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); - } else { - useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); - } - - final long overhangPayload = payloadBuffer - payloadRead; - if (overhangPayload > 0) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); - } - } - - return chunk; - } - - private static > T castOrCreateChunk( - final WritableChunk outChunk, - final int numRows, - final IntFunction chunkFactory, - final Function, T> castFunction) { - if (outChunk != null) { - return castFunction.apply(outChunk); - } - final T newChunk = chunkFactory.apply(numRows); - newChunk.setSize(numRows); - return newChunk; - } - - private static void useDeephavenNulls( - final DoubleConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableDoubleChunk chunk, - final int offset) throws IOException { - if (conversion == DoubleConversion.IDENTITY) { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - chunk.set(offset + ii, is.readDouble()); - } - } else { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - final double in = is.readDouble(); - final double out = in == NULL_DOUBLE ? 
in : conversion.apply(in); - chunk.set(offset + ii, out); - } - } - } - - private static void useValidityBuffer( - final DoubleConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableDoubleChunk chunk, - final int offset, - final WritableLongChunk isValid) throws IOException { - final int numElements = nodeInfo.numElements; - final int numValidityWords = (numElements + 63) / 64; - - int ei = 0; - int pendingSkips = 0; - - for (int vi = 0; vi < numValidityWords; ++vi) { - int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); - long validityWord = isValid.get(vi); - do { - if ((validityWord & 1) == 1) { - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Double.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - ei += pendingSkips; - pendingSkips = 0; - } - chunk.set(offset + ei++, conversion.apply(is.readDouble())); - validityWord >>= 1; - bitsLeftInThisWord--; - } else { - final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); - pendingSkips += skips; - validityWord >>= skips; - bitsLeftInThisWord -= skips; - } - } while (bitsLeftInThisWord > 0); - } - - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Double.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - } - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java new file mode 100644 index 00000000000..4b72273272b --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java @@ -0,0 +1,204 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +// ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY +// ****** Edit CharChunkReader and run "./gradlew replicateBarrageUtils" to regenerate +// +// @formatter:off +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.base.verify.Assert; +import io.deephaven.chunk.WritableDoubleChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; +import java.util.function.Function; +import java.util.function.IntFunction; + +import static io.deephaven.util.QueryConstants.NULL_DOUBLE; + +public class DoubleChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "DoubleChunkReader"; + private final StreamReaderOptions options; + private final DoubleConversion conversion; + + @FunctionalInterface + public interface DoubleConversion { + double apply(double in); + + DoubleConversion IDENTITY = (double a) -> a; + } + + public DoubleChunkReader(StreamReaderOptions options) { + this(options, DoubleConversion.IDENTITY); + } + + public DoubleChunkReader(StreamReaderOptions options, DoubleConversion conversion) { + this.options = options; + this.conversion = conversion; + } + + public ChunkReader transform(Function transform) { + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { + try (final WritableDoubleChunk inner = DoubleChunkReader.this.read( + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + + final WritableObjectChunk chunk = 
castOrCreateChunk( + outChunk, + Math.max(totalRows, inner.size()), + WritableObjectChunk::makeWritableChunk, + WritableChunk::asWritableObjectChunk); + + if (outChunk == null) { + // if we're not given an output chunk then we better be writing at the front of the new one + Assert.eqZero(outOffset, "outOffset"); + } + + for (int ii = 0; ii < inner.size(); ++ii) { + double value = inner.get(ii); + chunk.set(outOffset + ii, transform.apply(value)); + } + + return chunk; + } + }; + } + + @Override + public WritableDoubleChunk read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long payloadBuffer = bufferInfoIter.nextLong(); + + final WritableDoubleChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, nodeInfo.numElements), + WritableDoubleChunk::makeWritableChunk, + WritableChunk::asWritableDoubleChunk); + + if (nodeInfo.numElements == 0) { + return chunk; + } + + final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { + if (options.useDeephavenNulls() && validityBuffer != 0) { + throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); + } + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + final long payloadRead = (long) nodeInfo.numElements * Double.BYTES; + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); + + if (options.useDeephavenNulls()) { + useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); + } else { + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); + } + + final long overhangPayload = payloadBuffer - payloadRead; + if (overhangPayload > 0) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); + } + } + + return chunk; + } + + private static > T castOrCreateChunk( + final WritableChunk outChunk, + final int numRows, + final IntFunction chunkFactory, + final Function, T> castFunction) { + if (outChunk != null) { + return castFunction.apply(outChunk); + } + final T newChunk = chunkFactory.apply(numRows); + newChunk.setSize(numRows); + return newChunk; + } + + private static void useDeephavenNulls( + final DoubleConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableDoubleChunk chunk, + final int offset) throws IOException { + if (conversion == DoubleConversion.IDENTITY) { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + chunk.set(offset + ii, is.readDouble()); + } + } else { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + final double in = is.readDouble(); + final double out = in == NULL_DOUBLE ? 
in : conversion.apply(in); + chunk.set(offset + ii, out); + } + } + } + + private static void useValidityBuffer( + final DoubleConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableDoubleChunk chunk, + final int offset, + final WritableLongChunk isValid) throws IOException { + final int numElements = nodeInfo.numElements; + final int numValidityWords = (numElements + 63) / 64; + + int ei = 0; + int pendingSkips = 0; + + for (int vi = 0; vi < numValidityWords; ++vi) { + int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); + long validityWord = isValid.get(vi); + do { + if ((validityWord & 1) == 1) { + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Double.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + ei += pendingSkips; + pendingSkips = 0; + } + chunk.set(offset + ei++, conversion.apply(is.readDouble())); + validityWord >>= 1; + bitsLeftInThisWord--; + } else { + final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); + pendingSkips += skips; + validityWord >>= skips; + bitsLeftInThisWord -= skips; + } + } while (bitsLeftInThisWord > 0); + } + + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Double.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + } + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java index 19b52593bff..edd8aaccb2a 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkInputStreamGenerator.java @@ -7,10 +7,7 @@ // @formatter:off package io.deephaven.extensions.barrage.chunk; -import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.primitive.function.ToFloatFunction; @@ -21,17 +18,11 @@ import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.FloatChunk; import io.deephaven.chunk.WritableFloatChunk; -import io.deephaven.chunk.WritableLongChunk; import io.deephaven.util.type.TypeUtils; import org.jetbrains.annotations.Nullable; -import java.io.DataInput; import java.io.IOException; import java.io.OutputStream; -import java.util.Iterator; -import java.util.PrimitiveIterator; -import java.util.function.Function; -import java.util.function.IntFunction; import static io.deephaven.util.QueryConstants.*; @@ -167,192 +158,4 @@ public int drainTo(final OutputStream outputStream) throws IOException { return LongSizedDataStructure.intSize("FloatChunkInputStreamGenerator", bytesWritten); } } - - @FunctionalInterface - public interface FloatConversion { - float apply(float in); - - FloatConversion IDENTITY = (float a) -> a; - } - - static WritableFloatChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - return extractChunkFromInputStreamWithConversion( - options, FloatConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, 
outChunk, outOffset, - totalRows); - } - - static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final StreamReaderOptions options, - final Function transform, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - try (final WritableFloatChunk inner = extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - - final WritableObjectChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, inner.size()), - WritableObjectChunk::makeWritableChunk, - WritableChunk::asWritableObjectChunk); - - if (outChunk == null) { - // if we're not given an output chunk then we better be writing at the front of the new one - Assert.eqZero(outOffset, "outOffset"); - } - - for (int ii = 0; ii < inner.size(); ++ii) { - float value = inner.get(ii); - chunk.set(outOffset + ii, transform.apply(value)); - } - - return chunk; - } - } - - static WritableFloatChunk extractChunkFromInputStreamWithConversion( - final StreamReaderOptions options, - final FloatConversion conversion, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long payloadBuffer = bufferInfoIter.nextLong(); - - final WritableFloatChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, nodeInfo.numElements), - WritableFloatChunk::makeWritableChunk, - WritableChunk::asWritableFloatChunk); - - if (nodeInfo.numElements == 0) { - return chunk; - } - - final int numValidityLongs = options.useDeephavenNulls() ? 
0 : (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { - if (options.useDeephavenNulls() && validityBuffer != 0) { - throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); - } - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity buffer by here - - final long payloadRead = (long) nodeInfo.numElements * Float.BYTES; - Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); - - if (options.useDeephavenNulls()) { - useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); - } else { - useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); - } - - final long overhangPayload = payloadBuffer - payloadRead; - if (overhangPayload > 0) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); - } - } - - return chunk; - } - - private static > T castOrCreateChunk( - final WritableChunk outChunk, - final int numRows, - final IntFunction chunkFactory, - final Function, T> castFunction) { - if (outChunk != null) { - return castFunction.apply(outChunk); - } - final T newChunk = chunkFactory.apply(numRows); - newChunk.setSize(numRows); - return newChunk; - } - - private static void useDeephavenNulls( - final FloatConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableFloatChunk chunk, - final int offset) throws IOException { - if (conversion == FloatConversion.IDENTITY) { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - chunk.set(offset + ii, is.readFloat()); - } - } else { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - final float in = is.readFloat(); - final float out = in == NULL_FLOAT ? 
in : conversion.apply(in); - chunk.set(offset + ii, out); - } - } - } - - private static void useValidityBuffer( - final FloatConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableFloatChunk chunk, - final int offset, - final WritableLongChunk isValid) throws IOException { - final int numElements = nodeInfo.numElements; - final int numValidityWords = (numElements + 63) / 64; - - int ei = 0; - int pendingSkips = 0; - - for (int vi = 0; vi < numValidityWords; ++vi) { - int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); - long validityWord = isValid.get(vi); - do { - if ((validityWord & 1) == 1) { - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Float.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - ei += pendingSkips; - pendingSkips = 0; - } - chunk.set(offset + ei++, conversion.apply(is.readFloat())); - validityWord >>= 1; - bitsLeftInThisWord--; - } else { - final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); - pendingSkips += skips; - validityWord >>= skips; - bitsLeftInThisWord -= skips; - } - } while (bitsLeftInThisWord > 0); - } - - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Float.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - } - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkReader.java new file mode 100644 index 00000000000..6d434226235 --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkReader.java @@ -0,0 +1,204 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +// ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY +// ****** Edit CharChunkReader and run "./gradlew replicateBarrageUtils" to regenerate +// +// @formatter:off +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.base.verify.Assert; +import io.deephaven.chunk.WritableFloatChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; +import java.util.function.Function; +import java.util.function.IntFunction; + +import static io.deephaven.util.QueryConstants.NULL_FLOAT; + +public class FloatChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "FloatChunkReader"; + private final StreamReaderOptions options; + private final FloatConversion conversion; + + @FunctionalInterface + public interface FloatConversion { + float apply(float in); + + FloatConversion IDENTITY = (float a) -> a; + } + + public FloatChunkReader(StreamReaderOptions options) { + this(options, FloatConversion.IDENTITY); + } + + public FloatChunkReader(StreamReaderOptions options, FloatConversion conversion) { + this.options = options; + this.conversion = conversion; + } + + public ChunkReader transform(Function transform) { + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { + try (final WritableFloatChunk inner = FloatChunkReader.this.read( + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + + final WritableObjectChunk chunk = castOrCreateChunk( + 
outChunk, + Math.max(totalRows, inner.size()), + WritableObjectChunk::makeWritableChunk, + WritableChunk::asWritableObjectChunk); + + if (outChunk == null) { + // if we're not given an output chunk then we better be writing at the front of the new one + Assert.eqZero(outOffset, "outOffset"); + } + + for (int ii = 0; ii < inner.size(); ++ii) { + float value = inner.get(ii); + chunk.set(outOffset + ii, transform.apply(value)); + } + + return chunk; + } + }; + } + + @Override + public WritableFloatChunk read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long payloadBuffer = bufferInfoIter.nextLong(); + + final WritableFloatChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, nodeInfo.numElements), + WritableFloatChunk::makeWritableChunk, + WritableChunk::asWritableFloatChunk); + + if (nodeInfo.numElements == 0) { + return chunk; + } + + final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { + if (options.useDeephavenNulls() && validityBuffer != 0) { + throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); + } + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + final long payloadRead = (long) nodeInfo.numElements * Float.BYTES; + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); + + if (options.useDeephavenNulls()) { + useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); + } else { + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); + } + + final long overhangPayload = payloadBuffer - payloadRead; + if (overhangPayload > 0) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); + } + } + + return chunk; + } + + private static > T castOrCreateChunk( + final WritableChunk outChunk, + final int numRows, + final IntFunction chunkFactory, + final Function, T> castFunction) { + if (outChunk != null) { + return castFunction.apply(outChunk); + } + final T newChunk = chunkFactory.apply(numRows); + newChunk.setSize(numRows); + return newChunk; + } + + private static void useDeephavenNulls( + final FloatConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableFloatChunk chunk, + final int offset) throws IOException { + if (conversion == FloatConversion.IDENTITY) { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + chunk.set(offset + ii, is.readFloat()); + } + } else { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + final float in = is.readFloat(); + final float out = in == NULL_FLOAT ? 
in : conversion.apply(in); + chunk.set(offset + ii, out); + } + } + } + + private static void useValidityBuffer( + final FloatConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableFloatChunk chunk, + final int offset, + final WritableLongChunk isValid) throws IOException { + final int numElements = nodeInfo.numElements; + final int numValidityWords = (numElements + 63) / 64; + + int ei = 0; + int pendingSkips = 0; + + for (int vi = 0; vi < numValidityWords; ++vi) { + int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); + long validityWord = isValid.get(vi); + do { + if ((validityWord & 1) == 1) { + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Float.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + ei += pendingSkips; + pendingSkips = 0; + } + chunk.set(offset + ei++, conversion.apply(is.readFloat())); + validityWord >>= 1; + bitsLeftInThisWord--; + } else { + final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); + pendingSkips += skips; + validityWord >>= skips; + bitsLeftInThisWord -= skips; + } + } while (bitsLeftInThisWord > 0); + } + + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Float.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + } + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java index 91714f4dd43..87bc61b8c6d 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkInputStreamGenerator.java @@ -9,10 +9,7 @@ import java.util.function.ToIntFunction; -import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.rowset.RowSet; @@ -22,17 +19,11 @@ import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.IntChunk; import io.deephaven.chunk.WritableIntChunk; -import io.deephaven.chunk.WritableLongChunk; import io.deephaven.util.type.TypeUtils; import org.jetbrains.annotations.Nullable; -import java.io.DataInput; import java.io.IOException; import java.io.OutputStream; -import java.util.Iterator; -import java.util.PrimitiveIterator; -import java.util.function.Function; -import java.util.function.IntFunction; import static io.deephaven.util.QueryConstants.*; @@ -168,192 +159,4 @@ public int drainTo(final OutputStream outputStream) throws IOException { return LongSizedDataStructure.intSize("IntChunkInputStreamGenerator", bytesWritten); } } - - @FunctionalInterface - public interface IntConversion { - int apply(int in); - - IntConversion IDENTITY = (int a) -> a; - } - - static WritableIntChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - return extractChunkFromInputStreamWithConversion( - options, IntConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows); - } - - static WritableObjectChunk 
extractChunkFromInputStreamWithTransform( - final StreamReaderOptions options, - final Function transform, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - try (final WritableIntChunk inner = extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - - final WritableObjectChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, inner.size()), - WritableObjectChunk::makeWritableChunk, - WritableChunk::asWritableObjectChunk); - - if (outChunk == null) { - // if we're not given an output chunk then we better be writing at the front of the new one - Assert.eqZero(outOffset, "outOffset"); - } - - for (int ii = 0; ii < inner.size(); ++ii) { - int value = inner.get(ii); - chunk.set(outOffset + ii, transform.apply(value)); - } - - return chunk; - } - } - - static WritableIntChunk extractChunkFromInputStreamWithConversion( - final StreamReaderOptions options, - final IntConversion conversion, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long payloadBuffer = bufferInfoIter.nextLong(); - - final WritableIntChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, nodeInfo.numElements), - WritableIntChunk::makeWritableChunk, - WritableChunk::asWritableIntChunk); - - if (nodeInfo.numElements == 0) { - return chunk; - } - - final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { - if (options.useDeephavenNulls() && validityBuffer != 0) { - throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); - } - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity buffer by here - - final long payloadRead = (long) nodeInfo.numElements * Integer.BYTES; - Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); - - if (options.useDeephavenNulls()) { - useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); - } else { - useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); - } - - final long overhangPayload = payloadBuffer - payloadRead; - if (overhangPayload > 0) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); - } - } - - return chunk; - } - - private static > T castOrCreateChunk( - final WritableChunk outChunk, - final int numRows, - final IntFunction chunkFactory, - final Function, T> castFunction) { - if (outChunk != null) { - return castFunction.apply(outChunk); - } - final T newChunk = chunkFactory.apply(numRows); - newChunk.setSize(numRows); - return newChunk; - } - - private static void useDeephavenNulls( - final IntConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, 
- final WritableIntChunk chunk, - final int offset) throws IOException { - if (conversion == IntConversion.IDENTITY) { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - chunk.set(offset + ii, is.readInt()); - } - } else { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - final int in = is.readInt(); - final int out = in == NULL_INT ? in : conversion.apply(in); - chunk.set(offset + ii, out); - } - } - } - - private static void useValidityBuffer( - final IntConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableIntChunk chunk, - final int offset, - final WritableLongChunk isValid) throws IOException { - final int numElements = nodeInfo.numElements; - final int numValidityWords = (numElements + 63) / 64; - - int ei = 0; - int pendingSkips = 0; - - for (int vi = 0; vi < numValidityWords; ++vi) { - int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); - long validityWord = isValid.get(vi); - do { - if ((validityWord & 1) == 1) { - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Integer.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - ei += pendingSkips; - pendingSkips = 0; - } - chunk.set(offset + ei++, conversion.apply(is.readInt())); - validityWord >>= 1; - bitsLeftInThisWord--; - } else { - final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); - pendingSkips += skips; - validityWord >>= skips; - bitsLeftInThisWord -= skips; - } - } while (bitsLeftInThisWord > 0); - } - - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Integer.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - } - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkReader.java new file mode 100644 index 00000000000..39bce48735c --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkReader.java @@ -0,0 +1,204 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +// ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY +// ****** Edit CharChunkReader and run "./gradlew replicateBarrageUtils" to regenerate +// +// @formatter:off +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.base.verify.Assert; +import io.deephaven.chunk.WritableIntChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; +import java.util.function.Function; +import java.util.function.IntFunction; + +import static io.deephaven.util.QueryConstants.NULL_INT; + +public class IntChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "IntChunkReader"; + private final StreamReaderOptions options; + private final IntConversion conversion; + + @FunctionalInterface + public interface IntConversion { + int apply(int in); + + IntConversion IDENTITY = (int a) -> a; + } + + public IntChunkReader(StreamReaderOptions options) { + this(options, IntConversion.IDENTITY); + } + + public IntChunkReader(StreamReaderOptions options, IntConversion conversion) { + this.options = options; + this.conversion = conversion; + } + + 
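// A minimal sketch of the transform(...) pattern defined just below, assuming a simplified
// batch-reader abstraction in place of ChunkReader/WritableChunk. It shows how
// DefaultChunkReadingFactory composes a primitive reader with a per-value mapping, e.g.
// long epoch-millis -> LocalDate, as done later in this patch; the interfaces and names
// here are illustrative stand-ins, not the patched API.
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.List;
import java.util.function.LongFunction;

public class TransformSketch {
    /** Simplified stand-in for a primitive ChunkReader: produces one batch of longs. */
    interface LongBatchReader {
        long[] read();

        /** Wraps this reader so each primitive value is mapped to an object value. */
        default <T> ObjectBatchReader<T> transform(final LongFunction<T> mapping) {
            return () -> {
                final long[] raw = read();
                final List<T> out = new ArrayList<>(raw.length);
                for (final long v : raw) {
                    out.add(mapping.apply(v));
                }
                return out;
            };
        }
    }

    interface ObjectBatchReader<T> {
        List<T> read();
    }

    private static final long MS_PER_DAY = 24 * 60 * 60 * 1000L;
    // Deephaven encodes null longs as Long.MIN_VALUE (QueryConstants.NULL_LONG)
    private static final long NULL_LONG = Long.MIN_VALUE;

    public static void main(String[] args) {
        final LongBatchReader longs = () -> new long[] {0L, NULL_LONG, 19_000 * MS_PER_DAY};
        // same shape as the factory's LocalDate case: map the null sentinel through, convert the rest
        final ObjectBatchReader<LocalDate> dates = longs.transform(
                v -> v == NULL_LONG ? null : LocalDate.ofEpochDay(v / MS_PER_DAY));
        dates.read().forEach(System.out::println); // 1970-01-01, null, 2022-01-08
    }
}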
public ChunkReader transform(Function transform) { + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { + try (final WritableIntChunk inner = IntChunkReader.this.read( + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + + final WritableObjectChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, inner.size()), + WritableObjectChunk::makeWritableChunk, + WritableChunk::asWritableObjectChunk); + + if (outChunk == null) { + // if we're not given an output chunk then we better be writing at the front of the new one + Assert.eqZero(outOffset, "outOffset"); + } + + for (int ii = 0; ii < inner.size(); ++ii) { + int value = inner.get(ii); + chunk.set(outOffset + ii, transform.apply(value)); + } + + return chunk; + } + }; + } + + @Override + public WritableIntChunk read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long payloadBuffer = bufferInfoIter.nextLong(); + + final WritableIntChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, nodeInfo.numElements), + WritableIntChunk::makeWritableChunk, + WritableChunk::asWritableIntChunk); + + if (nodeInfo.numElements == 0) { + return chunk; + } + + final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { + if (options.useDeephavenNulls() && validityBuffer != 0) { + throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); + } + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + final long payloadRead = (long) nodeInfo.numElements * Integer.BYTES; + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); + + if (options.useDeephavenNulls()) { + useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); + } else { + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); + } + + final long overhangPayload = payloadBuffer - payloadRead; + if (overhangPayload > 0) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); + } + } + + return chunk; + } + + private static > T castOrCreateChunk( + final WritableChunk outChunk, + final int numRows, + final IntFunction chunkFactory, + final Function, T> castFunction) { + if (outChunk != null) { + return castFunction.apply(outChunk); + } + final T newChunk = chunkFactory.apply(numRows); + newChunk.setSize(numRows); + return newChunk; + } + + private static void useDeephavenNulls( + final IntConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableIntChunk chunk, + final int offset) throws IOException { + if (conversion == IntConversion.IDENTITY) { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + chunk.set(offset + ii, is.readInt()); + } + } else { + for (int ii = 0; ii < nodeInfo.numElements; 
++ii) { + final int in = is.readInt(); + final int out = in == NULL_INT ? in : conversion.apply(in); + chunk.set(offset + ii, out); + } + } + } + + private static void useValidityBuffer( + final IntConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableIntChunk chunk, + final int offset, + final WritableLongChunk isValid) throws IOException { + final int numElements = nodeInfo.numElements; + final int numValidityWords = (numElements + 63) / 64; + + int ei = 0; + int pendingSkips = 0; + + for (int vi = 0; vi < numValidityWords; ++vi) { + int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); + long validityWord = isValid.get(vi); + do { + if ((validityWord & 1) == 1) { + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Integer.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + ei += pendingSkips; + pendingSkips = 0; + } + chunk.set(offset + ei++, conversion.apply(is.readInt())); + validityWord >>= 1; + bitsLeftInThisWord--; + } else { + final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); + pendingSkips += skips; + validityWord >>= skips; + bitsLeftInThisWord -= skips; + } + } while (bitsLeftInThisWord > 0); + } + + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Integer.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + } + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java index a28c4006d1d..671d972ccce 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkInputStreamGenerator.java @@ -9,10 +9,7 @@ import java.util.function.ToLongFunction; -import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.rowset.RowSet; @@ -22,17 +19,11 @@ import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.LongChunk; import io.deephaven.chunk.WritableLongChunk; -import io.deephaven.chunk.WritableLongChunk; import io.deephaven.util.type.TypeUtils; import org.jetbrains.annotations.Nullable; -import java.io.DataInput; import java.io.IOException; import java.io.OutputStream; -import java.util.Iterator; -import java.util.PrimitiveIterator; -import java.util.function.Function; -import java.util.function.IntFunction; import static io.deephaven.util.QueryConstants.*; @@ -168,192 +159,4 @@ public int drainTo(final OutputStream outputStream) throws IOException { return LongSizedDataStructure.intSize("LongChunkInputStreamGenerator", bytesWritten); } } - - @FunctionalInterface - public interface LongConversion { - long apply(long in); - - LongConversion IDENTITY = (long a) -> a; - } - - static WritableLongChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - return extractChunkFromInputStreamWithConversion( - options, LongConversion.IDENTITY, fieldNodeIter, 
bufferInfoIter, is, outChunk, outOffset, - totalRows); - } - - static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final StreamReaderOptions options, - final Function transform, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - try (final WritableLongChunk inner = extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - - final WritableObjectChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, inner.size()), - WritableObjectChunk::makeWritableChunk, - WritableChunk::asWritableObjectChunk); - - if (outChunk == null) { - // if we're not given an output chunk then we better be writing at the front of the new one - Assert.eqZero(outOffset, "outOffset"); - } - - for (int ii = 0; ii < inner.size(); ++ii) { - long value = inner.get(ii); - chunk.set(outOffset + ii, transform.apply(value)); - } - - return chunk; - } - } - - static WritableLongChunk extractChunkFromInputStreamWithConversion( - final StreamReaderOptions options, - final LongConversion conversion, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long payloadBuffer = bufferInfoIter.nextLong(); - - final WritableLongChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, nodeInfo.numElements), - WritableLongChunk::makeWritableChunk, - WritableChunk::asWritableLongChunk); - - if (nodeInfo.numElements == 0) { - return chunk; - } - - final int numValidityLongs = options.useDeephavenNulls() ? 
0 : (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { - if (options.useDeephavenNulls() && validityBuffer != 0) { - throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); - } - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity buffer by here - - final long payloadRead = (long) nodeInfo.numElements * Long.BYTES; - Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); - - if (options.useDeephavenNulls()) { - useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); - } else { - useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); - } - - final long overhangPayload = payloadBuffer - payloadRead; - if (overhangPayload > 0) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); - } - } - - return chunk; - } - - private static > T castOrCreateChunk( - final WritableChunk outChunk, - final int numRows, - final IntFunction chunkFactory, - final Function, T> castFunction) { - if (outChunk != null) { - return castFunction.apply(outChunk); - } - final T newChunk = chunkFactory.apply(numRows); - newChunk.setSize(numRows); - return newChunk; - } - - private static void useDeephavenNulls( - final LongConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableLongChunk chunk, - final int offset) throws IOException { - if (conversion == LongConversion.IDENTITY) { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - chunk.set(offset + ii, is.readLong()); - } - } else { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - final long in = is.readLong(); - final long out = in == NULL_LONG ? 
in : conversion.apply(in); - chunk.set(offset + ii, out); - } - } - } - - private static void useValidityBuffer( - final LongConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableLongChunk chunk, - final int offset, - final WritableLongChunk isValid) throws IOException { - final int numElements = nodeInfo.numElements; - final int numValidityWords = (numElements + 63) / 64; - - int ei = 0; - int pendingSkips = 0; - - for (int vi = 0; vi < numValidityWords; ++vi) { - int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); - long validityWord = isValid.get(vi); - do { - if ((validityWord & 1) == 1) { - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Long.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - ei += pendingSkips; - pendingSkips = 0; - } - chunk.set(offset + ei++, conversion.apply(is.readLong())); - validityWord >>= 1; - bitsLeftInThisWord--; - } else { - final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); - pendingSkips += skips; - validityWord >>= skips; - bitsLeftInThisWord -= skips; - } - } while (bitsLeftInThisWord > 0); - } - - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Long.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - } - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkReader.java new file mode 100644 index 00000000000..743e0a37c8f --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkReader.java @@ -0,0 +1,204 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +// ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY +// ****** Edit CharChunkReader and run "./gradlew replicateBarrageUtils" to regenerate +// +// @formatter:off +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.base.verify.Assert; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; +import java.util.function.Function; +import java.util.function.IntFunction; + +import static io.deephaven.util.QueryConstants.NULL_LONG; + +public class LongChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "LongChunkReader"; + private final StreamReaderOptions options; + private final LongConversion conversion; + + @FunctionalInterface + public interface LongConversion { + long apply(long in); + + LongConversion IDENTITY = (long a) -> a; + } + + public LongChunkReader(StreamReaderOptions options) { + this(options, LongConversion.IDENTITY); + } + + public LongChunkReader(StreamReaderOptions options, LongConversion conversion) { + this.options = options; + this.conversion = conversion; + } + + public ChunkReader transform(Function transform) { + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { + try (final WritableLongChunk inner = LongChunkReader.this.read( + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + + final WritableObjectChunk chunk = castOrCreateChunk( + outChunk, + 
Math.max(totalRows, inner.size()), + WritableObjectChunk::makeWritableChunk, + WritableChunk::asWritableObjectChunk); + + if (outChunk == null) { + // if we're not given an output chunk then we better be writing at the front of the new one + Assert.eqZero(outOffset, "outOffset"); + } + + for (int ii = 0; ii < inner.size(); ++ii) { + long value = inner.get(ii); + chunk.set(outOffset + ii, transform.apply(value)); + } + + return chunk; + } + }; + } + + @Override + public WritableLongChunk read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long payloadBuffer = bufferInfoIter.nextLong(); + + final WritableLongChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, nodeInfo.numElements), + WritableLongChunk::makeWritableChunk, + WritableChunk::asWritableLongChunk); + + if (nodeInfo.numElements == 0) { + return chunk; + } + + final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { + if (options.useDeephavenNulls() && validityBuffer != 0) { + throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); + } + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + final long payloadRead = (long) nodeInfo.numElements * Long.BYTES; + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); + + if (options.useDeephavenNulls()) { + useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); + } else { + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); + } + + final long overhangPayload = payloadBuffer - payloadRead; + if (overhangPayload > 0) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); + } + } + + return chunk; + } + + private static > T castOrCreateChunk( + final WritableChunk outChunk, + final int numRows, + final IntFunction chunkFactory, + final Function, T> castFunction) { + if (outChunk != null) { + return castFunction.apply(outChunk); + } + final T newChunk = chunkFactory.apply(numRows); + newChunk.setSize(numRows); + return newChunk; + } + + private static void useDeephavenNulls( + final LongConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableLongChunk chunk, + final int offset) throws IOException { + if (conversion == LongConversion.IDENTITY) { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + chunk.set(offset + ii, is.readLong()); + } + } else { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + final long in = is.readLong(); + final long out = in == NULL_LONG ? 
in : conversion.apply(in); + chunk.set(offset + ii, out); + } + } + } + + private static void useValidityBuffer( + final LongConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableLongChunk chunk, + final int offset, + final WritableLongChunk isValid) throws IOException { + final int numElements = nodeInfo.numElements; + final int numValidityWords = (numElements + 63) / 64; + + int ei = 0; + int pendingSkips = 0; + + for (int vi = 0; vi < numValidityWords; ++vi) { + int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); + long validityWord = isValid.get(vi); + do { + if ((validityWord & 1) == 1) { + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Long.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + ei += pendingSkips; + pendingSkips = 0; + } + chunk.set(offset + ei++, conversion.apply(is.readLong())); + validityWord >>= 1; + bitsLeftInThisWord--; + } else { + final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); + pendingSkips += skips; + validityWord >>= skips; + bitsLeftInThisWord -= skips; + } + } while (bitsLeftInThisWord > 0); + } + + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Long.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + } + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java index 68a2ecf86b1..4fd81b47d03 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkInputStreamGenerator.java @@ -7,10 +7,7 @@ // @formatter:off package io.deephaven.extensions.barrage.chunk; -import io.deephaven.base.verify.Assert; import io.deephaven.chunk.ObjectChunk; -import io.deephaven.chunk.WritableChunk; -import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; import io.deephaven.engine.primitive.function.ToShortFunction; @@ -21,17 +18,11 @@ import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.ShortChunk; import io.deephaven.chunk.WritableShortChunk; -import io.deephaven.chunk.WritableLongChunk; import io.deephaven.util.type.TypeUtils; import org.jetbrains.annotations.Nullable; -import java.io.DataInput; import java.io.IOException; import java.io.OutputStream; -import java.util.Iterator; -import java.util.PrimitiveIterator; -import java.util.function.Function; -import java.util.function.IntFunction; import static io.deephaven.util.QueryConstants.*; @@ -167,192 +158,4 @@ public int drainTo(final OutputStream outputStream) throws IOException { return LongSizedDataStructure.intSize("ShortChunkInputStreamGenerator", bytesWritten); } } - - @FunctionalInterface - public interface ShortConversion { - short apply(short in); - - ShortConversion IDENTITY = (short a) -> a; - } - - static WritableShortChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - return extractChunkFromInputStreamWithConversion( - options, ShortConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, 
outOffset, - totalRows); - } - - static WritableObjectChunk extractChunkFromInputStreamWithTransform( - final StreamReaderOptions options, - final Function transform, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - try (final WritableShortChunk inner = extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { - - final WritableObjectChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, inner.size()), - WritableObjectChunk::makeWritableChunk, - WritableChunk::asWritableObjectChunk); - - if (outChunk == null) { - // if we're not given an output chunk then we better be writing at the front of the new one - Assert.eqZero(outOffset, "outOffset"); - } - - for (int ii = 0; ii < inner.size(); ++ii) { - short value = inner.get(ii); - chunk.set(outOffset + ii, transform.apply(value)); - } - - return chunk; - } - } - - static WritableShortChunk extractChunkFromInputStreamWithConversion( - final StreamReaderOptions options, - final ShortConversion conversion, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long payloadBuffer = bufferInfoIter.nextLong(); - - final WritableShortChunk chunk = castOrCreateChunk( - outChunk, - Math.max(totalRows, nodeInfo.numElements), - WritableShortChunk::makeWritableChunk, - WritableChunk::asWritableShortChunk); - - if (nodeInfo.numElements == 0) { - return chunk; - } - - final int numValidityLongs = options.useDeephavenNulls() ? 
0 : (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { - if (options.useDeephavenNulls() && validityBuffer != 0) { - throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); - } - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity buffer by here - - final long payloadRead = (long) nodeInfo.numElements * Short.BYTES; - Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); - - if (options.useDeephavenNulls()) { - useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); - } else { - useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); - } - - final long overhangPayload = payloadBuffer - payloadRead; - if (overhangPayload > 0) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); - } - } - - return chunk; - } - - private static > T castOrCreateChunk( - final WritableChunk outChunk, - final int numRows, - final IntFunction chunkFactory, - final Function, T> castFunction) { - if (outChunk != null) { - return castFunction.apply(outChunk); - } - final T newChunk = chunkFactory.apply(numRows); - newChunk.setSize(numRows); - return newChunk; - } - - private static void useDeephavenNulls( - final ShortConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableShortChunk chunk, - final int offset) throws IOException { - if (conversion == ShortConversion.IDENTITY) { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - chunk.set(offset + ii, is.readShort()); - } - } else { - for (int ii = 0; ii < nodeInfo.numElements; ++ii) { - final short in = is.readShort(); - final short out = in == NULL_SHORT ? 
in : conversion.apply(in); - chunk.set(offset + ii, out); - } - } - } - - private static void useValidityBuffer( - final ShortConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableShortChunk chunk, - final int offset, - final WritableLongChunk isValid) throws IOException { - final int numElements = nodeInfo.numElements; - final int numValidityWords = (numElements + 63) / 64; - - int ei = 0; - int pendingSkips = 0; - - for (int vi = 0; vi < numValidityWords; ++vi) { - int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); - long validityWord = isValid.get(vi); - do { - if ((validityWord & 1) == 1) { - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Short.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - ei += pendingSkips; - pendingSkips = 0; - } - chunk.set(offset + ei++, conversion.apply(is.readShort())); - validityWord >>= 1; - bitsLeftInThisWord--; - } else { - final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); - pendingSkips += skips; - validityWord >>= skips; - bitsLeftInThisWord -= skips; - } - } while (bitsLeftInThisWord > 0); - } - - if (pendingSkips > 0) { - is.skipBytes(pendingSkips * Short.BYTES); - chunk.fillWithNullValue(offset + ei, pendingSkips); - } - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkReader.java new file mode 100644 index 00000000000..56c17c2c11f --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkReader.java @@ -0,0 +1,204 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +// ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY +// ****** Edit CharChunkReader and run "./gradlew replicateBarrageUtils" to regenerate +// +// @formatter:off +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.base.verify.Assert; +import io.deephaven.chunk.WritableShortChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; +import java.util.function.Function; +import java.util.function.IntFunction; + +import static io.deephaven.util.QueryConstants.NULL_SHORT; + +public class ShortChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "ShortChunkReader"; + private final StreamReaderOptions options; + private final ShortConversion conversion; + + @FunctionalInterface + public interface ShortConversion { + short apply(short in); + + ShortConversion IDENTITY = (short a) -> a; + } + + public ShortChunkReader(StreamReaderOptions options) { + this(options, ShortConversion.IDENTITY); + } + + public ShortChunkReader(StreamReaderOptions options, ShortConversion conversion) { + this.options = options; + this.conversion = conversion; + } + + public ChunkReader transform(Function transform) { + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { + try (final WritableShortChunk inner = ShortChunkReader.this.read( + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + + final WritableObjectChunk chunk = castOrCreateChunk( + 
outChunk, + Math.max(totalRows, inner.size()), + WritableObjectChunk::makeWritableChunk, + WritableChunk::asWritableObjectChunk); + + if (outChunk == null) { + // if we're not given an output chunk then we better be writing at the front of the new one + Assert.eqZero(outOffset, "outOffset"); + } + + for (int ii = 0; ii < inner.size(); ++ii) { + short value = inner.get(ii); + chunk.set(outOffset + ii, transform.apply(value)); + } + + return chunk; + } + }; + } + + @Override + public WritableShortChunk read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long payloadBuffer = bufferInfoIter.nextLong(); + + final WritableShortChunk chunk = castOrCreateChunk( + outChunk, + Math.max(totalRows, nodeInfo.numElements), + WritableShortChunk::makeWritableChunk, + WritableChunk::asWritableShortChunk); + + if (nodeInfo.numElements == 0) { + return chunk; + } + + final int numValidityLongs = options.useDeephavenNulls() ? 0 : (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { + if (options.useDeephavenNulls() && validityBuffer != 0) { + throw new IllegalStateException("validity buffer is non-empty, but is unnecessary"); + } + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + final long payloadRead = (long) nodeInfo.numElements * Short.BYTES; + Assert.geq(payloadBuffer, "payloadBuffer", payloadRead, "payloadRead"); + + if (options.useDeephavenNulls()) { + useDeephavenNulls(conversion, is, nodeInfo, chunk, outOffset); + } else { + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); + } + + final long overhangPayload = payloadBuffer - payloadRead; + if (overhangPayload > 0) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); + } + } + + return chunk; + } + + private static > T castOrCreateChunk( + final WritableChunk outChunk, + final int numRows, + final IntFunction chunkFactory, + final Function, T> castFunction) { + if (outChunk != null) { + return castFunction.apply(outChunk); + } + final T newChunk = chunkFactory.apply(numRows); + newChunk.setSize(numRows); + return newChunk; + } + + private static void useDeephavenNulls( + final ShortConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableShortChunk chunk, + final int offset) throws IOException { + if (conversion == ShortConversion.IDENTITY) { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + chunk.set(offset + ii, is.readShort()); + } + } else { + for (int ii = 0; ii < nodeInfo.numElements; ++ii) { + final short in = is.readShort(); + final short out = in == NULL_SHORT ? 
in : conversion.apply(in); + chunk.set(offset + ii, out); + } + } + } + + private static void useValidityBuffer( + final ShortConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableShortChunk chunk, + final int offset, + final WritableLongChunk isValid) throws IOException { + final int numElements = nodeInfo.numElements; + final int numValidityWords = (numElements + 63) / 64; + + int ei = 0; + int pendingSkips = 0; + + for (int vi = 0; vi < numValidityWords; ++vi) { + int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); + long validityWord = isValid.get(vi); + do { + if ((validityWord & 1) == 1) { + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Short.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + ei += pendingSkips; + pendingSkips = 0; + } + chunk.set(offset + ei++, conversion.apply(is.readShort())); + validityWord >>= 1; + bitsLeftInThisWord--; + } else { + final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); + pendingSkips += skips; + validityWord >>= skips; + bitsLeftInThisWord -= skips; + } + } while (bitsLeftInThisWord > 0); + } + + if (pendingSkips > 0) { + is.skipBytes(pendingSkips * Short.BYTES); + chunk.fillWithNullValue(offset + ei, pendingSkips); + } + } +} diff --git a/replication/static/src/main/java/io/deephaven/replicators/ReplicateBarrageUtils.java b/replication/static/src/main/java/io/deephaven/replicators/ReplicateBarrageUtils.java index 63670ad93d7..6824f8d91f9 100644 --- a/replication/static/src/main/java/io/deephaven/replicators/ReplicateBarrageUtils.java +++ b/replication/static/src/main/java/io/deephaven/replicators/ReplicateBarrageUtils.java @@ -25,6 +25,9 @@ public static void main(final String[] args) throws IOException { fixupChunkInputStreamGen(CHUNK_PACKAGE + "/LongChunkInputStreamGenerator.java", "Long"); fixupChunkInputStreamGen(CHUNK_PACKAGE + "/DoubleChunkInputStreamGenerator.java", "Double"); + ReplicatePrimitiveCode.charToAllButBoolean("replicateBarrageUtils", + CHUNK_PACKAGE + "/CharChunkReader.java"); + ReplicatePrimitiveCode.charToAllButBoolean("replicateBarrageUtils", CHUNK_PACKAGE + "/array/CharArrayExpansionKernel.java"); From 5d51345b9326b95cde375ace6cfdbb86e30b6724 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 25 Jun 2024 13:56:21 -0500 Subject: [PATCH 099/219] Commit #5, also boolean chunk reader --- .../BooleanChunkInputStreamGenerator.java | 126 ---------------- .../barrage/chunk/BooleanChunkReader.java | 141 ++++++++++++++++++ .../chunk/DefaultChunkReadingFactory.java | 4 +- 3 files changed, 142 insertions(+), 129 deletions(-) create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkReader.java diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkInputStreamGenerator.java index 8c1503b67f8..e1f478ef5ef 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkInputStreamGenerator.java @@ -153,130 +153,4 @@ public int drainTo(final OutputStream outputStream) throws IOException { return LongSizedDataStructure.intSize(DEBUG_NAME, bytesWritten); } } - - @FunctionalInterface - public interface ByteConversion { - byte apply(byte in); - - ByteConversion IDENTITY 
= (byte a) -> a; - } - - static WritableChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - return extractChunkFromInputStreamWithConversion( - options, ByteConversion.IDENTITY, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - - static WritableChunk extractChunkFromInputStreamWithConversion( - final StreamReaderOptions options, - final ByteConversion conversion, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, - final int outOffset, - final int totalRows) throws IOException { - - final FieldNodeInfo nodeInfo = fieldNodeIter.next(); - final long validityBuffer = bufferInfoIter.nextLong(); - final long payloadBuffer = bufferInfoIter.nextLong(); - - final WritableByteChunk chunk; - if (outChunk != null) { - chunk = outChunk.asWritableByteChunk(); - } else { - final int numRows = Math.max(totalRows, nodeInfo.numElements); - chunk = WritableByteChunk.makeWritableChunk(numRows); - chunk.setSize(numRows); - } - - if (nodeInfo.numElements == 0) { - return chunk; - } - - final int numValidityLongs = (nodeInfo.numElements + 63) / 64; - try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { - int jj = 0; - for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { - isValid.set(jj, is.readLong()); - } - final long valBufRead = jj * 8L; - if (valBufRead < validityBuffer) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); - } - // we support short validity buffers - for (; jj < numValidityLongs; ++jj) { - isValid.set(jj, -1); // -1 is bit-wise representation of all ones - } - // consumed entire validity buffer by here - - final int numPayloadBytesNeeded = (int) ((nodeInfo.numElements + 7L) / 8L); - if (payloadBuffer < numPayloadBytesNeeded) { - throw new IllegalStateException("payload buffer is too short for expected number of elements"); - } - - // cannot use deephaven nulls as booleans are not nullable - useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); - - // flight requires that the payload buffer be padded to multiples of 8 bytes - final long payloadRead = getNumLongsForBitPackOfSize(nodeInfo.numElements) * 8L; - final long overhangPayload = payloadBuffer - payloadRead; - if (overhangPayload > 0) { - is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); - } - } - - return chunk; - } - - private static void useValidityBuffer( - final ByteConversion conversion, - final DataInput is, - final FieldNodeInfo nodeInfo, - final WritableByteChunk chunk, - final int offset, - final WritableLongChunk isValid) throws IOException { - final int numElements = nodeInfo.numElements; - final int numValidityWords = (numElements + 63) / 64; - - int ei = 0; - int pendingSkips = 0; - - for (int vi = 0; vi < numValidityWords; ++vi) { - int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); - long validityWord = isValid.get(vi); - long payloadWord = is.readLong(); - do { - if ((validityWord & 1) == 1) { - if (pendingSkips > 0) { - chunk.fillWithNullValue(offset + ei, pendingSkips); - ei += pendingSkips; - pendingSkips = 0; - } - final byte value = (payloadWord & 1) == 1 ? 
BooleanUtils.TRUE_BOOLEAN_AS_BYTE - : BooleanUtils.FALSE_BOOLEAN_AS_BYTE; - chunk.set(offset + ei++, conversion.apply(value)); - validityWord >>= 1; - payloadWord >>= 1; - bitsLeftInThisWord--; - } else { - final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); - pendingSkips += skips; - validityWord >>= skips; - payloadWord >>= skips; - bitsLeftInThisWord -= skips; - } - } while (bitsLeftInThisWord > 0); - } - - if (pendingSkips > 0) { - chunk.fillWithNullValue(offset + ei, pendingSkips); - } - } } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkReader.java new file mode 100644 index 00000000000..da0cc96cad4 --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkReader.java @@ -0,0 +1,141 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.chunk.WritableByteChunk; +import io.deephaven.chunk.WritableChunk; +import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.chunk.attributes.Values; +import io.deephaven.util.BooleanUtils; +import io.deephaven.util.datastructures.LongSizedDataStructure; + +import java.io.DataInput; +import java.io.IOException; +import java.util.Iterator; +import java.util.PrimitiveIterator; + +import static io.deephaven.extensions.barrage.chunk.BaseChunkInputStreamGenerator.getNumLongsForBitPackOfSize; + +public class BooleanChunkReader implements ChunkReader { + private static final String DEBUG_NAME = "BooleanChunkReader"; + + @FunctionalInterface + public interface ByteConversion { + byte apply(byte in); + + ByteConversion IDENTITY = (byte a) -> a; + } + + private final ByteConversion conversion; + + public BooleanChunkReader() { + this(ByteConversion.IDENTITY); + } + + public BooleanChunkReader(ByteConversion conversion) { + this.conversion = conversion; + } + + @Override + public WritableChunk read(Iterator fieldNodeIter, + PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, + int totalRows) throws IOException { + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); + final long validityBuffer = bufferInfoIter.nextLong(); + final long payloadBuffer = bufferInfoIter.nextLong(); + + final WritableByteChunk chunk; + if (outChunk != null) { + chunk = outChunk.asWritableByteChunk(); + } else { + final int numRows = Math.max(totalRows, nodeInfo.numElements); + chunk = WritableByteChunk.makeWritableChunk(numRows); + chunk.setSize(numRows); + } + + if (nodeInfo.numElements == 0) { + return chunk; + } + + final int numValidityLongs = (nodeInfo.numElements + 63) / 64; + try (final WritableLongChunk isValid = WritableLongChunk.makeWritableChunk(numValidityLongs)) { + int jj = 0; + for (; jj < Math.min(numValidityLongs, validityBuffer / 8); ++jj) { + isValid.set(jj, is.readLong()); + } + final long valBufRead = jj * 8L; + if (valBufRead < validityBuffer) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, validityBuffer - valBufRead)); + } + // we support short validity buffers + for (; jj < numValidityLongs; ++jj) { + isValid.set(jj, -1); // -1 is bit-wise representation of all ones + } + // consumed entire validity buffer by here + + final int numPayloadBytesNeeded = (int) ((nodeInfo.numElements + 7L) / 8L); + if (payloadBuffer < numPayloadBytesNeeded) { + throw new 
IllegalStateException("payload buffer is too short for expected number of elements"); + } + + // cannot use deephaven nulls as booleans are not nullable + useValidityBuffer(conversion, is, nodeInfo, chunk, outOffset, isValid); + + // flight requires that the payload buffer be padded to multiples of 8 bytes + final long payloadRead = getNumLongsForBitPackOfSize(nodeInfo.numElements) * 8L; + final long overhangPayload = payloadBuffer - payloadRead; + if (overhangPayload > 0) { + is.skipBytes(LongSizedDataStructure.intSize(DEBUG_NAME, overhangPayload)); + } + } + + return chunk; + } + + + private static void useValidityBuffer( + final ByteConversion conversion, + final DataInput is, + final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo, + final WritableByteChunk chunk, + final int offset, + final WritableLongChunk isValid) throws IOException { + final int numElements = nodeInfo.numElements; + final int numValidityWords = (numElements + 63) / 64; + + int ei = 0; + int pendingSkips = 0; + + for (int vi = 0; vi < numValidityWords; ++vi) { + int bitsLeftInThisWord = Math.min(64, numElements - vi * 64); + long validityWord = isValid.get(vi); + long payloadWord = is.readLong(); + do { + if ((validityWord & 1) == 1) { + if (pendingSkips > 0) { + chunk.fillWithNullValue(offset + ei, pendingSkips); + ei += pendingSkips; + pendingSkips = 0; + } + final byte value = (payloadWord & 1) == 1 ? BooleanUtils.TRUE_BOOLEAN_AS_BYTE + : BooleanUtils.FALSE_BOOLEAN_AS_BYTE; + chunk.set(offset + ei++, conversion.apply(value)); + validityWord >>= 1; + payloadWord >>= 1; + bitsLeftInThisWord--; + } else { + final int skips = Math.min(Long.numberOfTrailingZeros(validityWord), bitsLeftInThisWord); + pendingSkips += skips; + validityWord >>= skips; + payloadWord >>= skips; + bitsLeftInThisWord -= skips; + } + } while (bitsLeftInThisWord > 0); + } + + if (pendingSkips > 0) { + chunk.fillWithNullValue(offset + ei, pendingSkips); + } + } +} diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java index bbf5b398fe7..d03a0758701 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java @@ -40,9 +40,7 @@ public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int return new CharChunkReader(options); case Byte: if (typeInfo.type() == Boolean.class || typeInfo.type() == boolean.class) { - return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, - totalRows) -> BooleanChunkInputStreamGenerator.extractChunkFromInputStream( - options, fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return new BooleanChunkReader(); } return new ByteChunkReader(options); case Short: From 65aac864dde5e7365be12d4bd4c562e449dc685d Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Thu, 27 Jun 2024 14:36:04 -0500 Subject: [PATCH 100/219] Better naming, docs --- .../barrage/chunk/BooleanChunkReader.java | 2 +- .../barrage/chunk/ByteChunkReader.java | 4 +- .../barrage/chunk/CharChunkReader.java | 4 +- .../chunk/ChunkInputStreamGenerator.java | 8 +- .../extensions/barrage/chunk/ChunkReader.java | 20 ++-- .../barrage/chunk/ChunkReaderFactory.java | 93 +++++++++++++++++++ .../barrage/chunk/ChunkReadingFactory.java | 85 ----------------- .../chunk/DefaultChunkReadingFactory.java | 8 
+- .../barrage/chunk/DoubleChunkReader.java | 4 +- .../barrage/chunk/FloatChunkReader.java | 4 +- .../barrage/chunk/IntChunkReader.java | 4 +- .../barrage/chunk/LongChunkReader.java | 4 +- .../barrage/chunk/ShortChunkReader.java | 4 +- .../barrage/chunk/VarListChunkReader.java | 18 ++-- .../barrage/chunk/VectorChunkReader.java | 18 ++-- .../barrage/util/ArrowToTableConverter.java | 10 +- .../barrage/util/BarrageStreamReader.java | 21 +++-- 17 files changed, 164 insertions(+), 147 deletions(-) create mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReaderFactory.java delete mode 100644 extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkReader.java index da0cc96cad4..9195db956a4 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/BooleanChunkReader.java @@ -38,7 +38,7 @@ public BooleanChunkReader(ByteConversion conversion) { } @Override - public WritableChunk read(Iterator fieldNodeIter, + public WritableChunk readChunk(Iterator fieldNodeIter, PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, int totalRows) throws IOException { final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkReader.java index 29bee0fea05..d9a473df93f 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ByteChunkReader.java @@ -48,7 +48,7 @@ public ByteChunkReader(StreamReaderOptions options, ByteConversion conversion) { public ChunkReader transform(Function transform) { return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { - try (final WritableByteChunk inner = ByteChunkReader.this.read( + try (final WritableByteChunk inner = ByteChunkReader.this.readChunk( fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( @@ -73,7 +73,7 @@ public ChunkReader transform(Function transform) { } @Override - public WritableByteChunk read(Iterator fieldNodeIter, + public WritableByteChunk readChunk(Iterator fieldNodeIter, PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, int totalRows) throws IOException { diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkReader.java index b6fce96ffbf..d3fc3ed47a7 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/CharChunkReader.java @@ -44,7 +44,7 @@ public CharChunkReader(StreamReaderOptions options, CharConversion conversion) { public ChunkReader transform(Function transform) { return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { - try (final WritableCharChunk inner = CharChunkReader.this.read( + try (final 
WritableCharChunk inner = CharChunkReader.this.readChunk( fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( @@ -69,7 +69,7 @@ public ChunkReader transform(Function transform) { } @Override - public WritableCharChunk read(Iterator fieldNodeIter, + public WritableCharChunk readChunk(Iterator fieldNodeIter, PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, int totalRows) throws IOException { diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java index a2ae09fb1d0..ce371d9ae87 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java @@ -32,6 +32,8 @@ import java.util.Iterator; import java.util.PrimitiveIterator; +import static io.deephaven.extensions.barrage.chunk.ChunkReaderFactory.typeInfo; + public interface ChunkInputStreamGenerator extends SafeCloseable { long MS_PER_DAY = 24 * 60 * 60 * 1000L; long MIN_LOCAL_DATE_VALUE = QueryConstants.MIN_LONG / MS_PER_DAY; @@ -205,9 +207,9 @@ private static WritableChunk extractChunkFromInputStream( final PrimitiveIterator.OfLong bufferInfoIter, final DataInput is, final WritableChunk outChunk, final int outOffset, final int totalRows) throws IOException { - return DefaultChunkReadingFactory.INSTANCE.extractChunkFromInputStream(options, factor, - new ChunkReadingFactory.ChunkTypeInfo(chunkType, type, componentType, null)) - .read(fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); + return DefaultChunkReadingFactory.INSTANCE + .getReader(options, factor, typeInfo(chunkType, type, componentType, null)) + .readChunk(fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); } /** diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReader.java index de90744fc0d..92d40aafe0c 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReader.java @@ -16,16 +16,18 @@ */ public interface ChunkReader { /** - * - * @param fieldNodeIter - * @param bufferInfoIter - * @param is - * @param outChunk - * @param outOffset - * @param totalRows - * @return + * Reads the given DataInput to extract the next Arrow buffer as a Deephaven Chunk. 
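+ * <p>
+ * A minimal sketch of a caller (hypothetical names; readers are typically obtained per column from a
+ * {@code ChunkReaderFactory}), assuming the caller owns and closes the returned chunk when {@code outChunk}
+ * is null:
+ *
+ * <pre>{@code
+ * try (final WritableChunk column = reader.readChunk(
+ *         fieldNodeIter, bufferInfoIter, is, null, 0, totalRows)) {
+ *     // consume the column's values before the chunk is closed
+ * }
+ * }</pre>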
+ * + * @param fieldNodeIter iterator to read fields from the stream + * @param bufferInfoIter iterator to read buffers from the stream + * @param is input stream containing buffers to be read + * @param outChunk chunk to write to + * @param outOffset offset within the outChunk to begin writing + * @param totalRows total rows to write to the outChunk + * @return a Chunk containing the data from the stream + * @throws IOException if an error occurred while reading the stream */ - WritableChunk read(final Iterator fieldNodeIter, + WritableChunk readChunk(final Iterator fieldNodeIter, final PrimitiveIterator.OfLong bufferInfoIter, final DataInput is, final WritableChunk outChunk, diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReaderFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReaderFactory.java new file mode 100644 index 00000000000..3b91e27f193 --- /dev/null +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReaderFactory.java @@ -0,0 +1,93 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.extensions.barrage.chunk; + +import io.deephaven.chunk.ChunkType; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import org.apache.arrow.flatbuf.Field; +import org.apache.arrow.flatbuf.Type; + +/** + * Supports creation of {@link ChunkReader} instances to use when processing a flight stream. JVM implementations for + * client and server should probably use {@link DefaultChunkReadingFactory#INSTANCE}. + */ +public interface ChunkReaderFactory { + /** + * Describes type info used by factory implementations when creating a ChunkReader. + */ + class TypeInfo { + private final ChunkType chunkType; + private final Class type; + private final Class componentType; + private final Field arrowField; + + public TypeInfo(ChunkType chunkType, Class type, Class componentType, Field arrowField) { + this.chunkType = chunkType; + this.type = type; + this.componentType = componentType; + this.arrowField = arrowField; + } + + public ChunkType chunkType() { + return chunkType; + } + + public Class type() { + return type; + } + + public Class componentType() { + return componentType; + } + + public Field arrowField() { + return arrowField; + } + + public Field componentArrowField() { + if (arrowField.typeType() != Type.List) { + throw new IllegalStateException("Not a flight List"); + } + if (arrowField.childrenLength() != 1) { + throw new IllegalStateException("Incorrect number of child Fields"); + } + return arrowField.children(0); + } + } + + /** + * Factory method to create a TypeInfo instance. + * + * @param chunkType the output chunk type + * @param type the Java type to be read into the chunk + * @param componentType the Java type of nested components + * @param arrowField the Arrow type to be read into the chunk + * @return a TypeInfo instance + */ + static TypeInfo typeInfo(ChunkType chunkType, Class type, Class componentType, Field arrowField) { + return new TypeInfo(chunkType, type, componentType, arrowField); + } + + /** + * Returns a {@link ChunkReader} for the specified arguments. 
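+ * <p>
+ * The {@code factor} is applied multiplicatively to integral values as they are read, which can be used, for
+ * example, to widen coarser-grained timestamp values; callers that need no scaling can pass {@code 1} or use
+ * the one-argument overload below.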
+ *
+ * @param options options for reading the stream
+ * @param factor a multiplicative factor to apply when reading integers
+ * @param typeInfo the type of data to read into a chunk
+ * @return a ChunkReader based on the given options, factor, and type to read
+ */
+ ChunkReader getReader(final StreamReaderOptions options, final int factor, final TypeInfo typeInfo);
+
+ /**
+ * Returns a {@link ChunkReader} for the specified arguments.
+ *
+ * @param options options for reading the stream
+ * @param typeInfo the type of data to read into a chunk
+ * @return a ChunkReader based on the given options and type to read
+ */
+ default ChunkReader getReader(final StreamReaderOptions options, final TypeInfo typeInfo) {
+ return getReader(options, 1, typeInfo);
+ }
+
+}
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java
deleted file mode 100644
index d3f8ba84a95..00000000000
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReadingFactory.java
+++ /dev/null
@@ -1,85 +0,0 @@
-//
-// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
-//
-package io.deephaven.extensions.barrage.chunk;
-
-import io.deephaven.chunk.ChunkType;
-import io.deephaven.chunk.WritableChunk;
-import io.deephaven.chunk.attributes.Values;
-import io.deephaven.extensions.barrage.util.StreamReaderOptions;
-import org.apache.arrow.flatbuf.Field;
-import org.apache.arrow.flatbuf.Type;
-
-import java.io.DataInput;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.PrimitiveIterator;
-
-/**
- *
- */
-public interface ChunkReadingFactory {
- /**
- *
- */
- class ChunkTypeInfo {
- private final ChunkType chunkType;
- private final Class type;
- private final Class componentType;
- private final Field arrowField;
-
- public ChunkTypeInfo(ChunkType chunkType, Class type, Class componentType, Field arrowField) {
- this.chunkType = chunkType;
- this.type = type;
- this.componentType = componentType;
- this.arrowField = arrowField;
- }
-
- public ChunkType chunkType() {
- return chunkType;
- }
-
- public Class type() {
- return type;
- }
-
- public Class componentType() {
- return componentType;
- }
-
- public Field arrowField() {
- return arrowField;
- }
-
- public Field componentArrowField() {
- if (arrowField.typeType() != Type.List) {
- throw new IllegalStateException("Not a flight List");
- }
- if (arrowField.childrenLength() != 1) {
- throw new IllegalStateException("Incorrect number of child Fields");
- }
- return arrowField.children(0);
- }
- }
-
- /**
- *
- * @param options
- * @param factor
- * @param typeInfo
- * @return
- */
- ChunkReader extractChunkFromInputStream(final StreamReaderOptions options, final int factor,
- final ChunkTypeInfo typeInfo);
-
- /**
- *
- * @param options
- * @param typeInfo
- * @return
- */
- default ChunkReader extractChunkFromInputStream(final StreamReaderOptions options, final ChunkTypeInfo typeInfo) {
- return extractChunkFromInputStream(options, 1, typeInfo);
- }
-
-}
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java
index d03a0758701..5a67a6cd8d5 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java
+++
b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReadingFactory.java @@ -26,12 +26,12 @@ * may not round trip flight types correctly, but will round trip Deephaven table definitions and table data. Neither of * these is a required/expected property of being a Flight/Barrage/Deephaven client. */ -public final class DefaultChunkReadingFactory implements ChunkReadingFactory { - public static final ChunkReadingFactory INSTANCE = new DefaultChunkReadingFactory(); +public final class DefaultChunkReadingFactory implements ChunkReaderFactory { + public static final ChunkReaderFactory INSTANCE = new DefaultChunkReadingFactory(); @Override - public ChunkReader extractChunkFromInputStream(StreamReaderOptions options, int factor, - ChunkTypeInfo typeInfo) { + public ChunkReader getReader(StreamReaderOptions options, int factor, + TypeInfo typeInfo) { // TODO (deephaven-core#5453): pass in ArrowType to enable ser/deser of single java class in multiple formats switch (typeInfo.chunkType()) { case Boolean: diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java index 4b72273272b..39059f29a2f 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java @@ -48,7 +48,7 @@ public DoubleChunkReader(StreamReaderOptions options, DoubleConversion conversio public ChunkReader transform(Function transform) { return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { - try (final WritableDoubleChunk inner = DoubleChunkReader.this.read( + try (final WritableDoubleChunk inner = DoubleChunkReader.this.readChunk( fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( @@ -73,7 +73,7 @@ public ChunkReader transform(Function transform) { } @Override - public WritableDoubleChunk read(Iterator fieldNodeIter, + public WritableDoubleChunk readChunk(Iterator fieldNodeIter, PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, int totalRows) throws IOException { diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkReader.java index 6d434226235..df2bfa32071 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkReader.java @@ -48,7 +48,7 @@ public FloatChunkReader(StreamReaderOptions options, FloatConversion conversion) public ChunkReader transform(Function transform) { return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { - try (final WritableFloatChunk inner = FloatChunkReader.this.read( + try (final WritableFloatChunk inner = FloatChunkReader.this.readChunk( fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( @@ -73,7 +73,7 @@ public ChunkReader transform(Function transform) { } @Override - public WritableFloatChunk read(Iterator fieldNodeIter, + public WritableFloatChunk readChunk(Iterator fieldNodeIter, PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, int totalRows) throws IOException { diff --git 
a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkReader.java index 39bce48735c..edf333f054b 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/IntChunkReader.java @@ -48,7 +48,7 @@ public IntChunkReader(StreamReaderOptions options, IntConversion conversion) { public ChunkReader transform(Function transform) { return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { - try (final WritableIntChunk inner = IntChunkReader.this.read( + try (final WritableIntChunk inner = IntChunkReader.this.readChunk( fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( @@ -73,7 +73,7 @@ public ChunkReader transform(Function transform) { } @Override - public WritableIntChunk read(Iterator fieldNodeIter, + public WritableIntChunk readChunk(Iterator fieldNodeIter, PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, int totalRows) throws IOException { diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkReader.java index 743e0a37c8f..e96385b6740 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/LongChunkReader.java @@ -48,7 +48,7 @@ public LongChunkReader(StreamReaderOptions options, LongConversion conversion) { public ChunkReader transform(Function transform) { return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { - try (final WritableLongChunk inner = LongChunkReader.this.read( + try (final WritableLongChunk inner = LongChunkReader.this.readChunk( fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( @@ -73,7 +73,7 @@ public ChunkReader transform(Function transform) { } @Override - public WritableLongChunk read(Iterator fieldNodeIter, + public WritableLongChunk readChunk(Iterator fieldNodeIter, PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, int totalRows) throws IOException { diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkReader.java index 56c17c2c11f..1bd92351d6c 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ShortChunkReader.java @@ -48,7 +48,7 @@ public ShortChunkReader(StreamReaderOptions options, ShortConversion conversion) public ChunkReader transform(Function transform) { return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { - try (final WritableShortChunk inner = ShortChunkReader.this.read( + try (final WritableShortChunk inner = ShortChunkReader.this.readChunk( fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { final WritableObjectChunk chunk = castOrCreateChunk( @@ -73,7 +73,7 @@ public ChunkReader transform(Function transform) { } @Override - public WritableShortChunk read(Iterator fieldNodeIter, + public WritableShortChunk readChunk(Iterator fieldNodeIter, 
PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, int totalRows) throws IOException { diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkReader.java index 71c294d6387..bf748cef9ae 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkReader.java @@ -19,14 +19,16 @@ import java.util.Iterator; import java.util.PrimitiveIterator; +import static io.deephaven.extensions.barrage.chunk.ChunkReaderFactory.typeInfo; + public class VarListChunkReader implements ChunkReader { private static final String DEBUG_NAME = "VarListChunkReader"; private final ArrayExpansionKernel kernel; private final ChunkReader componentReader; - public VarListChunkReader(final StreamReaderOptions options, final ChunkReadingFactory.ChunkTypeInfo typeInfo, - ChunkReadingFactory chunkReadingFactory) { + public VarListChunkReader(final StreamReaderOptions options, final ChunkReaderFactory.TypeInfo typeInfo, + ChunkReaderFactory chunkReaderFactory) { final Class componentType = typeInfo.type().getComponentType(); final Class innerComponentType = componentType != null ? componentType.getComponentType() : null; @@ -39,14 +41,12 @@ public VarListChunkReader(final StreamReaderOptions options, final ChunkReadingF } kernel = ArrayExpansionKernel.makeExpansionKernel(chunkType, componentType); - componentReader = chunkReadingFactory.extractChunkFromInputStream( - options, - new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, innerComponentType, - typeInfo.componentArrowField())); + componentReader = chunkReaderFactory.getReader(options, + typeInfo(chunkType, componentType, innerComponentType, typeInfo.componentArrowField())); } @Override - public WritableObjectChunk read(Iterator fieldNodeIter, + public WritableObjectChunk readChunk(Iterator fieldNodeIter, PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, int totalRows) throws IOException { final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); @@ -55,7 +55,7 @@ public WritableObjectChunk read(Iterator ignored = - componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + componentReader.readChunk(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { return WritableObjectChunk.makeWritableChunk(nodeInfo.numElements); } } @@ -93,7 +93,7 @@ public WritableObjectChunk read(Iterator inner = - componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + componentReader.readChunk(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { chunk = kernel.contract(inner, offsets, outChunk, outOffset, totalRows); long nextValid = 0; diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkReader.java index decf9419d9d..10243e7adb9 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkReader.java @@ -20,26 +20,28 @@ import java.util.Iterator; import java.util.PrimitiveIterator; +import static io.deephaven.extensions.barrage.chunk.ChunkReaderFactory.typeInfo; + public class VectorChunkReader implements 
ChunkReader { private static final String DEBUG_NAME = "VectorChunkReader"; private final ChunkReader componentReader; private final VectorExpansionKernel kernel; - public VectorChunkReader(final StreamReaderOptions options, final ChunkReadingFactory.ChunkTypeInfo typeInfo, - ChunkReadingFactory chunkReadingFactory) { + public VectorChunkReader(final StreamReaderOptions options, final ChunkReaderFactory.TypeInfo typeInfo, + ChunkReaderFactory chunkReaderFactory) { final Class componentType = VectorExpansionKernel.getComponentType(typeInfo.type(), typeInfo.componentType()); final ChunkType chunkType = ChunkType.fromElementType(componentType); - componentReader = chunkReadingFactory.extractChunkFromInputStream( - options, - new ChunkReadingFactory.ChunkTypeInfo(chunkType, componentType, componentType.getComponentType(), + componentReader = chunkReaderFactory.getReader( + options, typeInfo(chunkType, componentType, componentType.getComponentType(), typeInfo.componentArrowField())); kernel = VectorExpansionKernel.makeExpansionKernel(chunkType, componentType); } @Override - public WritableObjectChunk, Values> read(Iterator fieldNodeIter, + public WritableObjectChunk, Values> readChunk( + Iterator fieldNodeIter, PrimitiveIterator.OfLong bufferInfoIter, DataInput is, WritableChunk outChunk, int outOffset, int totalRows) throws IOException { final ChunkInputStreamGenerator.FieldNodeInfo nodeInfo = fieldNodeIter.next(); @@ -48,7 +50,7 @@ public WritableObjectChunk, Values> read(Iterator ignored = - componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + componentReader.readChunk(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { if (outChunk != null) { return outChunk.asWritableObjectChunk(); } @@ -89,7 +91,7 @@ public WritableObjectChunk, Values> read(Iterator inner = - componentReader.read(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + componentReader.readChunk(fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { chunk = kernel.contract(inner, offsets, outChunk, outOffset, totalRows); long nextValid = 0; diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java index 04e257263a9..6cc4e736ab5 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java @@ -14,7 +14,7 @@ import io.deephaven.extensions.barrage.BarrageSubscriptionOptions; import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator; import io.deephaven.extensions.barrage.chunk.ChunkReader; -import io.deephaven.extensions.barrage.chunk.ChunkReadingFactory; +import io.deephaven.extensions.barrage.chunk.ChunkReaderFactory; import io.deephaven.extensions.barrage.chunk.DefaultChunkReadingFactory; import io.deephaven.extensions.barrage.table.BarrageTable; import io.deephaven.io.streams.ByteBufferInputStream; @@ -35,6 +35,7 @@ import java.util.List; import java.util.PrimitiveIterator; +import static io.deephaven.extensions.barrage.chunk.ChunkReaderFactory.typeInfo; import static io.deephaven.extensions.barrage.util.BarrageProtoUtil.DEFAULT_SER_OPTIONS; /** @@ -157,9 +158,8 @@ protected void parseSchema(final Schema header) { // before doing this for (int i = 0; i < header.fieldsLength(); i++) { final int factor = (result.conversionFactors == null) ? 
1 : result.conversionFactors[i]; - ChunkReader reader = DefaultChunkReadingFactory.INSTANCE.extractChunkFromInputStream(options, factor, - new ChunkReadingFactory.ChunkTypeInfo(columnChunkTypes[i], columnTypes[i], componentTypes[i], - header.fields(i))); + ChunkReader reader = DefaultChunkReadingFactory.INSTANCE.getReader(options, factor, + typeInfo(columnChunkTypes[i], columnTypes[i], componentTypes[i], header.fields(i))); readers.add(reader); } @@ -204,7 +204,7 @@ protected BarrageMessage createBarrageMessage(BarrageProtoUtil.MessageInfo mi, i msg.addColumnData[ci] = acd; msg.addColumnData[ci].data = new ArrayList<>(); try { - acd.data.add(readers.get(ci).read(fieldNodeIter, bufferInfoIter, mi.inputStream, null, 0, 0)); + acd.data.add(readers.get(ci).readChunk(fieldNodeIter, bufferInfoIter, mi.inputStream, null, 0, 0)); } catch (final IOException unexpected) { throw new UncheckedDeephavenException(unexpected); } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java index b38b1eedd57..50f3da16964 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java @@ -20,7 +20,7 @@ import io.deephaven.engine.table.impl.util.*; import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator; import io.deephaven.extensions.barrage.chunk.ChunkReader; -import io.deephaven.extensions.barrage.chunk.ChunkReadingFactory; +import io.deephaven.extensions.barrage.chunk.ChunkReaderFactory; import io.deephaven.extensions.barrage.chunk.DefaultChunkReadingFactory; import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.chunk.ChunkType; @@ -43,6 +43,8 @@ import java.util.PrimitiveIterator; import java.util.function.LongConsumer; +import static io.deephaven.extensions.barrage.chunk.ChunkReaderFactory.typeInfo; + public class BarrageStreamReader implements StreamReader { private static final Logger log = LoggerFactory.getLogger(BarrageStreamReader.class); @@ -59,7 +61,7 @@ public class BarrageStreamReader implements StreamReader { private BarrageMessage msg = null; - private final ChunkReadingFactory chunkReadingFactory = DefaultChunkReadingFactory.INSTANCE; + private final ChunkReaderFactory chunkReaderFactory = DefaultChunkReadingFactory.INSTANCE; private final List readers = new ArrayList<>(); public BarrageStreamReader(final LongConsumer deserializeTmConsumer) { @@ -247,8 +249,9 @@ public BarrageMessage safelyParseFrom(final StreamReaderOptions options, } // fill the chunk with data and assign back into the array - acd.data.set(lastChunkIndex, readers.get(ci).read(fieldNodeIter, bufferInfoIter, ois, chunk, - chunk.size(), (int) batch.length())); + acd.data.set(lastChunkIndex, + readers.get(ci).readChunk(fieldNodeIter, bufferInfoIter, ois, chunk, + chunk.size(), (int) batch.length())); chunk.setSize(chunk.size() + (int) batch.length()); } numAddRowsRead += batch.length(); @@ -276,8 +279,9 @@ public BarrageMessage safelyParseFrom(final StreamReaderOptions options, } // fill the chunk with data and assign back into the array - mcd.data.set(lastChunkIndex, readers.get(ci).read(fieldNodeIter, bufferInfoIter, ois, chunk, - chunk.size(), numRowsToRead)); + mcd.data.set(lastChunkIndex, + readers.get(ci).readChunk(fieldNodeIter, bufferInfoIter, ois, chunk, + chunk.size(), numRowsToRead)); 
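// Editor's note, not part of the patch: the rename from read(...) to readChunk(...)
// does not change the contract. A reader allocates a fresh chunk when the outChunk
// argument is null, and otherwise appends into the caller's chunk starting at
// outOffset, which is how call sites like this one grow a single chunk across
// multiple record batches. A sketch condensed from the surrounding hunk, with
// names taken from the patch:
//
//     WritableChunk<Values> chunk = ...; // sized up front for all expected rows
//     chunk = reader.readChunk(fieldNodeIter, bufferInfoIter, ois,
//             chunk, chunk.size(), numRowsToRead); // append at the current size
//     chunk.setSize(chunk.size() + numRowsToRead); // then publish the new length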
chunk.setSize(chunk.size() + numRowsToRead); } numModRowsRead += batch.length(); @@ -293,9 +297,8 @@ public BarrageMessage safelyParseFrom(final StreamReaderOptions options, // TODO as with ArrowToTableConverter, see about copying the bytebuffer so we control the payload // ourselves Field field = schema.fields(i); - ChunkReader chunkReader = chunkReadingFactory.extractChunkFromInputStream(options, - new ChunkReadingFactory.ChunkTypeInfo(columnChunkTypes[i], columnTypes[i], - componentTypes[i], field)); + ChunkReader chunkReader = chunkReaderFactory.getReader(options, + typeInfo(columnChunkTypes[i], columnTypes[i], componentTypes[i], field)); readers.add(chunkReader); } return null; From 54d828e287687b162f083a7bbe586040a10f85a7 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Thu, 27 Jun 2024 15:33:54 -0500 Subject: [PATCH 101/219] Make use of Schema safe, only look at copied buffers --- .../barrage/util/ArrowToTableConverter.java | 16 +++++++++------- .../barrage/util/BarrageStreamReader.java | 8 +++++--- .../deephaven/server/arrow/ArrowFlightUtil.java | 2 +- 3 files changed, 15 insertions(+), 11 deletions(-) diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java index 6cc4e736ab5..a258df7a9e5 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java @@ -84,7 +84,7 @@ public synchronized void setSchema(final ByteBuffer ipcMessage) { if (mi.header.headerType() != MessageHeader.Schema) { throw new IllegalArgumentException("The input is not a valid Arrow Schema IPC message"); } - parseSchema((Schema) mi.header.header(new Schema())); + parseSchema(mi.header); } @ScriptApi @@ -139,27 +139,29 @@ public synchronized void onCompleted() throws InterruptedException { completed = true; } - protected void parseSchema(final Schema header) { + protected void parseSchema(final Message message) { // The Schema instance (especially originated from Python) can't be assumed to be valid after the return // of this method. Until https://github.com/jpy-consortium/jpy/issues/126 is resolved, we need to make a copy of // the header to use after the return of this method. + ByteBuffer original = message.getByteBuffer(); + ByteBuffer copy = ByteBuffer.allocate(original.remaining()).put(original); + Schema schema = new Schema(); + Message.getRootAsMessage(copy).header(schema); if (resultTable != null) { throw Exceptions.statusRuntimeException(Code.INVALID_ARGUMENT, "Schema evolution not supported"); } - final BarrageUtil.ConvertedArrowSchema result = BarrageUtil.convertArrowSchema(header); + final BarrageUtil.ConvertedArrowSchema result = BarrageUtil.convertArrowSchema(schema); resultTable = BarrageTable.make(null, result.tableDef, result.attributes, null); resultTable.setFlat(); ChunkType[] columnChunkTypes = result.computeWireChunkTypes(); columnTypes = result.computeWireTypes(); componentTypes = result.computeWireComponentTypes(); - // TODO see the note above, this is not safe since the buffer originated in python - we need to copy the schema - // before doing this - for (int i = 0; i < header.fieldsLength(); i++) { + for (int i = 0; i < schema.fieldsLength(); i++) { final int factor = (result.conversionFactors == null) ? 
1 : result.conversionFactors[i]; ChunkReader reader = DefaultChunkReadingFactory.INSTANCE.getReader(options, factor, - typeInfo(columnChunkTypes[i], columnTypes[i], componentTypes[i], header.fields(i))); + typeInfo(columnChunkTypes[i], columnTypes[i], componentTypes[i], schema.fields(i))); readers.add(reader); } diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java index 50f3da16964..a3c80f90eee 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java @@ -290,12 +290,14 @@ public BarrageMessage safelyParseFrom(final StreamReaderOptions options, } if (header != null && header.headerType() == MessageHeader.Schema) { - // there is no body and our clients do not want to see schema messages + // there is no body and our clients do not want to see schema messages, consume the schema so that we + // can read the following messages and return null. + ByteBuffer original = header.getByteBuffer(); + ByteBuffer copy = ByteBuffer.allocate(original.remaining()).put(original); Schema schema = new Schema(); + Message.getRootAsMessage(copy).header(schema); header.header(schema); for (int i = 0; i < schema.fieldsLength(); i++) { - // TODO as with ArrowToTableConverter, see about copying the bytebuffer so we control the payload - // ourselves Field field = schema.fields(i); ChunkReader chunkReader = chunkReaderFactory.getReader(options, typeInfo(columnChunkTypes[i], columnTypes[i], componentTypes[i], field)); diff --git a/server/src/main/java/io/deephaven/server/arrow/ArrowFlightUtil.java b/server/src/main/java/io/deephaven/server/arrow/ArrowFlightUtil.java index f25d5f3abb9..275b79ba216 100644 --- a/server/src/main/java/io/deephaven/server/arrow/ArrowFlightUtil.java +++ b/server/src/main/java/io/deephaven/server/arrow/ArrowFlightUtil.java @@ -217,7 +217,7 @@ public void onNext(final InputStream request) { } if (mi.header.headerType() == MessageHeader.Schema) { - parseSchema((Schema) mi.header.header(new Schema())); + parseSchema(mi.header); return; } From c981291c76ece34ffc82a0b7ee883cfbe7efae23 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Thu, 27 Jun 2024 19:23:31 -0500 Subject: [PATCH 102/219] Remove deprecated method, fix tests so they can have a Field --- .../chunk/ChunkInputStreamGenerator.java | 32 -------- .../chunk/BarrageColumnRoundTripTest.java | 75 ++++++++++++------- 2 files changed, 48 insertions(+), 59 deletions(-) diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java index ce371d9ae87..e1f9039b47e 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkInputStreamGenerator.java @@ -5,7 +5,6 @@ import com.google.common.base.Charsets; import io.deephaven.chunk.ObjectChunk; -import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.WritableLongChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.PoolableChunk; @@ -21,7 +20,6 @@ import io.deephaven.vector.Vector; import org.jetbrains.annotations.Nullable; -import java.io.DataInput; import 
java.io.IOException; import java.math.BigDecimal; import java.math.BigInteger; @@ -29,10 +27,6 @@ import java.time.LocalDate; import java.time.LocalTime; import java.time.ZonedDateTime; -import java.util.Iterator; -import java.util.PrimitiveIterator; - -import static io.deephaven.extensions.barrage.chunk.ChunkReaderFactory.typeInfo; public interface ChunkInputStreamGenerator extends SafeCloseable { long MS_PER_DAY = 24 * 60 * 60 * 1000L; @@ -186,32 +180,6 @@ static ChunkInputStreamGenerator makeInputStreamGenerator( } } - @Deprecated - static WritableChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final ChunkType chunkType, final Class type, final Class componentType, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, final int offset, final int totalRows) throws IOException { - return extractChunkFromInputStream(options, 1, chunkType, type, componentType, fieldNodeIter, bufferInfoIter, - is, outChunk, offset, totalRows); - } - - @Deprecated - private static WritableChunk extractChunkFromInputStream( - final StreamReaderOptions options, - final int factor, - final ChunkType chunkType, final Class type, final Class componentType, - final Iterator fieldNodeIter, - final PrimitiveIterator.OfLong bufferInfoIter, - final DataInput is, - final WritableChunk outChunk, final int outOffset, final int totalRows) throws IOException { - return DefaultChunkReadingFactory.INSTANCE - .getReader(options, factor, typeInfo(chunkType, type, componentType, null)) - .readChunk(fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows); - } - /** * Returns the number of rows that were sent before the first row in this generator. */ diff --git a/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java b/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java index fdda13a40bf..3403ab08eff 100644 --- a/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java +++ b/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java @@ -4,9 +4,12 @@ package io.deephaven.extensions.barrage.chunk; import com.google.common.io.LittleEndianDataInputStream; +import com.google.protobuf.ByteString; import io.deephaven.base.verify.Assert; import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.rowset.RowSequence; +import io.deephaven.engine.table.ColumnDefinition; +import io.deephaven.engine.table.TableDefinition; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetBuilderSequential; @@ -23,12 +26,18 @@ import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.WritableShortChunk; import io.deephaven.extensions.barrage.util.BarrageProtoUtil; +import io.deephaven.extensions.barrage.util.BarrageUtil; +import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.proto.flight.util.SchemaHelper; +import io.deephaven.qst.type.Type; import io.deephaven.util.BooleanUtils; import io.deephaven.util.QueryConstants; import io.deephaven.util.SafeCloseable; import io.deephaven.util.mutable.MutableInt; import io.deephaven.vector.LongVector; import io.deephaven.vector.LongVectorDirect; +import org.apache.arrow.flatbuf.Field; +import org.apache.arrow.flatbuf.Schema; import 
org.jetbrains.annotations.Nullable; import java.io.ByteArrayInputStream; @@ -39,11 +48,16 @@ import java.time.LocalTime; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; +import java.util.Iterator; +import java.util.PrimitiveIterator; import java.util.Random; import java.util.function.Consumer; import java.util.function.IntFunction; import java.util.stream.LongStream; +import static io.deephaven.extensions.barrage.chunk.ChunkReaderFactory.typeInfo; + public class BarrageColumnRoundTripTest extends RefreshingTableTestCase { private static final BarrageSubscriptionOptions OPT_DEFAULT_DH_NULLS = @@ -58,6 +72,23 @@ public class BarrageColumnRoundTripTest extends RefreshingTableTestCase { OPT_DEFAULT }; + private static WritableChunk readChunk( + final StreamReaderOptions options, + final ChunkType chunkType, + final Class type, + final Class componentType, + final Field field, + final Iterator fieldNodeIter, + final PrimitiveIterator.OfLong bufferInfoIter, + final DataInput is, + final WritableChunk outChunk, + final int offset, + final int totalRows) throws IOException { + return DefaultChunkReadingFactory.INSTANCE + .getReader(options, typeInfo(chunkType, type, componentType, field)) + .readChunk(fieldNodeIter, bufferInfoIter, is, outChunk, offset, totalRows); + } + public void testCharChunkSerialization() throws IOException { final Random random = new Random(0); for (final BarrageSubscriptionOptions opts : options) { @@ -637,6 +668,10 @@ private static void testRoundTripSerialization( } else { chunkType = ChunkType.fromElementType(type); } + ByteString schemaBytes = BarrageUtil.schemaBytesFromTableDefinition( + TableDefinition.of(ColumnDefinition.of("col", Type.find(type))), Collections.emptyMap(), false); + Schema schema = SchemaHelper.flatbufSchema(schemaBytes.asReadOnlyByteBuffer()); + Field field = schema.fields(0); final WritableChunk srcData = chunkType.makeWritableChunk(4096); initData.accept(srcData); @@ -651,8 +686,7 @@ private static void testRoundTripSerialization( // full sub logic try (final BarrageProtoUtil.ExposedByteArrayOutputStream baos = new BarrageProtoUtil.ExposedByteArrayOutputStream(); - final ChunkInputStreamGenerator.DrainableColumn column = - generator.getInputStream(options, null)) { + final ChunkInputStreamGenerator.DrainableColumn column = generator.getInputStream(options, null)) { final ArrayList fieldNodes = new ArrayList<>(); @@ -663,11 +697,8 @@ private static void testRoundTripSerialization( column.drainTo(baos); final DataInput dis = new LittleEndianDataInputStream(new ByteArrayInputStream(baos.peekBuffer(), 0, baos.size())); - try (final WritableChunk rtData = - ChunkInputStreamGenerator.extractChunkFromInputStream(options, - chunkType, type, type.getComponentType(), fieldNodes.iterator(), - bufferNodes.build().iterator(), - dis, null, 0, 0)) { + try (final WritableChunk rtData = readChunk(options, chunkType, type, type.getComponentType(), + field, fieldNodes.iterator(), bufferNodes.build().iterator(), dis, null, 0, 0)) { Assert.eq(data.size(), "data.size()", rtData.size(), "rtData.size()"); validator.assertExpected(data, rtData, null, 0); } @@ -687,11 +718,8 @@ private static void testRoundTripSerialization( column.drainTo(baos); final DataInput dis = new LittleEndianDataInputStream(new ByteArrayInputStream(baos.peekBuffer(), 0, baos.size())); - try (final WritableChunk rtData = - ChunkInputStreamGenerator.extractChunkFromInputStream(options, - chunkType, type, type.getComponentType(), fieldNodes.iterator(), - 
bufferNodes.build().iterator(), - dis, null, 0, 0)) { + try (final WritableChunk rtData = readChunk(options, chunkType, type, type.getComponentType(), + field, fieldNodes.iterator(), bufferNodes.build().iterator(), dis, null, 0, 0)) { Assert.eq(rtData.size(), "rtData.size()", 0); } } @@ -718,11 +746,8 @@ private static void testRoundTripSerialization( column.drainTo(baos); final DataInput dis = new LittleEndianDataInputStream(new ByteArrayInputStream(baos.peekBuffer(), 0, baos.size())); - try (final WritableChunk rtData = - ChunkInputStreamGenerator.extractChunkFromInputStream(options, - chunkType, type, type.getComponentType(), fieldNodes.iterator(), - bufferNodes.build().iterator(), - dis, null, 0, 0)) { + try (final WritableChunk rtData = readChunk(options, chunkType, type, type.getComponentType(), + field, fieldNodes.iterator(), bufferNodes.build().iterator(), dis, null, 0, 0)) { Assert.eq(subset.intSize(), "subset.intSize()", rtData.size(), "rtData.size()"); validator.assertExpected(data, rtData, subset, 0); } @@ -745,19 +770,15 @@ private static void testRoundTripSerialization( // first message DataInput dis = new LittleEndianDataInputStream( new ByteArrayInputStream(baos.peekBuffer(), 0, baos.size())); - try (final WritableChunk rtData = - ChunkInputStreamGenerator.extractChunkFromInputStream(options, - chunkType, type, type.getComponentType(), fieldNodes.iterator(), - Arrays.stream(buffers).iterator(), - dis, null, 0, data.size() * 2)) { + try (final WritableChunk rtData = readChunk(options, chunkType, type, type.getComponentType(), + field, fieldNodes.iterator(), Arrays.stream(buffers).iterator(), dis, null, 0, + data.size() * 2)) { // second message dis = new LittleEndianDataInputStream( new ByteArrayInputStream(baos.peekBuffer(), 0, baos.size())); - final WritableChunk rtData2 = - ChunkInputStreamGenerator.extractChunkFromInputStream(options, - chunkType, type, type.getComponentType(), fieldNodes.iterator(), - Arrays.stream(buffers).iterator(), - dis, rtData, data.size(), data.size() * 2); + final WritableChunk rtData2 = readChunk(options, chunkType, type, type.getComponentType(), + field, fieldNodes.iterator(), Arrays.stream(buffers).iterator(), dis, rtData, data.size(), + data.size() * 2); Assert.eq(rtData, "rtData", rtData2, "rtData2"); validator.assertExpected(data, rtData, null, 0); validator.assertExpected(data, rtData, null, data.size()); From f40a9a7dc13630a825790755bd8e4d1bd98d88b2 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 28 Jun 2024 08:05:42 -0500 Subject: [PATCH 103/219] Rewind bytebuffers after copying --- .../extensions/barrage/util/ArrowToTableConverter.java | 2 +- .../deephaven/extensions/barrage/util/BarrageStreamReader.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java index a258df7a9e5..9a71c4ec72e 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java @@ -144,7 +144,7 @@ protected void parseSchema(final Message message) { // of this method. Until https://github.com/jpy-consortium/jpy/issues/126 is resolved, we need to make a copy of // the header to use after the return of this method. 
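// Editor's note, not part of the patch: the one-line fix below matters because
// ByteBuffer.put(ByteBuffer) leaves the destination's position at its limit, so a
// copy built with allocate(...).put(original) cannot be parsed correctly until it
// is rewound. A minimal sketch of the corrected idiom (relies on the Java 9+
// covariant Buffer overrides so that rewind() returns ByteBuffer):
//
//     ByteBuffer original = message.getByteBuffer();
//     ByteBuffer copy = ByteBuffer.allocate(original.remaining())
//             .put(original) // position == limit after the bulk put
//             .rewind();     // reset position to 0 before FlatBuffers reads it
//     Message safe = Message.getRootAsMessage(copy);
//
// The bulk put also advances the position of `original`, which is acceptable here
// because the original buffer is not read again afterwards.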
ByteBuffer original = message.getByteBuffer(); - ByteBuffer copy = ByteBuffer.allocate(original.remaining()).put(original); + ByteBuffer copy = ByteBuffer.allocate(original.remaining()).put(original).rewind(); Schema schema = new Schema(); Message.getRootAsMessage(copy).header(schema); if (resultTable != null) { diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java index a3c80f90eee..353cd157e1f 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java @@ -293,7 +293,7 @@ public BarrageMessage safelyParseFrom(final StreamReaderOptions options, // there is no body and our clients do not want to see schema messages, consume the schema so that we // can read the following messages and return null. ByteBuffer original = header.getByteBuffer(); - ByteBuffer copy = ByteBuffer.allocate(original.remaining()).put(original); + ByteBuffer copy = ByteBuffer.allocate(original.remaining()).put(original).rewind(); Schema schema = new Schema(); Message.getRootAsMessage(copy).header(schema); header.header(schema); From 29b05f009e0ba5a16b2c2765bc7be5ee8be69e9b Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Mon, 1 Jul 2024 09:38:54 -0500 Subject: [PATCH 104/219] Implement snapshot requests --- .../ChunkListInputStreamGenerator.java | 6 +- .../VarListChunkInputStreamGenerator.java | 3 +- .../VectorChunkInputStreamGenerator.java | 3 +- .../api/barrage/WebBarrageStreamReader.java | 3 +- .../barrage/data/WebBarrageSubscription.java | 6 +- .../AbstractTableSubscription.java | 68 ++++++---- .../TableViewportSubscription.java | 117 +++++++++++++++++- .../web/client/api/tree/JsTreeTable.java | 15 ++- .../web/client/state/ClientTableState.java | 8 +- 9 files changed, 184 insertions(+), 45 deletions(-) diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/ChunkListInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/ChunkListInputStreamGenerator.java index 11606dc2178..fc4ac399924 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/ChunkListInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/ChunkListInputStreamGenerator.java @@ -27,11 +27,13 @@ public ChunkListInputStreamGenerator(Class type, Class componentType, List long rowOffset = 0; for (int i = 0; i < data.size(); ++i) { final Chunk valuesChunk = data.get(i); - generators[i] = ChunkInputStreamGenerator.makeInputStreamGenerator(chunkType, type, componentType, valuesChunk, rowOffset); + generators[i] = ChunkInputStreamGenerator.makeInputStreamGenerator(chunkType, type, componentType, + valuesChunk, rowOffset); rowOffset += valuesChunk.size(); } this.generators = Arrays.asList(generators); - emptyGenerator = ChunkInputStreamGenerator.makeInputStreamGenerator(chunkType, type, componentType, chunkType.getEmptyChunk(), 0); + emptyGenerator = ChunkInputStreamGenerator.makeInputStreamGenerator(chunkType, type, componentType, + chunkType.getEmptyChunk(), 0); } public List generators() { diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java index 
e49f6dd5c26..c7daaa55a0e 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VarListChunkInputStreamGenerator.java @@ -54,7 +54,8 @@ private synchronized void computePayload() { offsets = WritableIntChunk.makeWritableChunk(chunk.size() + 1); final WritableChunk innerChunk = kernel.expand(chunk, offsets); - innerGenerator = ChunkInputStreamGenerator.makeInputStreamGenerator(chunkType, myType, myComponentType, innerChunk, 0); + innerGenerator = + ChunkInputStreamGenerator.makeInputStreamGenerator(chunkType, myType, myComponentType, innerChunk, 0); } @Override diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java index d302e571ebe..74f7c68ae2c 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/VectorChunkInputStreamGenerator.java @@ -53,7 +53,8 @@ private synchronized void computePayload() { offsets = WritableIntChunk.makeWritableChunk(chunk.size() + 1); final WritableChunk innerChunk = kernel.expand(chunk, offsets); - innerGenerator = ChunkInputStreamGenerator.makeInputStreamGenerator(chunkType, componentType, innerComponentType, innerChunk, 0); + innerGenerator = ChunkInputStreamGenerator.makeInputStreamGenerator(chunkType, componentType, + innerComponentType, innerChunk, 0); } @Override diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java index 59fabe40dc3..fd72204866c 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageStreamReader.java @@ -255,7 +255,8 @@ public WebBarrageMessage parseFrom(final StreamReaderOptions options, BitSet exp // fill the chunk with data and assign back into the array mcd.data.set(lastChunkIndex, - readers.get(ci).readChunk(fieldNodeIter, bufferInfoIter, ois, chunk, chunk.size(), numRowsToRead)); + readers.get(ci).readChunk(fieldNodeIter, bufferInfoIter, ois, chunk, chunk.size(), + numRowsToRead)); chunk.setSize(chunk.size() + numRowsToRead); } numModRowsRead += batch.length(); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java index e45c262fd51..0db006dd94d 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java @@ -4,7 +4,6 @@ package io.deephaven.web.client.api.barrage.data; import elemental2.core.JsArray; -import elemental2.dom.DomGlobal; import io.deephaven.chunk.ByteChunk; import io.deephaven.chunk.Chunk; import io.deephaven.chunk.DoubleChunk; @@ -227,6 +226,10 @@ public RangeSet getServerViewport() { return serverViewport; } + public boolean isReversed() { + return serverReverseViewport; + } + /** * Reads a value from the table subscription. 
* @@ -411,7 +414,6 @@ public void applyUpdates(WebBarrageMessage message) { } RangeSet destinationRowSet = getFreeRows(message.rowsIncluded.size()); - DomGlobal.console.log("freeRows", destinationRowSet.toString()); // RangeSet destinationRowSet = new RangeSet(); // message.rowsIncluded.indexIterator().forEachRemaining((long row) -> { // destinationRowSet.addRange(new Range(row, row)); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java index 1e0db55b990..b0e95c940ff 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java @@ -71,7 +71,7 @@ public enum Status { private final ClientTableState state; private final WorkerConnection connection; - private final int rowStyleColumn; + protected final int rowStyleColumn; private JsArray columns; private BitSet columnBitSet; private BarrageSubscriptionOptions options; @@ -217,9 +217,15 @@ private void onDataChanged(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet to protected void notifyUpdate(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet totalMods, ShiftedRange[] shifted) { UpdateEventData detail = new UpdateEventData( - transformRowsetForConsumer(rowsAdded), - transformRowsetForConsumer(rowsRemoved), - transformRowsetForConsumer(totalMods), + barrageSubscription, + rowStyleColumn, + columns, + transformRowsetForConsumer(rowsAdded, barrageSubscription.getServerViewport(), + barrageSubscription.isReversed()), + transformRowsetForConsumer(rowsRemoved, barrageSubscription.getServerViewport(), + barrageSubscription.isReversed()), + transformRowsetForConsumer(totalMods, barrageSubscription.getServerViewport(), + barrageSubscription.isReversed()), barrageSubscription.getServerViewport() != null ? 
null : shifted); CustomEventInit event = CustomEventInit.create(); event.setDetail(detail); @@ -228,11 +234,15 @@ protected void notifyUpdate(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet t @TsInterface @TsName(namespace = "dh") - public class SubscriptionRow implements TableData.Row { + public static class SubscriptionRow implements TableData.Row { + private final WebBarrageSubscription subscription; + private final int rowStyleColumn; protected final long index; public LongWrapper indexCached; - public SubscriptionRow(long index) { + public SubscriptionRow(WebBarrageSubscription subscription, int rowStyleColumn, long index) { + this.subscription = subscription; + this.rowStyleColumn = rowStyleColumn; this.index = index; } @@ -246,7 +256,7 @@ public LongWrapper getIndex() { @Override public Any get(Column column) { - return barrageSubscription.getData(index, column.getIndex()); + return subscription.getData(index, column.getIndex()); } @Override @@ -256,23 +266,26 @@ public Format getFormat(Column column) { String numberFormat = null; String formatString = null; if (column.getStyleColumnIndex() != null) { - cellColors = barrageSubscription.getData(index, column.getStyleColumnIndex()); + cellColors = subscription.getData(index, column.getStyleColumnIndex()); } if (rowStyleColumn != TableData.NO_ROW_FORMAT_COLUMN) { - rowColors = barrageSubscription.getData(index, rowStyleColumn); + rowColors = subscription.getData(index, rowStyleColumn); } if (column.getFormatStringColumnIndex() != null) { - numberFormat = barrageSubscription.getData(index, column.getFormatStringColumnIndex()); + numberFormat = subscription.getData(index, column.getFormatStringColumnIndex()); } if (column.getFormatStringColumnIndex() != null) { - formatString = barrageSubscription.getData(index, column.getFormatStringColumnIndex()); + formatString = subscription.getData(index, column.getFormatStringColumnIndex()); } return new Format(cellColors, rowColors, numberFormat, formatString); } } - public class UpdateEventData implements SubscriptionTableData, ViewportData { + public static class UpdateEventData implements SubscriptionTableData, ViewportData { + protected final WebBarrageSubscription subscription; + private final int rowStyleColumn; + private final JsArray columns; private final JsRangeSet added; private final JsRangeSet removed; private final JsRangeSet modified; @@ -283,7 +296,11 @@ public class UpdateEventData implements SubscriptionTableData, ViewportData { // TODO expose this property only if this is a viewport public double offset; - public UpdateEventData(RangeSet added, RangeSet removed, RangeSet modified, ShiftedRange[] shifted) { + public UpdateEventData(WebBarrageSubscription subscription, int rowStyleColumn, JsArray columns, + RangeSet added, RangeSet removed, RangeSet modified, ShiftedRange[] shifted) { + this.subscription = subscription; + this.rowStyleColumn = rowStyleColumn; + this.columns = columns; this.added = new JsRangeSet(added); this.removed = new JsRangeSet(removed); this.modified = new JsRangeSet(modified); @@ -303,8 +320,9 @@ public Double getOffset() { public JsArray<@TsTypeRef(SubscriptionRow.class) ? 
extends SubscriptionRow> getRows() { if (allRows == null) { allRows = new JsArray<>(); - RangeSet rowSet = barrageSubscription.getCurrentRowSet(); - RangeSet positions = transformRowsetForConsumer(rowSet); + RangeSet rowSet = subscription.getCurrentRowSet(); + RangeSet positions = + transformRowsetForConsumer(rowSet, subscription.getServerViewport(), subscription.isReversed()); positions.indexIterator().forEachRemaining((long index) -> { allRows.push(makeRow(index)); }); @@ -316,7 +334,7 @@ public Double getOffset() { } protected SubscriptionRow makeRow(long index) { - return new SubscriptionRow(index); + return new SubscriptionRow(subscription, rowStyleColumn, index); } @Override @@ -336,7 +354,7 @@ public Any getData(int index, Column column) { @Override public Any getData(long key, Column column) { - return barrageSubscription.getData(key, column.getIndex()); + return subscription.getData(key, column.getIndex()); } @Override @@ -351,16 +369,16 @@ public Format getFormat(long index, Column column) { String numberFormat = null; String formatString = null; if (column.getStyleColumnIndex() != null) { - cellColors = barrageSubscription.getData(index, column.getStyleColumnIndex()); + cellColors = subscription.getData(index, column.getStyleColumnIndex()); } if (rowStyleColumn != NO_ROW_FORMAT_COLUMN) { - rowColors = barrageSubscription.getData(index, rowStyleColumn); + rowColors = subscription.getData(index, rowStyleColumn); } if (column.getFormatStringColumnIndex() != null) { - numberFormat = barrageSubscription.getData(index, column.getFormatStringColumnIndex()); + numberFormat = subscription.getData(index, column.getFormatStringColumnIndex()); } if (column.getFormatStringColumnIndex() != null) { - formatString = barrageSubscription.getData(index, column.getFormatStringColumnIndex()); + formatString = subscription.getData(index, column.getFormatStringColumnIndex()); } return new Format(cellColors, rowColors, numberFormat, formatString); } @@ -387,7 +405,7 @@ public JsRangeSet getModified() { @Override public JsRangeSet getFullIndex() { - return new JsRangeSet(barrageSubscription.getCurrentRowSet()); + return new JsRangeSet(subscription.getCurrentRowSet()); } } @@ -397,9 +415,9 @@ public JsRangeSet getFullIndex() { * @param rowSet the rowset to possibly transform * @return a transformed rowset */ - private RangeSet transformRowsetForConsumer(RangeSet rowSet) { - if (barrageSubscription.getServerViewport() != null) { - return rowSet.subsetForPositions(barrageSubscription.getServerViewport(), false);// TODO reverse + private static RangeSet transformRowsetForConsumer(RangeSet rowSet, @Nullable RangeSet viewport, boolean reversed) { + if (viewport != null) { + return rowSet.subsetForPositions(viewport, reversed); } return rowSet; } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java index e65bc4d75c8..88a44a75ec2 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java @@ -3,6 +3,7 @@ // package io.deephaven.web.client.api.subscription; +import com.google.flatbuffers.FlatBufferBuilder; import com.vertispan.tsdefs.annotations.TsInterface; import com.vertispan.tsdefs.annotations.TsName; import elemental2.dom.CustomEvent; @@ -10,10 +11,21 @@ import elemental2.dom.DomGlobal; 
import elemental2.promise.IThenable; import elemental2.promise.Promise; +import io.deephaven.barrage.flatbuf.BarrageMessageType; +import io.deephaven.barrage.flatbuf.BarrageSnapshotRequest; +import io.deephaven.extensions.barrage.BarrageSnapshotOptions; +import io.deephaven.extensions.barrage.ColumnConversionMode; +import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb.FlightData; +import io.deephaven.util.mutable.MutableLong; import io.deephaven.web.client.api.Column; import io.deephaven.web.client.api.JsRangeSet; import io.deephaven.web.client.api.JsTable; import io.deephaven.web.client.api.TableData; +import io.deephaven.web.client.api.barrage.WebBarrageMessage; +import io.deephaven.web.client.api.barrage.WebBarrageStreamReader; +import io.deephaven.web.client.api.barrage.WebBarrageUtils; +import io.deephaven.web.client.api.barrage.data.WebBarrageSubscription; +import io.deephaven.web.client.api.barrage.stream.BiDiStream; import io.deephaven.web.client.fu.LazyPromise; import io.deephaven.web.shared.data.RangeSet; import io.deephaven.web.shared.data.ShiftedRange; @@ -22,6 +34,11 @@ import jsinterop.annotations.JsOptional; import jsinterop.base.Js; +import java.io.IOException; +import java.util.Collections; + +import static io.deephaven.web.client.api.barrage.WebBarrageUtils.serializeRanges; + /** * Encapsulates event handling around table subscriptions by "cheating" and wrapping up a JsTable instance to do the * real dirty work. This allows a viewport to stay open on the old table if desired, while this one remains open. @@ -118,7 +135,8 @@ protected void notifyUpdate(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet t // } // TODO Rewrite shifts as adds/removed/modifies? in the past we ignored them... - UpdateEventData detail = new UpdateEventData(rowsAdded, rowsRemoved, totalMods, shifted); + UpdateEventData detail = new UpdateEventData(barrageSubscription, rowStyleColumn, getColumns(), rowsAdded, + rowsRemoved, totalMods, shifted); detail.offset = this.serverViewport.getFirstRow(); this.viewportData = detail; CustomEventInit event = CustomEventInit.create(); @@ -237,6 +255,9 @@ public void internalClose() { // indicate that the base table shouldn't get events anymore, even if it is still retained elsewhere originalActive = false; + if (retained) { + return; + } // if (retained || status == Status.DONE) { // // the JsTable has indicated it is no longer interested in this viewport, but other calling // // code has retained it, keep it open for now. 
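Editor's note on the snapshot() hunk that follows: it implements one-shot Barrage snapshots over Flight DoExchange. The client opens a bidirectional stream, sends a single FlightData message whose app_metadata wraps a BarrageSnapshotRequest flatbuffer, flattens each arriving record batch into a local WebBarrageSubscription copy, and settles the promise when the server ends the stream. A condensed sketch of that lifecycle, with names taken from the patch; this is an outline of the flow, not a drop-in implementation:

    // open the bidi stream; browser transports fall back to open/next calls
    BiDiStream<FlightData, FlightData> doExchange = connection().streamFactory().create(...);
    // parse each batch and apply it to the local snapshot copy
    doExchange.onData(data -> {
        try {
            WebBarrageMessage message = reader.parseFrom(options, null, state().chunkTypes(),
                    state().columnTypes(), state().componentTypes(), data);
            if (message != null) {
                snapshot.applyUpdates(message); // rowsets are first rewritten to be flat
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    });
    // settle the promise once the server half of the stream completes
    doExchange.onEnd(status -> {
        if (status.isOk()) {
            resolve.onInvoke(/* assemble UpdateEventData from the snapshot */ null);
        } else {
            reject.onInvoke(status);
        }
    });
    doExchange.send(payload); // exactly one request message
    doExchange.end();         // half-close the client side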
@@ -294,9 +315,99 @@ public double size() { public Promise snapshot(JsRangeSet rows, Column[] columns) { retainForExternalUse(); // TODO #1039 slice rows and drop columns + BarrageSnapshotOptions options = BarrageSnapshotOptions.builder() + .batchSize(WebBarrageSubscription.BATCH_SIZE) + .maxMessageSize(WebBarrageSubscription.MAX_MESSAGE_SIZE) + .columnConversionMode(ColumnConversionMode.Stringify) + .useDeephavenNulls(true) + .build(); + + WebBarrageSubscription snapshot = + WebBarrageSubscription.subscribe(state(), (serverViewport1, serverColumns, serverReverseViewport) -> { + }, (rowsAdded, rowsRemoved, totalMods, shifted, modifiedColumnSet) -> { + }); + + WebBarrageStreamReader reader = new WebBarrageStreamReader(); + return new Promise<>((resolve, reject) -> { + + BiDiStream doExchange = connection().streamFactory().create( + headers -> connection().flightServiceClient().doExchange(headers), + (first, headers) -> connection().browserFlightServiceClient().openDoExchange(first, headers), + (next, headers, c) -> connection().browserFlightServiceClient().nextDoExchange(next, headers, + c::apply), + new FlightData()); + MutableLong rowsReceived = new MutableLong(0); + doExchange.onData(data -> { + WebBarrageMessage message; + try { + message = reader.parseFrom(options, null, state().chunkTypes(), state().columnTypes(), + state().componentTypes(), data); + } catch (IOException e) { + throw new RuntimeException(e); + } + if (message != null) { + // Replace rowsets with flat versions + long resultSize = message.rowsIncluded.size(); + message.rowsAdded = RangeSet.ofRange(rowsReceived.get(), rowsReceived.get() + resultSize - 1); + message.rowsIncluded = message.rowsAdded; + rowsReceived.add(resultSize); + + // Update our table data with the complete message + snapshot.applyUpdates(message); + } + }); + FlightData payload = new FlightData(); + final FlatBufferBuilder metadata = new FlatBufferBuilder(); + + int colOffset = 0; + if (columns != null) { + colOffset = + BarrageSnapshotRequest.createColumnsVector(metadata, state().makeBitset(columns).toByteArray()); + } + int vpOffset = BarrageSnapshotRequest.createViewportVector(metadata, + serializeRanges(Collections.singleton(rows.getRange()))); + int optOffset = 0; + if (options != null) { + optOffset = options.appendTo(metadata); + } + + final int ticOffset = BarrageSnapshotRequest.createTicketVector(metadata, + Js.uncheckedCast(state().getHandle().getTicket())); + BarrageSnapshotRequest.startBarrageSnapshotRequest(metadata); + BarrageSnapshotRequest.addColumns(metadata, colOffset); + BarrageSnapshotRequest.addViewport(metadata, vpOffset); + BarrageSnapshotRequest.addSnapshotOptions(metadata, optOffset); + BarrageSnapshotRequest.addTicket(metadata, ticOffset); + BarrageSnapshotRequest.addReverseViewport(metadata, false); + metadata.finish(BarrageSnapshotRequest.endBarrageSnapshotRequest(metadata)); + + // final FlatBufferBuilder wrapper = new FlatBufferBuilder(); + // final int innerOffset = wrapper.createByteVector(metadata.dataBuffer()); + // wrapper.finish(BarrageMessageWrapper.createBarrageMessageWrapper( + // wrapper, + // BarrageUtil.FLATBUFFER_MAGIC, + // BarrageMessageType.BarrageSnapshotRequest, + // innerOffset)); + + + payload.setAppMetadata(WebBarrageUtils.wrapMessage(metadata, BarrageMessageType.BarrageSnapshotRequest)); + doExchange.onEnd(status -> { + if (status.isOk()) { + // notify the caller that the snapshot is finished + resolve.onInvoke(new UpdateEventData(snapshot, rowStyleColumn, Js.uncheckedCast(columns), + 
RangeSet.ofRange(0, rowsReceived.get() - 1), + RangeSet.empty(), + RangeSet.empty(), + null)); + } else { + reject.onInvoke(status); + } + }); + + doExchange.send(payload); + doExchange.end(); - - return null; + }); } /** diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java index d6c91fc7253..0747bc7d4c2 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java @@ -27,6 +27,7 @@ import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.ticket_pb.TypedTicket; import io.deephaven.web.client.api.*; import io.deephaven.web.client.api.barrage.WebBarrageUtils; +import io.deephaven.web.client.api.barrage.data.WebBarrageSubscription; import io.deephaven.web.client.api.barrage.def.ColumnDefinition; import io.deephaven.web.client.api.barrage.def.InitialTableDefinition; import io.deephaven.web.client.api.barrage.stream.ResponseStreamWrapper; @@ -382,8 +383,9 @@ public class TreeViewportData extends AbstractTableSubscription.UpdateEventData // private final JsArray columns; private final int constituentDepth; - private TreeViewportData(RangeSet added, RangeSet removed, RangeSet modified, ShiftedRange[] shifted) { - super(added, removed, modified, shifted); + private TreeViewportData(WebBarrageSubscription subscription, int rowStyleColumn, JsArray columns, + RangeSet added, RangeSet removed, RangeSet modified, ShiftedRange[] shifted) { + super(subscription, rowStyleColumn, columns, added, removed, modified, shifted); // this.offset = offset; @@ -506,7 +508,7 @@ public Format getFormat(long index, Column column) { @Override protected SubscriptionRow makeRow(long index) { - return new TreeRow(index); + return new TreeRow(subscription, index); } public double getTreeSize() { @@ -522,8 +524,8 @@ public double getTreeSize() { @TsName(namespace = "dh") public class TreeRow extends SubscriptionRow { - public TreeRow(long index) { - super(index); + public TreeRow(WebBarrageSubscription subscription, long index) { + super(subscription, rowStyleColumn, index); } /** @@ -609,7 +611,8 @@ public void setViewport(double firstRow, double lastRow, JsArray columns protected void notifyUpdate(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet totalMods, ShiftedRange[] shifted) { // TODO Rewrite shifts as adds/removed/modifies? in the past we ignored them... - TreeViewportData detail = new TreeViewportData(rowsAdded, rowsRemoved, totalMods, shifted); + TreeViewportData detail = new TreeViewportData(barrageSubscription, rowStyleColumn, getColumns(), rowsAdded, + rowsRemoved, totalMods, shifted); detail.offset = this.serverViewport.getFirstRow(); CustomEventInit event = CustomEventInit.create(); event.setDetail(detail); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java b/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java index 40246775a56..d1f6c8d8bdd 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java @@ -65,10 +65,10 @@ public ChunkType[] chunkTypes() { return ChunkType.Byte; } // JS client holds date objects as objects, not as longs -// if (dataType == DateWrapper.class) { -// // Note that storing ZonedDateTime as a primitive is lossy on the time zone. 
-// return ChunkType.Long; -// } + // if (dataType == DateWrapper.class) { + // // Note that storing ZonedDateTime as a primitive is lossy on the time zone. + // return ChunkType.Long; + // } if (dataType == Long.class || dataType == long.class) { // JS client holds longs as LongWrappers return ChunkType.Object; From 9ee39fefdad5612772f046a488ff8b793eb4a609 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Mon, 1 Jul 2024 09:51:20 -0500 Subject: [PATCH 105/219] Prune dead code no longer required with this update --- .../web/client/api/JsColumnStatistics.java | 4 +- .../client/api/barrage/WebBarrageUtils.java | 482 ------------------ .../web/shared/data/ColumnStatistics.java | 169 ------ .../web/shared/data/DeltaUpdates.java | 174 ------- .../web/shared/data/TableSnapshot.java | 60 --- .../shared/data/TableSubscriptionRequest.java | 76 --- .../deephaven/web/shared/data/Viewport.java | 75 --- .../columns/BigDecimalArrayColumnData.java | 44 -- .../columns/BigIntegerArrayColumnData.java | 44 -- .../data/columns/BooleanArrayColumnData.java | 41 -- .../data/columns/ByteArrayColumnData.java | 41 -- .../data/columns/CharArrayColumnData.java | 41 -- .../web/shared/data/columns/ColumnData.java | 11 - .../data/columns/DoubleArrayColumnData.java | 41 -- .../data/columns/FloatArrayColumnData.java | 41 -- .../data/columns/IntArrayColumnData.java | 41 -- .../columns/LocalDateArrayColumnData.java | 46 -- .../columns/LocalTimeArrayColumnData.java | 45 -- .../data/columns/LongArrayColumnData.java | 41 -- .../data/columns/ShortArrayColumnData.java | 41 -- .../columns/StringArrayArrayColumnData.java | 41 -- .../data/columns/StringArrayColumnData.java | 41 -- 22 files changed, 1 insertion(+), 1639 deletions(-) delete mode 100644 web/shared-beans/src/main/java/io/deephaven/web/shared/data/ColumnStatistics.java delete mode 100644 web/shared-beans/src/main/java/io/deephaven/web/shared/data/DeltaUpdates.java delete mode 100644 web/shared-beans/src/main/java/io/deephaven/web/shared/data/TableSnapshot.java delete mode 100644 web/shared-beans/src/main/java/io/deephaven/web/shared/data/TableSubscriptionRequest.java delete mode 100644 web/shared-beans/src/main/java/io/deephaven/web/shared/data/Viewport.java delete mode 100644 web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/BigDecimalArrayColumnData.java delete mode 100644 web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/BigIntegerArrayColumnData.java delete mode 100644 web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/BooleanArrayColumnData.java delete mode 100644 web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/ByteArrayColumnData.java delete mode 100644 web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/CharArrayColumnData.java delete mode 100644 web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/ColumnData.java delete mode 100644 web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/DoubleArrayColumnData.java delete mode 100644 web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/FloatArrayColumnData.java delete mode 100644 web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/IntArrayColumnData.java delete mode 100644 web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/LocalDateArrayColumnData.java delete mode 100644 web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/LocalTimeArrayColumnData.java delete mode 100644 
web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/LongArrayColumnData.java delete mode 100644 web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/ShortArrayColumnData.java delete mode 100644 web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/StringArrayArrayColumnData.java delete mode 100644 web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/StringArrayColumnData.java diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/JsColumnStatistics.java b/web/client-api/src/main/java/io/deephaven/web/client/api/JsColumnStatistics.java index 7b117047e80..d66034f67d4 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/JsColumnStatistics.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/JsColumnStatistics.java @@ -8,7 +8,6 @@ import com.vertispan.tsdefs.annotations.TsName; import elemental2.core.JsArray; import elemental2.core.JsMap; -import io.deephaven.web.shared.data.ColumnStatistics; import jsinterop.annotations.JsIgnore; import jsinterop.annotations.JsMethod; import jsinterop.annotations.JsProperty; @@ -18,8 +17,7 @@ import java.util.Map; /** - * Javascript wrapper for {@link ColumnStatistics} This class holds the results of a call to generate statistics on a - * table column. + * Represents statistics for a given table column. */ @TsInterface @TsName(name = "ColumnStatistics", namespace = "dh") diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java index d62317e6102..39e85a40f35 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java @@ -7,36 +7,21 @@ import elemental2.core.*; import io.deephaven.barrage.flatbuf.BarrageMessageType; import io.deephaven.barrage.flatbuf.BarrageMessageWrapper; -import io.deephaven.barrage.flatbuf.BarrageModColumnMetadata; -import io.deephaven.barrage.flatbuf.BarrageUpdateMetadata; import io.deephaven.web.client.api.barrage.def.ColumnDefinition; import io.deephaven.web.client.api.barrage.def.InitialTableDefinition; import io.deephaven.web.client.api.barrage.def.TableAttributesDefinition; import io.deephaven.web.shared.data.*; -import io.deephaven.web.shared.data.columns.*; -import jsinterop.base.Js; -import org.apache.arrow.flatbuf.Buffer; -import org.apache.arrow.flatbuf.FieldNode; import org.apache.arrow.flatbuf.KeyValue; import org.apache.arrow.flatbuf.Message; import org.apache.arrow.flatbuf.MessageHeader; -import org.apache.arrow.flatbuf.RecordBatch; import org.apache.arrow.flatbuf.Schema; import org.gwtproject.nio.TypedArrayHelper; -import java.math.BigDecimal; -import java.math.BigInteger; import java.nio.ByteBuffer; -import java.nio.ByteOrder; -import java.nio.IntBuffer; -import java.nio.charset.StandardCharsets; -import java.util.BitSet; import java.util.HashMap; -import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.function.IntFunction; -import java.util.stream.IntStream; /** * Utility to read barrage record batches. @@ -122,51 +107,6 @@ public static Map keyValuePairs(String filterPrefix, double coun return map; } - /** - * Iterator wrapper that allows peeking at the next item, if any. 
- */ - private static class Iter implements Iterator { - private final Iterator wrapped; - private T next; - - private Iter(Iterator wrapped) { - this.wrapped = wrapped; - } - - public T peek() { - if (next != null) { - return next; - } - return next = next(); - } - - @Override - public boolean hasNext() { - return next != null || wrapped.hasNext(); - } - - @Override - public T next() { - if (next == null) { - return wrapped.next(); - } - T val = next; - next = null; - return val; - } - } - - public static Uint8Array makeUint8ArrayFromBitset(BitSet bitset) { - int length = (bitset.previousSetBit(Integer.MAX_VALUE - 1) + 8) / 8; - Uint8Array array = new Uint8Array(length); - byte[] bytes = bitset.toByteArray(); - for (int i = 0; i < bytes.length; i++) { - array.setAt(i, (double) bytes[i]); - } - - return array; - } - public static ByteBuffer serializeRanges(Set rangeSets) { final RangeSet s; if (rangeSets.isEmpty()) { @@ -182,426 +122,4 @@ public static ByteBuffer serializeRanges(Set rangeSets) { return CompressedRangeSetReader.writeRange(s); } - - public static ByteBuffer typedArrayToAlignedLittleEndianByteBuffer(TypedArray data) { - // Slice before wrapping to align contents - // TODO potentially only do this if not aligned already - ByteBuffer bb = TypedArrayHelper.wrap(data.slice()); - bb.order(ByteOrder.LITTLE_ENDIAN); - return bb; - } - - public static TableSnapshot createSnapshot(RecordBatch header, ByteBuffer body, BarrageUpdateMetadata barrageUpdate, - boolean isViewport, String[] columnTypes) { - RangeSet added; - - final RangeSet includedAdditions; - if (barrageUpdate == null) { - includedAdditions = added = RangeSet.ofRange(0, (long) (header.length() - 1)); - } else { - added = new CompressedRangeSetReader() - .read(barrageUpdate.addedRowsAsByteBuffer()); - - ByteBuffer addedRowsIncluded = barrageUpdate.addedRowsIncludedAsByteBuffer(); - if (isViewport && addedRowsIncluded != null) { - includedAdditions = new CompressedRangeSetReader() - .read(addedRowsIncluded); - } else { - // if this isn't a viewport, then a second index isn't sent, because all rows are included - includedAdditions = added; - } - } - - // read the nodes and buffers into iterators so that we can descend into the data columns as necessary - Iter nodes = - new Iter<>(IntStream.range(0, (int) header.nodesLength()).mapToObj(header::nodes).iterator()); - Iter buffers = - new Iter<>(IntStream.range(0, (int) header.buffersLength()).mapToObj(header::buffers).iterator()); - ColumnData[] columnData = new ColumnData[columnTypes.length]; - for (int columnIndex = 0; columnIndex < columnTypes.length; ++columnIndex) { - columnData[columnIndex] = - readArrowBuffer(body, nodes, buffers, (int) includedAdditions.size(), columnTypes[columnIndex]); - } - - return new TableSnapshot(includedAdditions, columnData, added.size()); - } - - public static DeltaUpdatesBuilder deltaUpdates(BarrageUpdateMetadata barrageUpdate, boolean isViewport, - String[] columnTypes) { - return new DeltaUpdatesBuilder(barrageUpdate, isViewport, columnTypes); - } - - public static class DeltaUpdatesBuilder { - private final DeltaUpdates deltaUpdates = new DeltaUpdates(); - private final BarrageUpdateMetadata barrageUpdate; - private final String[] columnTypes; - private long numAddRowsRemaining = 0; - private long numModRowsRemaining = 0; - - public DeltaUpdatesBuilder(BarrageUpdateMetadata barrageUpdate, boolean isViewport, String[] columnTypes) { - this.barrageUpdate = barrageUpdate; - this.columnTypes = columnTypes; - - deltaUpdates.setAdded(new 
CompressedRangeSetReader() - .read(barrageUpdate.addedRowsAsByteBuffer())); - deltaUpdates.setRemoved(new CompressedRangeSetReader() - .read(barrageUpdate.removedRowsAsByteBuffer())); - - deltaUpdates.setShiftedRanges(ShiftedRangeReader.read(barrageUpdate.shiftDataAsByteBuffer())); - - RangeSet includedAdditions; - - ByteBuffer addedRowsIncluded = barrageUpdate.addedRowsIncludedAsByteBuffer(); - if (isViewport && addedRowsIncluded != null) { - includedAdditions = new CompressedRangeSetReader() - .read(addedRowsIncluded); - } else { - // if this isn't a viewport, then a second index isn't sent, because all rows are included - includedAdditions = deltaUpdates.getAdded(); - } - numAddRowsRemaining = includedAdditions.size(); - deltaUpdates.setIncludedAdditions(includedAdditions); - deltaUpdates.setSerializedAdditions(new DeltaUpdates.ColumnAdditions[0]); - deltaUpdates.setSerializedModifications(new DeltaUpdates.ColumnModifications[0]); - - for (int columnIndex = 0; columnIndex < columnTypes.length; ++columnIndex) { - BarrageModColumnMetadata columnMetadata = barrageUpdate.modColumnNodes(columnIndex); - RangeSet modifiedRows = new CompressedRangeSetReader() - .read(columnMetadata.modifiedRowsAsByteBuffer()); - numModRowsRemaining = Math.max(numModRowsRemaining, modifiedRows.size()); - } - } - - /** - * Appends a new record batch and payload. Returns true if this was the final record batch that was expected. - */ - public boolean appendRecordBatch(RecordBatch recordBatch, ByteBuffer body) { - if (numAddRowsRemaining > 0) { - handleAddBatch(recordBatch, body); - } else if (numModRowsRemaining > 0) { - handleModBatch(recordBatch, body); - } - // return true when complete - return numAddRowsRemaining == 0 && numModRowsRemaining == 0; - } - - private void handleAddBatch(RecordBatch recordBatch, ByteBuffer body) { - Iter nodes = new Iter<>( - IntStream.range(0, (int) recordBatch.nodesLength()).mapToObj(recordBatch::nodes).iterator()); - Iter buffers = new Iter<>( - IntStream.range(0, (int) recordBatch.buffersLength()).mapToObj(recordBatch::buffers).iterator()); - - DeltaUpdates.ColumnAdditions[] addedColumnData = new DeltaUpdates.ColumnAdditions[columnTypes.length]; - for (int columnIndex = 0; columnIndex < columnTypes.length; ++columnIndex) { - assert nodes.hasNext() && buffers.hasNext(); - ColumnData columnData = readArrowBuffer(body, nodes, buffers, (int) nodes.peek().length(), - columnTypes[columnIndex]); - - addedColumnData[columnIndex] = new DeltaUpdates.ColumnAdditions(columnIndex, columnData); - } - deltaUpdates.setSerializedAdditions(addedColumnData); - numAddRowsRemaining -= (long) recordBatch.length(); - } - - private void handleModBatch(RecordBatch recordBatch, ByteBuffer body) { - Iter nodes = new Iter<>( - IntStream.range(0, (int) recordBatch.nodesLength()).mapToObj(recordBatch::nodes).iterator()); - Iter buffers = new Iter<>( - IntStream.range(0, (int) recordBatch.buffersLength()).mapToObj(recordBatch::buffers).iterator()); - - DeltaUpdates.ColumnModifications[] modifiedColumnData = - new DeltaUpdates.ColumnModifications[columnTypes.length]; - for (int columnIndex = 0; columnIndex < columnTypes.length; ++columnIndex) { - assert nodes.hasNext() && buffers.hasNext(); - - BarrageModColumnMetadata columnMetadata = barrageUpdate.modColumnNodes(columnIndex); - RangeSet modifiedRows = new CompressedRangeSetReader() - .read(columnMetadata.modifiedRowsAsByteBuffer()); - - ColumnData columnData = readArrowBuffer(body, nodes, buffers, (int) nodes.peek().length(), - columnTypes[columnIndex]); - 
modifiedColumnData[columnIndex] = - new DeltaUpdates.ColumnModifications(columnIndex, modifiedRows, columnData); - } - deltaUpdates.setSerializedModifications(modifiedColumnData); - numModRowsRemaining -= (long) recordBatch.length(); - } - - public DeltaUpdates build() { - return deltaUpdates; - } - } - - private static ColumnData readArrowBuffer(ByteBuffer data, Iter nodes, Iter buffers, int size, - String columnType) { - // explicit cast to be clear that we're rounding down - BitSet valid = readValidityBufferAsBitset(data, size, buffers.next()); - FieldNode thisNode = nodes.next(); - boolean hasNulls = thisNode.nullCount() != 0; - size = Math.min(size, (int) thisNode.length()); - - Buffer positions = buffers.next(); - switch (columnType) { - // for simple well-supported typedarray types, wrap and return - case "int": { - assert positions.length() >= size * 4L; - ArrayBufferView view = TypedArrayHelper.unwrap(data); - Int32Array intArray = new Int32Array(view.buffer, (int) (view.byteOffset + positions.offset()), size); - return new IntArrayColumnData(Js.uncheckedCast(intArray)); - } - case "short": { - assert positions.length() >= size * 2L; - ArrayBufferView view = TypedArrayHelper.unwrap(data); - Int16Array shortArray = new Int16Array(view.buffer, (int) (view.byteOffset + positions.offset()), size); - return new ShortArrayColumnData(Js.uncheckedCast(shortArray)); - } - case "boolean": - case "java.lang.Boolean": { - assert positions.length() >= ((size + 63) / 64); - // booleans are stored as a bitset, but internally we represent booleans as bytes - data.position((int) positions.offset()); - BitSet wireValues = readBitSetWithLength(data, (int) (positions.length())); - Boolean[] boolArray = new Boolean[size]; - for (int i = 0; i < size; ++i) { - if (!hasNulls || valid.get(i)) { - boolArray[i] = wireValues.get(i); - } else { - boolArray[i] = null; - } - } - return new BooleanArrayColumnData(boolArray); - } - case "byte": { - assert positions.length() >= size; - ArrayBufferView view = TypedArrayHelper.unwrap(data); - Int8Array byteArray = - new Int8Array(view.buffer, (int) (view.byteOffset + positions.offset()), size); - return new ByteArrayColumnData(Js.uncheckedCast(byteArray)); - } - case "double": { - assert positions.length() >= size * 8L; - ArrayBufferView view = TypedArrayHelper.unwrap(data); - Float64Array doubleArray = new Float64Array(view.buffer, - (int) (view.byteOffset + positions.offset()), size); - return new DoubleArrayColumnData(Js.uncheckedCast(doubleArray)); - } - case "float": { - assert positions.length() >= size * 4L; - ArrayBufferView view = TypedArrayHelper.unwrap(data); - Float32Array floatArray = new Float32Array(view.buffer, - (int) (view.byteOffset + positions.offset()), size); - return new FloatArrayColumnData(Js.uncheckedCast(floatArray)); - } - case "char": { - assert positions.length() >= size * 2L; - ArrayBufferView view = TypedArrayHelper.unwrap(data); - Uint16Array charArray = new Uint16Array(view.buffer, - (int) (view.byteOffset + positions.offset()), size); - return new CharArrayColumnData(Js.uncheckedCast(charArray)); - } - // longs are a special case despite being java primitives - case "long": - case "java.time.Instant": - case "java.time.ZonedDateTime": { - assert positions.length() >= size * 8L; - long[] longArray = new long[size]; - - data.position((int) positions.offset()); - for (int i = 0; i < size; i++) { - longArray[i] = data.getLong(); - } - return new LongArrayColumnData(longArray); - } - // all other types are read out in some custom way - 
case "java.time.LocalTime":// LocalDateArrayColumnData - { - assert positions.length() >= size * 6L; - data.position((int) positions.offset()); - LocalDate[] localDateArray = new LocalDate[size]; - for (int i = 0; i < size; i++) { - int year = data.getInt(); - byte month = data.get(); - byte day = data.get(); - localDateArray[i] = new LocalDate(year, month, day); - } - return new LocalDateArrayColumnData(localDateArray); - } - case "java.time.LocalDate":// LocalTimeArrayColumnData - { - assert positions.length() == size * 7L; - LocalTime[] localTimeArray = new LocalTime[size]; - - data.position((int) positions.offset()); - for (int i = 0; i < size; i++) { - int nano = data.getInt(); - byte hour = data.get(); - byte minute = data.get(); - byte second = data.get(); - data.position(data.position() + 1);// aligned for next read - localTimeArray[i] = new LocalTime(hour, minute, second, nano); - } - return new LocalTimeArrayColumnData(localTimeArray); - } - default: - // remaining types have an offset buffer to read first - IntBuffer offsets = readOffsets(data, size, positions); - - if (columnType.endsWith("[]")) { - FieldNode arrayNode = nodes.next(); - int innerSize = (int) arrayNode.length(); - boolean innerHasNulls = arrayNode.nullCount() != 0; - - // array type, also read the inner valid buffer and inner offset buffer - BitSet innerValid = readValidityBufferAsBitset(data, innerSize, buffers.next()); - IntBuffer innerOffsets = readOffsets(data, innerSize, buffers.next()); - - Buffer payload = buffers.next(); - - switch (columnType) { - case "java.lang.String[]": - String[][] strArrArr = new String[size][]; - - for (int i = 0; i < size; i++) { - if (hasNulls && !valid.get(i)) { - strArrArr[i] = null; - continue; - } - int arrayStart = offsets.get(i); - int instanceSize = offsets.get(i + 1) - arrayStart; - String[] strArr = new String[instanceSize]; - for (int j = 0; j < instanceSize; j++) { - int inner = j + arrayStart; - assert innerOffsets != null; - if (innerHasNulls && !innerValid.get(inner)) { - assert innerOffsets.get(inner) == innerOffsets.get(inner + 1) - : innerOffsets.get(inner) + " == " + innerOffsets.get(inner + 1); - strArr[j] = null; - continue; - } - // might be cheaper to do views on the underlying bb (which will be copied anyway - // into the String) - data.position((int) (payload.offset()) + innerOffsets.get(inner)); - int stringSize = innerOffsets.get(inner + 1) - innerOffsets.get(inner); - byte[] stringBytes = new byte[stringSize]; - data.get(stringBytes); - strArr[j] = new String(stringBytes, StandardCharsets.UTF_8); - } - strArrArr[i] = strArr; - } - - return new StringArrayArrayColumnData(strArrArr); - default: - throw new IllegalStateException("Can't decode column of type " + columnType); - } - - } else { - // non-array, variable length stuff, just grab the buffer and read ranges specified by offsets - Buffer payload = buffers.next(); - - switch (columnType) { - case "java.lang.String": { - String[] stringArray = new String[size]; - byte[] buf = new byte[32]; - for (int i = 0; i < size; i++) { - if (hasNulls && !valid.get(i)) { - stringArray[i] = null; - continue; - } - int ioff = offsets.get(i); - int len = offsets.get(i + 1) - ioff; - data.position((int) (payload.offset()) + ioff); - if (buf.length < len) { - buf = new byte[len]; - } - data.get(buf, 0, len); - stringArray[i] = new String(buf, 0, len, StandardCharsets.UTF_8);// new - // String(Js.uncheckedCast(stringBytes)); - } - return new StringArrayColumnData(stringArray); - } - case "java.math.BigDecimal": { - 
BigDecimal[] bigDecArray = new BigDecimal[size]; - byte[] buf = null; - for (int i = 0; i < size; i++) { - if (hasNulls && !valid.get(i)) { - bigDecArray[i] = null; - continue; - } - int ioff = offsets.get(i); - int len = offsets.get(i + 1) - ioff; - data.position((int) (payload.offset()) + ioff); - int scale = data.getInt(); - len -= 4; - if (buf == null || buf.length != len) { - buf = new byte[len]; - } - bigDecArray[i] = new BigDecimal(readBigInt(data, buf), scale); - } - return new BigDecimalArrayColumnData(bigDecArray); - } - case "java.math.BigInteger": { - BigInteger[] bigIntArray = new BigInteger[size]; - byte[] buf = null; - for (int i = 0; i < size; i++) { - if (hasNulls && !valid.get(i)) { - bigIntArray[i] = null; - continue; - } - int ioff = offsets.get(i); - int len = offsets.get(i + 1) - ioff; - if (buf == null || buf.length != len) { - buf = new byte[len]; - } - data.position((int) (payload.offset()) + ioff); - bigIntArray[i] = readBigInt(data, buf); - } - return new BigIntegerArrayColumnData(bigIntArray); - } - default: - throw new IllegalStateException("Can't decode column of type " + columnType); - } - } - } - } - - private static BigInteger readBigInt(ByteBuffer data, byte[] buf) { - // TODO: Change to the code below when the Java 9 BigInteger(byte[], int, int) constructor is available. - // https://github.com/deephaven/deephaven-core/issues/1626 - // Make the call take an additional len parameter, and make the calling logic reallocate only when - // there is a need to grow, instead of the current need for an exact match. - // - // data.get(buf, 0, len); - // return new BigInteger(buf, 0, len); - data.get(buf); - return new BigInteger(buf); - } - - private static BitSet readValidityBufferAsBitset(ByteBuffer data, int size, Buffer buffer) { - if (size == 0 || buffer.length() == 0) { - // these buffers are optional (and empty) if the column is empty, or if it has primitives and we've allowed - // DH nulls - return new BitSet(0); - } - data.position((int) buffer.offset()); - BitSet valid = readBitSetWithLength(data, (int) (buffer.length())); - return valid; - } - - private static BitSet readBitSetWithLength(ByteBuffer data, int lenInBytes) { - byte[] array = new byte[lenInBytes]; - data.get(array); - - return BitSet.valueOf(array); - } - - private static IntBuffer readOffsets(ByteBuffer data, int size, Buffer buffer) { - if (size == 0) { - IntBuffer emptyOffsets = IntBuffer.allocate(1); - return emptyOffsets; - } - data.position((int) buffer.offset()); - IntBuffer offsets = data.slice().asIntBuffer(); - offsets.limit(size + 1); - return offsets; - } } diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/ColumnStatistics.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/ColumnStatistics.java deleted file mode 100644 index 7a72d3806d5..00000000000 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/ColumnStatistics.java +++ /dev/null @@ -1,169 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.web.shared.data; - -import java.io.Serializable; -import java.util.Arrays; - -/** - * A DTO containing the result data from a call to GenerateComparableStatsFunction or GenerateNumericalStatsFunction - */ -public class ColumnStatistics implements Serializable { - public enum ColumnType { - NUMERIC, COMPARABLE, DATETIME, NON_COMPARABLE, - } - - private ColumnType type; - private long size; - private long count; - - // Data from a GenerateComparableStatsFunction - private int 
numUnique; - private String[] uniqueKeys; - private long[] uniqueValues; - - // Data from a GenerateNumericalStatsFunction - private double sum; - private double absSum; - private double min; - private double max; - private double absMin; - private double absMax; - - // Data from a GenerateDateTimeStatsFunction - private long minDateTime; - private long maxDateTime; - - public ColumnType getType() { - return type; - } - - public void setType(ColumnType type) { - this.type = type; - } - - public long getSize() { - return size; - } - - public void setSize(final long size) { - this.size = size; - } - - public long getCount() { - return count; - } - - public void setCount(long count) { - this.count = count; - } - - public int getNumUnique() { - return numUnique; - } - - public void setNumUnique(int numUnique) { - this.numUnique = numUnique; - } - - public String[] getUniqueKeys() { - return uniqueKeys; - } - - public void setUniqueKeys(String[] uniqueKeys) { - this.uniqueKeys = uniqueKeys; - } - - public long[] getUniqueValues() { - return uniqueValues; - } - - public void setUniqueValues(long[] uniqueValues) { - this.uniqueValues = uniqueValues; - } - - public double getSum() { - return sum; - } - - public void setSum(double sum) { - this.sum = sum; - } - - public double getAbsSum() { - return absSum; - } - - public void setAbsSum(double absSum) { - this.absSum = absSum; - } - - public double getMin() { - return min; - } - - public void setMin(double min) { - this.min = min; - } - - public double getMax() { - return max; - } - - public void setMax(double max) { - this.max = max; - } - - public double getAbsMin() { - return absMin; - } - - public void setAbsMin(double absMin) { - this.absMin = absMin; - } - - public double getAbsMax() { - return absMax; - } - - public void setAbsMax(double absMax) { - this.absMax = absMax; - } - - public long getMinDateTime() { - return minDateTime; - } - - public void setMinDateTime(final long minDateTime) { - this.minDateTime = minDateTime; - } - - public long getMaxDateTime() { - return maxDateTime; - } - - public void setMaxDateTime(final long maxDateTime) { - this.maxDateTime = maxDateTime; - } - - @Override - public String toString() { - return "ColumnStatistics{" + - "type=" + type + - ", size=" + size + - ", count=" + count + - ", numUnique=" + numUnique + - ", uniqueKeys=" + Arrays.toString(uniqueKeys) + - ", uniqueValues=" + Arrays.toString(uniqueValues) + - ", sum=" + sum + - ", absSum=" + absSum + - ", min=" + min + - ", max=" + max + - ", absMin=" + absMin + - ", absMax=" + absMax + - ", minDateTime=" + minDateTime + - ", maxDateTime=" + maxDateTime + - '}'; - } -} diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/DeltaUpdates.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/DeltaUpdates.java deleted file mode 100644 index f6e3e59a753..00000000000 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/DeltaUpdates.java +++ /dev/null @@ -1,174 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.web.shared.data; - -import io.deephaven.web.shared.data.columns.ColumnData; - -import java.io.Serializable; - -public class DeltaUpdates implements Serializable { - public static class ColumnAdditions implements Serializable { - private int columnIndex; - private ColumnData values; - - public ColumnAdditions() {} - - public ColumnAdditions(int colIndex, ColumnData values) { - setColumnIndex(colIndex); - setValues(values); - } - - public int 
getColumnIndex() { - return columnIndex; - } - - public void setColumnIndex(final int columnIndex) { - this.columnIndex = columnIndex; - } - - public ColumnData getValues() { - return values; - } - - public void setValues(final ColumnData values) { - this.values = values; - } - } - public static class ColumnModifications implements Serializable { - private int columnIndex; - private RangeSet rowsIncluded; - public ColumnData values; - - public ColumnModifications() {} - - public ColumnModifications(int columnIndex, RangeSet includedModifications, ColumnData columnData) { - setColumnIndex(columnIndex); - setRowsIncluded(includedModifications); - setValues(columnData); - } - - public int getColumnIndex() { - return columnIndex; - } - - public void setColumnIndex(final int columnIndex) { - this.columnIndex = columnIndex; - } - - public RangeSet getRowsIncluded() { - return rowsIncluded; - } - - public void setRowsIncluded(final RangeSet rowsIncluded) { - this.rowsIncluded = rowsIncluded; - } - - public ColumnData getValues() { - return values; - } - - public void setValues(final ColumnData values) { - this.values = values; - } - } - - private long deltaSequence; - private long firstStep; - private long lastStep; - - private RangeSet added; - private RangeSet removed; - - private ShiftedRange[] shiftedRanges; - - private RangeSet includedAdditions; - - private ColumnAdditions[] serializedAdditions; - private ColumnModifications[] serializedModifications; - - public DeltaUpdates() {} - - public DeltaUpdates(RangeSet added, RangeSet removed, ShiftedRange[] shifted, RangeSet includedAdditions, - ColumnAdditions[] addedColumnData, ColumnModifications[] modifiedColumnData) { - setAdded(added); - setRemoved(removed); - setShiftedRanges(shifted); - setIncludedAdditions(includedAdditions); - setSerializedAdditions(addedColumnData); - setSerializedModifications(modifiedColumnData); - } - - - public long getDeltaSequence() { - return deltaSequence; - } - - public void setDeltaSequence(final long deltaSequence) { - this.deltaSequence = deltaSequence; - } - - public long getFirstStep() { - return firstStep; - } - - public void setFirstStep(final long firstStep) { - this.firstStep = firstStep; - } - - public long getLastStep() { - return lastStep; - } - - public void setLastStep(final long lastStep) { - this.lastStep = lastStep; - } - - public RangeSet getAdded() { - return added; - } - - public void setAdded(final RangeSet added) { - this.added = added; - } - - public RangeSet getRemoved() { - return removed; - } - - public void setRemoved(final RangeSet removed) { - this.removed = removed; - } - - public ShiftedRange[] getShiftedRanges() { - return shiftedRanges; - } - - public void setShiftedRanges(final ShiftedRange[] shiftedRanges) { - this.shiftedRanges = shiftedRanges; - } - - public RangeSet getIncludedAdditions() { - return includedAdditions; - } - - public void setIncludedAdditions(final RangeSet includedAdditions) { - this.includedAdditions = includedAdditions; - } - - public ColumnAdditions[] getSerializedAdditions() { - return serializedAdditions; - } - - public void setSerializedAdditions(final ColumnAdditions[] serializedAdditions) { - this.serializedAdditions = serializedAdditions; - } - - public ColumnModifications[] getSerializedModifications() { - return serializedModifications; - } - - public void setSerializedModifications(final ColumnModifications[] serializedModifications) { - this.serializedModifications = serializedModifications; - } -} diff --git 
a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/TableSnapshot.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/TableSnapshot.java deleted file mode 100644 index ae5f8870184..00000000000 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/TableSnapshot.java +++ /dev/null @@ -1,60 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.web.shared.data; - -import io.deephaven.web.shared.data.columns.ColumnData; - -import java.io.Serializable; - -public class TableSnapshot implements Serializable { - public enum SnapshotType { - INITIAL_SNAPSHOT, UPDATE_COLUMN_SNAPSHOT, UPDATE_ROW_SNAPSHOT, FORCED_SNAPSHOT, - } - - private SnapshotType snapshotType; - - private ColumnData[] dataColumns; - private RangeSet includedRows; - - private long tableSize; - - public TableSnapshot() {} - - public TableSnapshot(RangeSet includedAdditions, ColumnData[] dataColumns, long tableSize) { - this.snapshotType = SnapshotType.INITIAL_SNAPSHOT; - - this.dataColumns = dataColumns; - this.includedRows = includedAdditions; - - this.tableSize = tableSize; - } - - public SnapshotType getSnapshotType() { - return snapshotType; - } - - public ColumnData[] getDataColumns() { - return dataColumns; - } - - public void setDataColumns(ColumnData[] dataColumns) { - this.dataColumns = dataColumns; - } - - public RangeSet getIncludedRows() { - return includedRows; - } - - public void setIncludedRows(RangeSet includedRows) { - this.includedRows = includedRows; - } - - public void setTableSize(long tableSize) { - this.tableSize = tableSize; - } - - public long getTableSize() { - return tableSize; - } -} diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/TableSubscriptionRequest.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/TableSubscriptionRequest.java deleted file mode 100644 index 3dc940db123..00000000000 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/TableSubscriptionRequest.java +++ /dev/null @@ -1,76 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.web.shared.data; - -import javax.annotation.Nullable; -import java.io.Serializable; -import java.util.BitSet; - -/** - * A class to encapsulate updates to a given table's subscription. - * - * Because the server is now managing the creation of a "tail table" to only subscribe to updates after all filters have - * run, we are giving the server a bit more rights w.r.t. subscription state management. - * - * The client will now send an array of ViewportSubscription, each containing a subscriptionId (JsTable id) and a - * {@link Viewport} object. - * - * The server will be responsible for merging ranges and columns to create flattened tables with the desired viewports. - * - * For now, we'll likely preserve "bunch them all together" semantics, but we should do performance testing to identify - * if we'll get better performance from having multiple tables of smaller viewport scope (more messages on the wire, but - * less work to do before sending messages). - * - * The {@link #columns} must be non-null (and almost always non-empty), but {@link #rows} may be null to indicate a - * non-viewport subscription. 
- */ -public class TableSubscriptionRequest implements Serializable { - - private int subscriptionId; - private RangeSet rows; - private BitSet columns; - - public TableSubscriptionRequest() { - - } - - public TableSubscriptionRequest(int subscriptionId, @Nullable RangeSet rows, BitSet columns) { - this.subscriptionId = subscriptionId; - this.rows = rows; - this.columns = columns; - } - - public int getSubscriptionId() { - return subscriptionId; - } - - public RangeSet getRows() { - return rows; - } - - public BitSet getColumns() { - return columns; - } - - void setSubscriptionId(int subscriptionId) { - this.subscriptionId = subscriptionId; - } - - void setRows(RangeSet rows) { - this.rows = rows; - } - - void setColumns(BitSet columns) { - this.columns = columns; - } - - @Override - public String toString() { - return "TableSubscriptionRequest{" + - "subscriptionId=" + subscriptionId + - ", rows=" + rows + - ", columns=" + columns + - '}'; - } -} diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Viewport.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Viewport.java deleted file mode 100644 index bdc19ce9e39..00000000000 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Viewport.java +++ /dev/null @@ -1,75 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.web.shared.data; - -import java.io.Serializable; -import java.util.BitSet; - -public class Viewport implements Serializable { - - private final RangeSet rows; - private final BitSet columns; - - public Viewport() { - this(new RangeSet(), new BitSet()); - } - - public Viewport(RangeSet rows, BitSet columns) { - this.rows = rows; - this.columns = columns; - } - - public Viewport merge(Viewport other) { - RangeSet mergedRows = new RangeSet(); - rows.rangeIterator().forEachRemaining(mergedRows::addRange); - other.rows.rangeIterator().forEachRemaining(mergedRows::addRange); - - BitSet mergedColumns = new BitSet(); - mergedColumns.or(columns); - mergedColumns.or(other.columns); - - return new Viewport(mergedRows, mergedColumns); - } - - public RangeSet getRows() { - return rows; - } - - public BitSet getColumns() { - return columns; - } - - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - - final Viewport viewport = (Viewport) o; - - if (!rows.equals(viewport.rows)) - return false; - return columns.equals(viewport.columns); - } - - @Override - public int hashCode() { - int result = rows.hashCode(); - result = 31 * result + columns.hashCode(); - return result; - } - - @Override - public String toString() { - return "Viewport{" + - "rows=" + rows + - ", columns=" + columns + - '}'; - } - - public boolean isEmpty() { - return rows.size() == 0 && columns.isEmpty(); - } -} diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/BigDecimalArrayColumnData.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/BigDecimalArrayColumnData.java deleted file mode 100644 index 9a6bed67823..00000000000 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/BigDecimalArrayColumnData.java +++ /dev/null @@ -1,44 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.web.shared.data.columns; - -import java.math.BigDecimal; -import java.util.Arrays; - -/** - * Holder for data associated with a column of type java.math.BigDecimal. 
- */ -public class BigDecimalArrayColumnData extends ColumnData { - private BigDecimal[] data; - - public BigDecimalArrayColumnData() {} - - public BigDecimalArrayColumnData(BigDecimal[] data) { - this.data = data; - } - - public BigDecimal[] getData() { - return data; - } - - public void setData(BigDecimal[] data) { - this.data = data; - } - - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - final BigDecimalArrayColumnData that = (BigDecimalArrayColumnData) o; - return Arrays.deepEquals(data, that.data); - } - - @Override - public int hashCode() { - return Arrays.deepHashCode(data); - } -} - diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/BigIntegerArrayColumnData.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/BigIntegerArrayColumnData.java deleted file mode 100644 index b9eb9db11fc..00000000000 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/BigIntegerArrayColumnData.java +++ /dev/null @@ -1,44 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.web.shared.data.columns; - -import java.math.BigInteger; -import java.util.Arrays; - -/** - * Holder for data associated with a column of type java.math.BigInteger. - */ -public class BigIntegerArrayColumnData extends ColumnData { - private BigInteger[] data; - - public BigIntegerArrayColumnData() {} - - public BigIntegerArrayColumnData(BigInteger[] data) { - this.data = data; - } - - public BigInteger[] getData() { - return data; - } - - public void setData(BigInteger[] data) { - this.data = data; - } - - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - final BigIntegerArrayColumnData that = (BigIntegerArrayColumnData) o; - return Arrays.deepEquals(data, that.data); - } - - @Override - public int hashCode() { - return Arrays.deepHashCode(data); - } -} - diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/BooleanArrayColumnData.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/BooleanArrayColumnData.java deleted file mode 100644 index 487c6592ca2..00000000000 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/BooleanArrayColumnData.java +++ /dev/null @@ -1,41 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.web.shared.data.columns; - -import java.util.Arrays; - -public class BooleanArrayColumnData extends ColumnData { - private Boolean[] data; - - public BooleanArrayColumnData() {} - - public BooleanArrayColumnData(Boolean[] data) { - this.data = data; - } - - public Boolean[] getData() { - return data; - } - - public void setData(Boolean[] data) { - this.data = data; - } - - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - - BooleanArrayColumnData that = (BooleanArrayColumnData) o; - - return Arrays.equals(data, that.data); - } - - @Override - public int hashCode() { - return Arrays.hashCode(data); - } -} diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/ByteArrayColumnData.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/ByteArrayColumnData.java deleted file mode 100644 index 7bbe9a1b710..00000000000 --- 
a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/ByteArrayColumnData.java +++ /dev/null @@ -1,41 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.web.shared.data.columns; - -import java.util.Arrays; - -public class ByteArrayColumnData extends ColumnData { - private byte[] data; - - public ByteArrayColumnData() {} - - public ByteArrayColumnData(byte[] data) { - this.data = data; - } - - public byte[] getData() { - return data; - } - - public void setData(byte[] data) { - this.data = data; - } - - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - - ByteArrayColumnData that = (ByteArrayColumnData) o; - - return Arrays.equals(data, that.data); - } - - @Override - public int hashCode() { - return Arrays.hashCode(data); - } -} diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/CharArrayColumnData.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/CharArrayColumnData.java deleted file mode 100644 index 22aae999562..00000000000 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/CharArrayColumnData.java +++ /dev/null @@ -1,41 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.web.shared.data.columns; - -import java.util.Arrays; - -public class CharArrayColumnData extends ColumnData { - private char[] data; - - public CharArrayColumnData() {} - - public CharArrayColumnData(char[] data) { - this.data = data; - } - - public char[] getData() { - return data; - } - - public void setData(char[] data) { - this.data = data; - } - - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - - CharArrayColumnData that = (CharArrayColumnData) o; - - return Arrays.equals(data, that.data); - } - - @Override - public int hashCode() { - return Arrays.hashCode(data); - } -} diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/ColumnData.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/ColumnData.java deleted file mode 100644 index 20817d4672a..00000000000 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/ColumnData.java +++ /dev/null @@ -1,11 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.web.shared.data.columns; - -import java.io.Serializable; - -public abstract class ColumnData implements Serializable { - - public abstract Object getData(); -} diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/DoubleArrayColumnData.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/DoubleArrayColumnData.java deleted file mode 100644 index 6864217cfb4..00000000000 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/DoubleArrayColumnData.java +++ /dev/null @@ -1,41 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.web.shared.data.columns; - -import java.util.Arrays; - -public class DoubleArrayColumnData extends ColumnData { - private double[] data; - - public DoubleArrayColumnData() {} - - public DoubleArrayColumnData(double[] data) { - this.data = data; - } - - public double[] getData() { - return data; - } - - public void setData(double[] data) { - this.data = data; - } - - @Override - 
public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - - DoubleArrayColumnData that = (DoubleArrayColumnData) o; - - return Arrays.equals(data, that.data); - } - - @Override - public int hashCode() { - return Arrays.hashCode(data); - } -} diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/FloatArrayColumnData.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/FloatArrayColumnData.java deleted file mode 100644 index 8fd13a5962f..00000000000 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/FloatArrayColumnData.java +++ /dev/null @@ -1,41 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.web.shared.data.columns; - -import java.util.Arrays; - -public class FloatArrayColumnData extends ColumnData { - private float[] data; - - public FloatArrayColumnData() {} - - public FloatArrayColumnData(float[] data) { - this.data = data; - } - - public float[] getData() { - return data; - } - - public void setData(float[] data) { - this.data = data; - } - - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - - FloatArrayColumnData that = (FloatArrayColumnData) o; - - return Arrays.equals(data, that.data); - } - - @Override - public int hashCode() { - return Arrays.hashCode(data); - } -} diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/IntArrayColumnData.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/IntArrayColumnData.java deleted file mode 100644 index 49b1b2e4d86..00000000000 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/IntArrayColumnData.java +++ /dev/null @@ -1,41 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.web.shared.data.columns; - -import java.util.Arrays; - -public class IntArrayColumnData extends ColumnData { - private int[] data; - - public IntArrayColumnData() {} - - public IntArrayColumnData(int[] data) { - this.data = data; - } - - public int[] getData() { - return data; - } - - public void setData(int[] data) { - this.data = data; - } - - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - - IntArrayColumnData that = (IntArrayColumnData) o; - - return Arrays.equals(data, that.data); - } - - @Override - public int hashCode() { - return Arrays.hashCode(data); - } -} diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/LocalDateArrayColumnData.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/LocalDateArrayColumnData.java deleted file mode 100644 index 4fd4226279e..00000000000 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/LocalDateArrayColumnData.java +++ /dev/null @@ -1,46 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.web.shared.data.columns; - -import io.deephaven.web.shared.data.LocalDate; - -import java.util.Arrays; - -/** - * Holder for data associated with a column of type java.time.LocalDate. This type is serialized with a custom LocalDate - * type for efficiency and GWT compatibility. 
- */ -public class LocalDateArrayColumnData extends ColumnData { - private LocalDate[] data; - - public LocalDateArrayColumnData() {} - - public LocalDateArrayColumnData(LocalDate[] data) { - this.data = data; - } - - public LocalDate[] getData() { - return data; - } - - public void setData(LocalDate[] data) { - this.data = data; - } - - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - final LocalDateArrayColumnData that = (LocalDateArrayColumnData) o; - return Arrays.deepEquals(data, that.data); - } - - @Override - public int hashCode() { - return Arrays.deepHashCode(data); - } -} - diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/LocalTimeArrayColumnData.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/LocalTimeArrayColumnData.java deleted file mode 100644 index 2e8ddc291c3..00000000000 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/LocalTimeArrayColumnData.java +++ /dev/null @@ -1,45 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.web.shared.data.columns; - -import io.deephaven.web.shared.data.LocalTime; - -import java.util.Arrays; - -/** - * Holder for data associated with a column of type java.time.LocalTime. This type is serialized with a custom LocalTime - * type for efficiency and GWT compatibility. - */ -public class LocalTimeArrayColumnData extends ColumnData { - private LocalTime[] data; - - public LocalTimeArrayColumnData() {} - - public LocalTimeArrayColumnData(LocalTime[] data) { - this.data = data; - } - - public LocalTime[] getData() { - return data; - } - - public void setData(LocalTime[] data) { - this.data = data; - } - - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - final LocalTimeArrayColumnData that = (LocalTimeArrayColumnData) o; - return Arrays.deepEquals(data, that.data); - } - - @Override - public int hashCode() { - return Arrays.deepHashCode(data); - } -} diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/LongArrayColumnData.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/LongArrayColumnData.java deleted file mode 100644 index a9e252568b1..00000000000 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/LongArrayColumnData.java +++ /dev/null @@ -1,41 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.web.shared.data.columns; - -import java.util.Arrays; - -public class LongArrayColumnData extends ColumnData { - private long[] data; - - public LongArrayColumnData() {} - - public LongArrayColumnData(long[] data) { - this.data = data; - } - - public long[] getData() { - return data; - } - - public void setData(long[] data) { - this.data = data; - } - - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - - LongArrayColumnData that = (LongArrayColumnData) o; - - return Arrays.equals(data, that.data); - } - - @Override - public int hashCode() { - return Arrays.hashCode(data); - } -} diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/ShortArrayColumnData.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/ShortArrayColumnData.java deleted file mode 100644 index 
d8c0540ce96..00000000000 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/ShortArrayColumnData.java +++ /dev/null @@ -1,41 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.web.shared.data.columns; - -import java.util.Arrays; - -public class ShortArrayColumnData extends ColumnData { - private short[] data; - - public ShortArrayColumnData() {} - - public ShortArrayColumnData(short[] data) { - this.data = data; - } - - public short[] getData() { - return data; - } - - public void setData(short[] data) { - this.data = data; - } - - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - - ShortArrayColumnData that = (ShortArrayColumnData) o; - - return Arrays.equals(data, that.data); - } - - @Override - public int hashCode() { - return Arrays.hashCode(data); - } -} diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/StringArrayArrayColumnData.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/StringArrayArrayColumnData.java deleted file mode 100644 index 721e0aabd72..00000000000 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/StringArrayArrayColumnData.java +++ /dev/null @@ -1,41 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.web.shared.data.columns; - -import java.util.Arrays; - -public class StringArrayArrayColumnData extends ColumnData { - private String[][] data; - - public StringArrayArrayColumnData() {} - - public StringArrayArrayColumnData(String[][] data) { - this.data = data; - } - - public String[][] getData() { - return data; - } - - public void setData(String[][] data) { - this.data = data; - } - - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - - StringArrayArrayColumnData that = (StringArrayArrayColumnData) o; - - return Arrays.deepEquals(data, that.data); - } - - @Override - public int hashCode() { - return Arrays.deepHashCode(data); - } -} diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/StringArrayColumnData.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/StringArrayColumnData.java deleted file mode 100644 index 338402855bf..00000000000 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/columns/StringArrayColumnData.java +++ /dev/null @@ -1,41 +0,0 @@ -// -// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending -// -package io.deephaven.web.shared.data.columns; - -import java.util.Arrays; - -public class StringArrayColumnData extends ColumnData { - private String[] data; - - public StringArrayColumnData() {} - - public StringArrayColumnData(String[] data) { - this.data = data; - } - - public String[] getData() { - return data; - } - - public void setData(String[] data) { - this.data = data; - } - - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - - StringArrayColumnData that = (StringArrayColumnData) o; - - return Arrays.equals(data, that.data); - } - - @Override - public int hashCode() { - return Arrays.hashCode(data); - } -} From 4f9bd98b6635919e7c68150a48aec8796b52ffb5 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Mon, 1 Jul 2024 11:35:45 -0500 Subject: [PATCH 106/219] Better null handling, fixed tree 
api calls --- .../replicators/ReplicateBarrageUtils.java | 3 + .../api/barrage/WebChunkReaderFactory.java | 34 ++++- .../barrage/data/WebBarrageSubscription.java | 125 +++--------------- .../api/barrage/data/WebByteColumnData.java | 27 ++++ .../api/barrage/data/WebCharColumnData.java | 23 ++++ .../api/barrage/data/WebColumnData.java | 27 ++++ .../api/barrage/data/WebDoubleColumnData.java | 27 ++++ .../api/barrage/data/WebFloatColumnData.java | 27 ++++ .../api/barrage/data/WebIntColumnData.java | 27 ++++ .../api/barrage/data/WebLongColumnData.java | 27 ++++ .../api/barrage/data/WebObjectColumnData.java | 22 +++ .../api/barrage/data/WebShortColumnData.java | 27 ++++ .../AbstractTableSubscription.java | 16 +-- .../web/client/api/tree/JsTreeTable.java | 12 +- .../web/client/state/ClientTableState.java | 2 +- 15 files changed, 302 insertions(+), 124 deletions(-) create mode 100644 web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebByteColumnData.java create mode 100644 web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebCharColumnData.java create mode 100644 web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebColumnData.java create mode 100644 web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebDoubleColumnData.java create mode 100644 web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebFloatColumnData.java create mode 100644 web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebIntColumnData.java create mode 100644 web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebLongColumnData.java create mode 100644 web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebObjectColumnData.java create mode 100644 web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebShortColumnData.java diff --git a/replication/static/src/main/java/io/deephaven/replicators/ReplicateBarrageUtils.java b/replication/static/src/main/java/io/deephaven/replicators/ReplicateBarrageUtils.java index 6824f8d91f9..35dadff92d0 100644 --- a/replication/static/src/main/java/io/deephaven/replicators/ReplicateBarrageUtils.java +++ b/replication/static/src/main/java/io/deephaven/replicators/ReplicateBarrageUtils.java @@ -36,6 +36,9 @@ public static void main(final String[] args) throws IOException { fixupVectorExpansionKernel(CHUNK_PACKAGE + "/vector/IntVectorExpansionKernel.java", "Int"); fixupVectorExpansionKernel(CHUNK_PACKAGE + "/vector/LongVectorExpansionKernel.java", "Long"); fixupVectorExpansionKernel(CHUNK_PACKAGE + "/vector/DoubleVectorExpansionKernel.java", "Double"); + + ReplicatePrimitiveCode.charToAllButBoolean("replicateBarrageUtils", + "web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebCharColumnData.java"); } private static void fixupVectorExpansionKernel(final @NotNull String path, final @NotNull String type) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java index 6bf6b7595de..aa7c4077776 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java @@ -3,6 +3,10 @@ // package io.deephaven.web.client.api.barrage; +import io.deephaven.base.verify.Assert; +import io.deephaven.chunk.WritableByteChunk; +import 
io.deephaven.chunk.WritableObjectChunk; +import io.deephaven.chunk.attributes.Values; import io.deephaven.extensions.barrage.chunk.BooleanChunkReader; import io.deephaven.extensions.barrage.chunk.ByteChunkReader; import io.deephaven.extensions.barrage.chunk.CharChunkReader; @@ -15,6 +19,7 @@ import io.deephaven.extensions.barrage.chunk.VarBinaryChunkInputStreamGenerator; import io.deephaven.extensions.barrage.chunk.VarListChunkReader; import io.deephaven.extensions.barrage.util.StreamReaderOptions; +import io.deephaven.util.BooleanUtils; import io.deephaven.web.client.api.DateWrapper; import io.deephaven.web.client.api.LongWrapper; import org.apache.arrow.flatbuf.Date; @@ -106,7 +111,34 @@ public ChunkReader getReader(StreamReaderOptions options, int factor, TypeInfo t outChunk, outOffset, totalRows); } case Type.Bool: { - return new BooleanChunkReader(); + BooleanChunkReader subReader = new BooleanChunkReader(); + return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> { + try (final WritableByteChunk inner = (WritableByteChunk) subReader.readChunk( + fieldNodeIter, bufferInfoIter, is, null, 0, 0)) { + + final WritableObjectChunk chunk; + if (outChunk != null) { + chunk = outChunk.asWritableObjectChunk(); + } else { + int numRows = Math.max(totalRows, inner.size()); + chunk = WritableObjectChunk.makeWritableChunk(numRows); + chunk.setSize(numRows); + } + + if (outChunk == null) { + // if we're not given an output chunk then we better be writing at the front of the new one + Assert.eqZero(outOffset, "outOffset"); + } + + for (int ii = 0; ii < inner.size(); ++ii) { + byte value = inner.get(ii); + chunk.set(outOffset + ii, BooleanUtils.byteAsBoolean(value)); + } + + return chunk; + } + + }; } case Type.Date: { Date t = new Date(); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java index 0db006dd94d..85556e02b88 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java @@ -44,118 +44,35 @@ public abstract class WebBarrageSubscription { public static WebBarrageSubscription subscribe(ClientTableState cts, ViewportChangedHandler viewportChangedHandler, DataChangedHandler dataChangedHandler) { - WebDataSink[] dataSinks = new WebDataSink[cts.columnTypes().length]; + WebColumnData[] dataSinks = new WebColumnData[cts.columnTypes().length]; for (int i = 0; i < dataSinks.length; i++) { JsArray arr = JsData.newArray(cts.columnTypes()[i].getCanonicalName()); switch (cts.chunkTypes()[i]) { case Boolean: - break; + throw new IllegalStateException("Boolean unsupported here"); case Char: + dataSinks[i] = new WebCharColumnData(); break; case Byte: - dataSinks[i] = new WebDataSink() { - @Override - public void fillChunk(Chunk data, PrimitiveIterator.OfLong destIterator) { - ByteChunk byteChunk = data.asByteChunk(); - int i = 0; - while (destIterator.hasNext()) { - arr.setAt((int) destIterator.nextLong(), Js.asAny(byteChunk.get(i++))); - } - } - - @Override - public T get(long position) { - return (T) arr.getAt((int) position); - } - }; + dataSinks[i] = new WebByteColumnData(); break; case Short: - dataSinks[i] = new WebDataSink() { - @Override - public void fillChunk(Chunk data, PrimitiveIterator.OfLong destIterator) { - ShortChunk shortChunk = data.asShortChunk(); - int i = 
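The Bool branch above wraps the stock BooleanChunkReader and widens its byte-backed output into boxed Booleans, which is why ClientTableState later maps Boolean columns to ChunkType.Object instead of ChunkType.Byte. A minimal sketch of the three-state byte encoding this relies on, assuming the usual io.deephaven.util.BooleanUtils constants:

    import io.deephaven.util.BooleanUtils;

    // Booleans travel as one byte with three states; byteAsBoolean() widens the
    // byte back to a boxed Boolean, mapping the null sentinel to a true null.
    public class BooleanWidening {
        public static void main(String[] args) {
            System.out.println(BooleanUtils.byteAsBoolean(BooleanUtils.TRUE_BOOLEAN_AS_BYTE));  // true
            System.out.println(BooleanUtils.byteAsBoolean(BooleanUtils.FALSE_BOOLEAN_AS_BYTE)); // false
            System.out.println(BooleanUtils.byteAsBoolean(BooleanUtils.NULL_BOOLEAN_AS_BYTE));  // null
        }
    }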
0; - while (destIterator.hasNext()) { - arr.setAt((int) destIterator.nextLong(), Js.asAny(shortChunk.get(i++))); - } - } - - @Override - public T get(long position) { - return (T) arr.getAt((int) position); - } - }; - + dataSinks[i] = new WebShortColumnData(); break; case Int: - dataSinks[i] = new WebDataSink() { - @Override - public void fillChunk(Chunk data, PrimitiveIterator.OfLong destIterator) { - IntChunk intChunk = data.asIntChunk(); - int i = 0; - while (destIterator.hasNext()) { - arr.setAt((int) destIterator.nextLong(), Js.asAny(intChunk.get(i++))); - } - } - - @Override - public T get(long position) { - return (T) arr.getAt((int) position); - } - }; + dataSinks[i] = new WebIntColumnData(); break; case Long: - dataSinks[i] = new WebDataSink() { - @Override - public void fillChunk(Chunk data, PrimitiveIterator.OfLong destIterator) { - LongChunk longChunk = data.asLongChunk(); - int i = 0; - while (destIterator.hasNext()) { - arr.setAt((int) destIterator.nextLong(), Js.asAny(longChunk.get(i++))); - } - } - - @Override - public T get(long position) { - return (T) arr.getAt((int) position); - } - }; + dataSinks[i] = new WebLongColumnData(); break; case Float: + dataSinks[i] = new WebFloatColumnData(); break; case Double: - dataSinks[i] = new WebDataSink() { - @Override - public void fillChunk(Chunk data, PrimitiveIterator.OfLong destIterator) { - DoubleChunk doubleChunk = data.asDoubleChunk(); - int i = 0; - while (destIterator.hasNext()) { - arr.setAt((int) destIterator.nextLong(), Js.asAny(doubleChunk.get(i++))); - } - } - - @Override - public T get(long position) { - return (T) arr.getAt((int) position); - } - }; + dataSinks[i] = new WebDoubleColumnData(); break; case Object: - dataSinks[i] = new WebDataSink() { - @Override - public void fillChunk(Chunk data, PrimitiveIterator.OfLong destIterator) { - ObjectChunk objectChunk = data.asObjectChunk(); - int i = 0; - while (destIterator.hasNext()) { - arr.setAt((int) destIterator.nextLong(), Js.asAny(objectChunk.get(i++))); - } - } - - @Override - public T get(long position) { - return (T) arr.getAt((int) position); - } - }; + dataSinks[i] = new WebObjectColumnData(); break; } } @@ -174,28 +91,20 @@ void onDataChanged(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet totalMods, BitSet modifiedColumnSet); } - public interface WebDataSink { - void fillChunk(Chunk data, PrimitiveIterator.OfLong destIterator); - - default void ensureCapacity(long size) {} - - T get(long position); - } - protected final ClientTableState state; protected final ViewportChangedHandler viewportChangedHandler; protected final DataChangedHandler dataChangedHandler; protected final RangeSet currentRowSet = RangeSet.empty(); protected long capacity = 0; - protected WebDataSink[] destSources; + protected WebColumnData[] destSources; protected RangeSet serverViewport; protected BitSet serverColumns; protected boolean serverReverseViewport; protected WebBarrageSubscription(ClientTableState state, ViewportChangedHandler viewportChangedHandler, - DataChangedHandler dataChangedHandler, WebDataSink[] dataSinks) { + DataChangedHandler dataChangedHandler, WebColumnData[] dataSinks) { this.state = state; destSources = dataSinks; this.viewportChangedHandler = viewportChangedHandler; @@ -238,7 +147,7 @@ public boolean isReversed() { * @return the value read from the table * @param the expected type of the column to read */ - public abstract T getData(long key, int col); + public abstract Any getData(long key, int col); protected boolean isSubscribedColumn(int ii) { return 
serverColumns == null || serverColumns.get(ii); @@ -252,7 +161,7 @@ enum Mode { private final Mode mode; public BlinkImpl(ClientTableState state, ViewportChangedHandler viewportChangedHandler, - DataChangedHandler dataChangedHandler, WebDataSink[] dataSinks) { + DataChangedHandler dataChangedHandler, WebColumnData[] dataSinks) { super(state, viewportChangedHandler, dataChangedHandler, dataSinks); mode = Mode.BLINK; } @@ -303,7 +212,7 @@ public void applyUpdates(WebBarrageMessage message) { } @Override - public T getData(long key, int col) { + public Any getData(long key, int col) { return destSources[col].get(key); } } @@ -313,7 +222,7 @@ public static class RedirectedImpl extends WebBarrageSubscription { private final TreeMap redirectedIndexes = new TreeMap<>(); public RedirectedImpl(ClientTableState state, ViewportChangedHandler viewportChangedHandler, - DataChangedHandler dataChangedHandler, WebDataSink[] dataSinks) { + DataChangedHandler dataChangedHandler, WebColumnData[] dataSinks) { super(state, viewportChangedHandler, dataChangedHandler, dataSinks); } @@ -473,7 +382,7 @@ public void applyUpdates(WebBarrageMessage message) { } @Override - public T getData(long key, int col) { + public Any getData(long key, int col) { return this.destSources[col].get(redirectedIndexes.get(key)); } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebByteColumnData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebByteColumnData.java new file mode 100644 index 00000000000..4f0593d05a9 --- /dev/null +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebByteColumnData.java @@ -0,0 +1,27 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +// ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY +// ****** Edit WebCharColumnData and run "./gradlew replicateBarrageUtils" to regenerate +// +// @formatter:off +package io.deephaven.web.client.api.barrage.data; + +import io.deephaven.chunk.ByteChunk; +import io.deephaven.chunk.Chunk; +import io.deephaven.util.QueryConstants; +import jsinterop.base.Js; + +import java.util.PrimitiveIterator; + +public class WebByteColumnData extends WebColumnData { + @Override + public void fillChunk(Chunk data, PrimitiveIterator.OfLong destIterator) { + ByteChunk byteChunk = data.asByteChunk(); + int i = 0; + while (destIterator.hasNext()) { + byte value = byteChunk.get(i++); + arr.setAt((int) destIterator.nextLong(), value == QueryConstants.NULL_BYTE ? 
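RedirectedImpl.getData above resolves a row key in two hops: the TreeMap maps the sparse row key to a dense storage slot, and the slot indexes the column store. A simplified stand-in using only java.util, with a String array in place of a column sink:

    import java.util.TreeMap;

    public class RedirectionSketch {
        public static void main(String[] args) {
            TreeMap<Long, Long> redirectedIndexes = new TreeMap<>();
            redirectedIndexes.put(100L, 0L); // row key 100 is stored in slot 0
            String[] columnStore = {"hello"};
            long rowKey = 100L;
            // two hops: key -> slot, slot -> value
            System.out.println(columnStore[redirectedIndexes.get(rowKey).intValue()]);
        }
    }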
null : Js.asAny(value)); + } + } +} diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebCharColumnData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebCharColumnData.java new file mode 100644 index 00000000000..dfe561d90df --- /dev/null +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebCharColumnData.java @@ -0,0 +1,23 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.web.client.api.barrage.data; + +import io.deephaven.chunk.CharChunk; +import io.deephaven.chunk.Chunk; +import io.deephaven.util.QueryConstants; +import jsinterop.base.Js; + +import java.util.PrimitiveIterator; + +public class WebCharColumnData extends WebColumnData { + @Override + public void fillChunk(Chunk data, PrimitiveIterator.OfLong destIterator) { + CharChunk charChunk = data.asCharChunk(); + int i = 0; + while (destIterator.hasNext()) { + char value = charChunk.get(i++); + arr.setAt((int) destIterator.nextLong(), value == QueryConstants.NULL_CHAR ? null : Js.asAny(value)); + } + } +} diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebColumnData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebColumnData.java new file mode 100644 index 00000000000..3c8fee323b4 --- /dev/null +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebColumnData.java @@ -0,0 +1,27 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.web.client.api.barrage.data; + +import elemental2.core.JsArray; +import io.deephaven.chunk.Chunk; +import jsinterop.base.Any; + +import java.util.PrimitiveIterator; + +/** + * Holds data from or intended for web clients, normalizing over nulls, with helpers to handle typed chunks. + */ +public abstract class WebColumnData { + protected final JsArray arr = new JsArray<>(); + + public abstract void fillChunk(Chunk data, PrimitiveIterator.OfLong destIterator); + + public void ensureCapacity(long size) { + // Current impl does nothing, js arrays don't behave better when told the size up front + } + + public Any get(long position) { + return arr.getAt((int) position); + } +} diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebDoubleColumnData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebDoubleColumnData.java new file mode 100644 index 00000000000..d2c8e764ce7 --- /dev/null +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebDoubleColumnData.java @@ -0,0 +1,27 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +// ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY +// ****** Edit WebCharColumnData and run "./gradlew replicateBarrageUtils" to regenerate +// +// @formatter:off +package io.deephaven.web.client.api.barrage.data; + +import io.deephaven.chunk.DoubleChunk; +import io.deephaven.chunk.Chunk; +import io.deephaven.util.QueryConstants; +import jsinterop.base.Js; + +import java.util.PrimitiveIterator; + +public class WebDoubleColumnData extends WebColumnData { + @Override + public void fillChunk(Chunk data, PrimitiveIterator.OfLong destIterator) { + DoubleChunk doubleChunk = data.asDoubleChunk(); + int i = 0; + while (destIterator.hasNext()) { + double value = doubleChunk.get(i++); + arr.setAt((int) destIterator.nextLong(), value == QueryConstants.NULL_DOUBLE ? 
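To make the contract of the WebColumnData base class above concrete, here is a hedged usage sketch: it compiles against the classes in this patch and the standard chunk factory, but only executes in a GWT/J2CL environment because the sink is backed by a JsArray. Slot 1 receives the int null sentinel and reads back as a genuine null:

    import io.deephaven.chunk.WritableIntChunk;
    import io.deephaven.chunk.attributes.Values;
    import io.deephaven.util.QueryConstants;
    import io.deephaven.web.client.api.barrage.data.WebIntColumnData;
    import java.util.stream.LongStream;

    public class ColumnDataSketch {
        public static void main(String[] args) {
            WritableIntChunk<Values> chunk = WritableIntChunk.makeWritableChunk(3);
            chunk.set(0, 7);
            chunk.set(1, QueryConstants.NULL_INT); // sentinel on the wire
            chunk.set(2, 9);
            WebIntColumnData column = new WebIntColumnData();
            column.fillChunk(chunk, LongStream.range(0, 3).iterator()); // fill slots 0-2
            System.out.println(column.get(1) == null); // true: sentinel normalized
        }
    }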
null : Js.asAny(value)); + } + } +} diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebFloatColumnData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebFloatColumnData.java new file mode 100644 index 00000000000..a624affbda6 --- /dev/null +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebFloatColumnData.java @@ -0,0 +1,27 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +// ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY +// ****** Edit WebCharColumnData and run "./gradlew replicateBarrageUtils" to regenerate +// +// @formatter:off +package io.deephaven.web.client.api.barrage.data; + +import io.deephaven.chunk.FloatChunk; +import io.deephaven.chunk.Chunk; +import io.deephaven.util.QueryConstants; +import jsinterop.base.Js; + +import java.util.PrimitiveIterator; + +public class WebFloatColumnData extends WebColumnData { + @Override + public void fillChunk(Chunk data, PrimitiveIterator.OfLong destIterator) { + FloatChunk floatChunk = data.asFloatChunk(); + int i = 0; + while (destIterator.hasNext()) { + float value = floatChunk.get(i++); + arr.setAt((int) destIterator.nextLong(), value == QueryConstants.NULL_FLOAT ? null : Js.asAny(value)); + } + } +} diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebIntColumnData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebIntColumnData.java new file mode 100644 index 00000000000..996cf43c6a8 --- /dev/null +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebIntColumnData.java @@ -0,0 +1,27 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +// ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY +// ****** Edit WebCharColumnData and run "./gradlew replicateBarrageUtils" to regenerate +// +// @formatter:off +package io.deephaven.web.client.api.barrage.data; + +import io.deephaven.chunk.IntChunk; +import io.deephaven.chunk.Chunk; +import io.deephaven.util.QueryConstants; +import jsinterop.base.Js; + +import java.util.PrimitiveIterator; + +public class WebIntColumnData extends WebColumnData { + @Override + public void fillChunk(Chunk data, PrimitiveIterator.OfLong destIterator) { + IntChunk intChunk = data.asIntChunk(); + int i = 0; + while (destIterator.hasNext()) { + int value = intChunk.get(i++); + arr.setAt((int) destIterator.nextLong(), value == QueryConstants.NULL_INT ? 
null : Js.asAny(value)); + } + } +} diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebLongColumnData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebLongColumnData.java new file mode 100644 index 00000000000..080c05e6034 --- /dev/null +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebLongColumnData.java @@ -0,0 +1,27 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +// ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY +// ****** Edit WebCharColumnData and run "./gradlew replicateBarrageUtils" to regenerate +// +// @formatter:off +package io.deephaven.web.client.api.barrage.data; + +import io.deephaven.chunk.LongChunk; +import io.deephaven.chunk.Chunk; +import io.deephaven.util.QueryConstants; +import jsinterop.base.Js; + +import java.util.PrimitiveIterator; + +public class WebLongColumnData extends WebColumnData { + @Override + public void fillChunk(Chunk data, PrimitiveIterator.OfLong destIterator) { + LongChunk longChunk = data.asLongChunk(); + int i = 0; + while (destIterator.hasNext()) { + long value = longChunk.get(i++); + arr.setAt((int) destIterator.nextLong(), value == QueryConstants.NULL_LONG ? null : Js.asAny(value)); + } + } +} diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebObjectColumnData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebObjectColumnData.java new file mode 100644 index 00000000000..251bca22e67 --- /dev/null +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebObjectColumnData.java @@ -0,0 +1,22 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.web.client.api.barrage.data; + +import io.deephaven.chunk.Chunk; +import io.deephaven.chunk.ObjectChunk; +import jsinterop.base.Js; + +import java.util.PrimitiveIterator; + +public class WebObjectColumnData extends WebColumnData { + @Override + public void fillChunk(Chunk data, PrimitiveIterator.OfLong destIterator) { + ObjectChunk objectChunk = data.asObjectChunk(); + int i = 0; + while (destIterator.hasNext()) { + Object value = objectChunk.get(i++); + arr.setAt((int) destIterator.nextLong(), Js.asAny(value)); + } + } +} diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebShortColumnData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebShortColumnData.java new file mode 100644 index 00000000000..328a0f654a4 --- /dev/null +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebShortColumnData.java @@ -0,0 +1,27 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +// ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY +// ****** Edit WebCharColumnData and run "./gradlew replicateBarrageUtils" to regenerate +// +// @formatter:off +package io.deephaven.web.client.api.barrage.data; + +import io.deephaven.chunk.ShortChunk; +import io.deephaven.chunk.Chunk; +import io.deephaven.util.QueryConstants; +import jsinterop.base.Js; + +import java.util.PrimitiveIterator; + +public class WebShortColumnData extends WebColumnData { + @Override + public void fillChunk(Chunk data, PrimitiveIterator.OfLong destIterator) { + ShortChunk shortChunk = data.asShortChunk(); + int i = 0; + while (destIterator.hasNext()) { + short value = shortChunk.get(i++); + arr.setAt((int) destIterator.nextLong(), value == QueryConstants.NULL_SHORT ? 
null : Js.asAny(value)); + } + } +} diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java index b0e95c940ff..1eb5394cce0 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java @@ -266,16 +266,16 @@ public Format getFormat(Column column) { String numberFormat = null; String formatString = null; if (column.getStyleColumnIndex() != null) { - cellColors = subscription.getData(index, column.getStyleColumnIndex()); + cellColors = subscription.getData(index, column.getStyleColumnIndex()).uncheckedCast(); } if (rowStyleColumn != TableData.NO_ROW_FORMAT_COLUMN) { - rowColors = subscription.getData(index, rowStyleColumn); + rowColors = subscription.getData(index, rowStyleColumn).uncheckedCast(); } if (column.getFormatStringColumnIndex() != null) { - numberFormat = subscription.getData(index, column.getFormatStringColumnIndex()); + numberFormat = subscription.getData(index, column.getFormatStringColumnIndex()).uncheckedCast(); } if (column.getFormatStringColumnIndex() != null) { - formatString = subscription.getData(index, column.getFormatStringColumnIndex()); + formatString = subscription.getData(index, column.getFormatStringColumnIndex()).uncheckedCast(); } return new Format(cellColors, rowColors, numberFormat, formatString); } @@ -369,16 +369,16 @@ public Format getFormat(long index, Column column) { String numberFormat = null; String formatString = null; if (column.getStyleColumnIndex() != null) { - cellColors = subscription.getData(index, column.getStyleColumnIndex()); + cellColors = subscription.getData(index, column.getStyleColumnIndex()).uncheckedCast(); } if (rowStyleColumn != NO_ROW_FORMAT_COLUMN) { - rowColors = subscription.getData(index, rowStyleColumn); + rowColors = subscription.getData(index, rowStyleColumn).uncheckedCast(); } if (column.getFormatStringColumnIndex() != null) { - numberFormat = subscription.getData(index, column.getFormatStringColumnIndex()); + numberFormat = subscription.getData(index, column.getFormatStringColumnIndex()).uncheckedCast(); } if (column.getFormatStringColumnIndex() != null) { - formatString = subscription.getData(index, column.getFormatStringColumnIndex()); + formatString = subscription.getData(index, column.getFormatStringColumnIndex()).uncheckedCast(); } return new Format(cellColors, rowColors, numberFormat, formatString); } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java index 0747bc7d4c2..1e16c54ca88 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java @@ -441,7 +441,7 @@ public Any getData(int index, Column column) { // no constituent column, call super return super.getData(index, column); } - if (barrageSubscription.getData(index, rowDepthCol.getIndex()) != constituentDepth) { + if (barrageSubscription.getData(index, rowDepthCol.getIndex()).asInt() != constituentDepth) { // not at constituent depth, call super return super.getData(index, column); } @@ -456,7 +456,7 @@ public Any getData(long index, Column column) { // no constituent column, call super return super.getData(index, 
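Since getData now returns jsinterop's Any, the call sites below pick the view they need: uncheckedCast() for reference types such as the format strings, and asInt() where a number is expected (it asserts the underlying JS value really is a number). A tiny sketch of that bridge, again runnable only under GWT/J2CL:

    import jsinterop.base.Any;
    import jsinterop.base.Js;

    public class AnyBridgeSketch {
        public static void main(String[] args) {
            Any cell = Js.asAny(42);             // what a column sink hands back
            int asInt = cell.asInt();            // checked: asserts a JS number
            Object asRef = cell.uncheckedCast(); // unchecked: caller vouches for the type
            System.out.println(asInt + " / " + asRef);
        }
    }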
column); } - if (barrageSubscription.getData(index, rowDepthCol.getIndex()) != constituentDepth) { + if (barrageSubscription.getData(index, rowDepthCol.getIndex()).asInt() != constituentDepth) { // not at constituent depth, call super return super.getData(index, column); } @@ -471,7 +471,7 @@ public Format getFormat(int index, Column column) { // no constituent column, call super return super.getFormat(index, column); } - if (barrageSubscription.getData(index, rowDepthCol.getIndex()) != constituentDepth) { + if (barrageSubscription.getData(index, rowDepthCol.getIndex()).asInt() != constituentDepth) { // not at constituent depth, call super return super.getFormat(index, column); } @@ -486,7 +486,7 @@ public Format getFormat(long index, Column column) { // no constituent column, call super return super.getFormat(index, column); } - if (barrageSubscription.getData(index, rowDepthCol.getIndex()) != constituentDepth) { + if (barrageSubscription.getData(index, rowDepthCol.getIndex()).asInt() != constituentDepth) { // not at constituent depth, call super return super.getFormat(index, column); } @@ -536,7 +536,7 @@ public TreeRow(WebBarrageSubscription subscription, long index) { */ @JsProperty(name = "isExpanded") public boolean isExpanded() { - return barrageSubscription.getData(index, rowExpandedCol.getIndex()) == Boolean.TRUE; + return barrageSubscription.getData(index, rowExpandedCol.getIndex()).uncheckedCast() == Boolean.TRUE; } /** @@ -547,7 +547,7 @@ public boolean isExpanded() { */ @JsProperty(name = "hasChildren") public boolean hasChildren() { - return barrageSubscription.getData(index, rowExpandedCol.getIndex()) != null; + return barrageSubscription.getData(index, rowExpandedCol.getIndex()).uncheckedCast() != null; } /** diff --git a/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java b/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java index d1f6c8d8bdd..bd7fbfc3023 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java @@ -62,7 +62,7 @@ public ChunkType[] chunkTypes() { // be rewritten to skip the trip through Class return Arrays.stream(columnTypes()).map(dataType -> { if (dataType == Boolean.class || dataType == boolean.class) { - return ChunkType.Byte; + return ChunkType.Object; } // JS client holds date objects as objects, not as longs // if (dataType == DateWrapper.class) { From 28ced21d9691aceacfca18029d80dd0d86af08f1 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 5 Jul 2024 06:49:02 -0500 Subject: [PATCH 107/219] Correctly support char/short, BigInteger/Decimal wrappers --- .../api/barrage/WebChunkReaderFactory.java | 22 +++++++++++++++---- .../web/client/state/ClientTableState.java | 7 ++++++ 2 files changed, 25 insertions(+), 4 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java index aa7c4077776..5cc12ffaf68 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java @@ -16,10 +16,13 @@ import io.deephaven.extensions.barrage.chunk.FloatChunkReader; import io.deephaven.extensions.barrage.chunk.IntChunkReader; import io.deephaven.extensions.barrage.chunk.LongChunkReader; +import 
io.deephaven.extensions.barrage.chunk.ShortChunkReader; import io.deephaven.extensions.barrage.chunk.VarBinaryChunkInputStreamGenerator; import io.deephaven.extensions.barrage.chunk.VarListChunkReader; import io.deephaven.extensions.barrage.util.StreamReaderOptions; import io.deephaven.util.BooleanUtils; +import io.deephaven.web.client.api.BigDecimalWrapper; +import io.deephaven.web.client.api.BigIntegerWrapper; import io.deephaven.web.client.api.DateWrapper; import io.deephaven.web.client.api.LongWrapper; import org.apache.arrow.flatbuf.Date; @@ -37,6 +40,12 @@ import java.nio.charset.StandardCharsets; import java.util.Arrays; +/** + * Browser-compatible implementation of the ChunkReaderFactory, with a focus on reading from arrow types rather than + * successfully round-tripping to the Java server. + *
<p>
+ * Includes some specific workarounds to handle nullability that will make more sense for the browser. + */ public class WebChunkReaderFactory implements ChunkReaderFactory { @Override public ChunkReader getReader(StreamReaderOptions options, int factor, TypeInfo typeInfo) { @@ -49,6 +58,9 @@ public ChunkReader getReader(StreamReaderOptions options, int factor, TypeInfo t return new ByteChunkReader(options); } case 16: { + if (t.isSigned()) { + return new ShortChunkReader(options); + } return new CharChunkReader(options); } case 32: { @@ -77,16 +89,16 @@ public ChunkReader getReader(StreamReaderOptions options, int factor, TypeInfo t } } case Type.Binary: { - if (typeInfo.type() == BigInteger.class) { + if (typeInfo.type() == BigIntegerWrapper.class) { return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( is, fieldNodeIter, bufferInfoIter, - BigInteger::new, + (val, off, len) -> new BigIntegerWrapper(new BigInteger(val, off, len)), outChunk, outOffset, totalRows); } - if (typeInfo.type() == BigDecimal.class) { + if (typeInfo.type() == BigDecimalWrapper.class) { return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, totalRows) -> VarBinaryChunkInputStreamGenerator.extractChunkFromInputStream( is, @@ -99,10 +111,12 @@ public ChunkReader getReader(StreamReaderOptions options, int factor, TypeInfo t final byte b3 = buf[offset + 2]; final byte b4 = buf[offset + 3]; final int scale = b4 << 24 | (b3 & 0xFF) << 16 | (b2 & 0xFF) << 8 | (b1 & 0xFF); - return new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale); + BigDecimal bigDecimal = new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale); + return new BigDecimalWrapper(bigDecimal); }, outChunk, outOffset, totalRows); } + throw new IllegalArgumentException("Unsupported Binary type " + typeInfo.type()); } case Type.Utf8: { return (fieldNodeIter, bufferInfoIter, is, outChunk, outOffset, diff --git a/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java b/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java index bd7fbfc3023..fc87aa29bcf 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java @@ -100,11 +100,18 @@ public Class[] columnTypes() { case "long": case "java.lang.Long": return long.class; + case "java.lang.Float": + case "float": + return float.class; case "java.lang.Double": case "double": return double.class; case "java.time.Instant": return DateWrapper.class; + case "java.math.BigInteger": + return BigIntegerWrapper.class; + case "java.math.BigDecimal": + return BigDecimalWrapper.class; default: return Object.class; } From 4db41da2b664553bf08f85131b9e344f9ab5185e Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 5 Jul 2024 06:49:34 -0500 Subject: [PATCH 108/219] Correctly throw when no column is present in TableData --- .../client/api/barrage/data/WebBarrageSubscription.java | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java index 85556e02b88..fe1e9fca285 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java +++ 
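The BigDecimal branch above decodes a small custom wire format: the first four bytes carry the scale in little-endian order, and the remaining bytes are the two's-complement unscaled value. A standalone worked example of the same arithmetic (plain JVM Java; the BigInteger(byte[], int, int) constructor needs Java 9+):

    import java.math.BigDecimal;
    import java.math.BigInteger;

    public class BigDecimalWire {
        static BigDecimal decode(byte[] buf, int offset, int length) {
            // scale is little-endian in the first four bytes
            final int scale = (buf[offset + 3] << 24)
                    | ((buf[offset + 2] & 0xFF) << 16)
                    | ((buf[offset + 1] & 0xFF) << 8)
                    | (buf[offset] & 0xFF);
            return new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale);
        }

        public static void main(String[] args) {
            // scale = 2 (little-endian), unscaled = 0x3039 = 12345 -> 123.45
            byte[] payload = {2, 0, 0, 0, 0x30, 0x39};
            System.out.println(decode(payload, 0, payload.length)); // 123.45
        }
    }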
b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java @@ -26,6 +26,7 @@ import java.util.BitSet; import java.util.Iterator; import java.util.NavigableSet; +import java.util.NoSuchElementException; import java.util.PrimitiveIterator; import java.util.TreeMap; @@ -213,6 +214,9 @@ public void applyUpdates(WebBarrageMessage message) { @Override public Any getData(long key, int col) { + if (!isSubscribedColumn(col)) { + throw new NoSuchElementException("No column at index " + col); + } return destSources[col].get(key); } } @@ -383,6 +387,9 @@ public void applyUpdates(WebBarrageMessage message) { @Override public Any getData(long key, int col) { + if (!isSubscribedColumn(col)) { + throw new NoSuchElementException("No column at index " + col); + } return this.destSources[col].get(redirectedIndexes.get(key)); } From 2f8bdb8219b4ea1426ea1235b0694a13ea1fe989 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 5 Jul 2024 06:57:21 -0500 Subject: [PATCH 109/219] Check that viewport matches before showing data --- .../subscription/AbstractTableSubscription.java | 16 ++++++++++------ .../subscription/TableViewportSubscription.java | 11 ++++------- 2 files changed, 14 insertions(+), 13 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java index 1eb5394cce0..c3dd45835c8 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java @@ -74,6 +74,8 @@ public enum Status { protected final int rowStyleColumn; private JsArray columns; private BitSet columnBitSet; + protected RangeSet viewportRowSet; + private boolean isReverseViewport; private BarrageSubscriptionOptions options; private BiDiStream doExchange; @@ -159,7 +161,9 @@ protected void sendBarrageSubscriptionRequest(RangeSet viewport, JsArray boolean isReverseViewport) { assert status == Status.ACTIVE || status == Status.PENDING_UPDATE : status; this.columns = columns; + this.viewportRowSet = viewport; this.columnBitSet = makeColumnBitset(columns); + this.isReverseViewport = isReverseViewport; // TODO validate that we can change updateinterval this.options = BarrageSubscriptionOptions.builder() .batchSize(WebBarrageSubscription.BATCH_SIZE) @@ -422,13 +426,13 @@ private static RangeSet transformRowsetForConsumer(RangeSet rowSet, @Nullable Ra return rowSet; } - protected void onViewportChange(RangeSet serverViewport, BitSet serverColumns, boolean serverReverseViewport) { - // if (serverViewport != null || serverReverseViewport) { - // throw new IllegalStateException("Not a viewport subscription"); - // } - subscriptionReady = (serverColumns == null && columnBitSet == null) + private void onViewportChange(RangeSet serverViewport, BitSet serverColumns, boolean serverReverseViewport) { + subscriptionReady = ((serverColumns == null && columnBitSet == null) || (serverColumns == null && columnBitSet.cardinality() == state.getTableDef().getColumns().length) - || (serverColumns != null && serverColumns.equals(this.columnBitSet)); + || (serverColumns != null && serverColumns.equals(this.columnBitSet))) + && (serverViewport == null && this.viewportRowSet == null + || (serverViewport != null && serverViewport.equals(this.viewportRowSet))) + && serverReverseViewport == isReverseViewport; } 
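The rewritten onViewportChange above only flips subscriptionReady once the server's echoed state matches everything the client asked for: the columns (where a null server bitset means "all columns"), the viewport rows, and the viewport direction. Restated as a standalone predicate, with Objects.equals standing in for the client's RangeSet comparison:

    import java.util.BitSet;
    import java.util.Objects;

    public class ReadinessCheck {
        static boolean subscriptionReady(BitSet serverCols, BitSet requestedCols, int tableColumnCount,
                Object serverViewport, Object requestedViewport,
                boolean serverReverse, boolean requestedReverse) {
            boolean columnsMatch = (serverCols == null && requestedCols == null)
                    || (serverCols == null && requestedCols != null
                            && requestedCols.cardinality() == tableColumnCount)
                    || (serverCols != null && serverCols.equals(requestedCols));
            boolean viewportMatches = Objects.equals(serverViewport, requestedViewport);
            return columnsMatch && viewportMatches && serverReverse == requestedReverse;
        }

        public static void main(String[] args) {
            BitSet cols = new BitSet();
            cols.set(0, 3);
            System.out.println(subscriptionReady(cols, cols, 5, "0-9", "0-9", false, false)); // true
            System.out.println(subscriptionReady(cols, cols, 5, "0-4", "0-9", false, false)); // false: stale viewport
        }
    }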
private final WebBarrageStreamReader reader = new WebBarrageStreamReader(); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java index 88a44a75ec2..247100c7899 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java @@ -71,9 +71,6 @@ @TsName(namespace = "dh") public class TableViewportSubscription extends AbstractTableSubscription { - // TODO move to superclass and check on viewport change - private RangeSet serverViewport; - private double firstRow; private double lastRow; private Column[] columns; @@ -137,7 +134,7 @@ protected void notifyUpdate(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet t // TODO Rewrite shifts as adds/removed/modifies? in the past we ignored them... UpdateEventData detail = new UpdateEventData(barrageSubscription, rowStyleColumn, getColumns(), rowsAdded, rowsRemoved, totalMods, shifted); - detail.offset = this.serverViewport.getFirstRow(); + detail.offset = this.viewportRowSet.getFirstRow(); this.viewportData = detail; CustomEventInit event = CustomEventInit.create(); event.setDetail(detail); @@ -227,9 +224,9 @@ public void setInternalViewport(double firstRow, double lastRow, Column[] column } if (!state().getTableDef().getAttributes().isBlinkTable()) { // we only set blink table viewports once; and that's in the constructor - serverViewport = RangeSet.ofRange((long) firstRow, (long) lastRow); - this.sendBarrageSubscriptionRequest( - serverViewport, Js.uncheckedCast(columns), updateIntervalMs, isReverseViewport); + RangeSet viewport = RangeSet.ofRange((long) firstRow, (long) lastRow); + this.sendBarrageSubscriptionRequest(viewport, Js.uncheckedCast(columns), updateIntervalMs, + isReverseViewport); } } From 63fe35d3f6c7af7e77b649778c64568bb9fee467 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 5 Jul 2024 06:57:49 -0500 Subject: [PATCH 110/219] correctly return ViewportData from methods that provide it --- .../src/main/java/io/deephaven/web/client/api/JsTable.java | 3 ++- .../java/io/deephaven/web/client/api/JsTotalsTable.java | 3 ++- .../client/api/subscription/TableViewportSubscription.java | 6 +++--- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java index 4a1d2eabdee..ceebc40968f 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java @@ -47,6 +47,7 @@ import io.deephaven.web.client.api.state.StateCache; import io.deephaven.web.client.api.subscription.TableSubscription; import io.deephaven.web.client.api.subscription.TableViewportSubscription; +import io.deephaven.web.client.api.subscription.ViewportData; import io.deephaven.web.client.api.subscription.ViewportRow; import io.deephaven.web.client.api.tree.JsRollupConfig; import io.deephaven.web.client.api.tree.JsTreeTable; @@ -746,7 +747,7 @@ public TableViewportSubscription setViewport(double firstRow, double lastRow, * @return Promise of {@link TableData} */ @JsMethod - public Promise getViewportData() { + public Promise getViewportData() { TableViewportSubscription subscription = subscriptions.get(getHandle()); if 
(subscription == null) { return Promise.reject("No viewport currently set"); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTotalsTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTotalsTable.java index aa7163ac84f..9971529d45c 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTotalsTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTotalsTable.java @@ -13,6 +13,7 @@ import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.ticket_pb.TypedTicket; import io.deephaven.web.client.api.console.JsVariableType; import io.deephaven.web.client.api.filter.FilterCondition; +import io.deephaven.web.client.api.subscription.ViewportData; import io.deephaven.web.client.state.ClientTableState; import io.deephaven.web.shared.fu.RemoverFn; import jsinterop.annotations.JsIgnore; @@ -124,7 +125,7 @@ public void setViewport(double firstRow, double lastRow, @JsOptional JsArray

getViewportData() { + public Promise getViewportData() { return wrappedTable.getViewportData(); } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java index 247100c7899..c5d13933e6e 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java @@ -272,16 +272,16 @@ public void internalClose() { * @return Promise of {@link TableData}. */ @JsMethod - public Promise getViewportData() { + public Promise getViewportData() { retainForExternalUse(); return getInternalViewportData(); } - public Promise getInternalViewportData() { + public Promise getInternalViewportData() { if (isSubscriptionReady()) { return Promise.resolve(viewportData); } - final LazyPromise promise = new LazyPromise<>(); + final LazyPromise promise = new LazyPromise<>(); addEventListenerOneShot(EVENT_UPDATED, ignored -> promise.succeed(viewportData)); return promise.asPromise(); } From 9dd0a7b1a08e4613e150cc7944217ca31dd970f3 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 5 Jul 2024 08:10:26 -0500 Subject: [PATCH 111/219] Draft at ordering columns so data makes sense --- .../api/subscription/TableViewportSubscription.java | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java index c5d13933e6e..c822b4fbd9e 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java @@ -6,6 +6,7 @@ import com.google.flatbuffers.FlatBufferBuilder; import com.vertispan.tsdefs.annotations.TsInterface; import com.vertispan.tsdefs.annotations.TsName; +import elemental2.core.JsArray; import elemental2.dom.CustomEvent; import elemental2.dom.CustomEventInit; import elemental2.dom.DomGlobal; @@ -35,7 +36,9 @@ import jsinterop.base.Js; import java.io.IOException; +import java.util.Arrays; import java.util.Collections; +import java.util.Comparator; import static io.deephaven.web.client.api.barrage.WebBarrageUtils.serializeRanges; @@ -215,6 +218,14 @@ public void setInternalViewport(double firstRow, double lastRow, Column[] column this.refresh = updateIntervalMs == null ? 1000.0 : updateIntervalMs; return; } + if (columns == null) { + // Null columns means the user wants all columns, only supported on viewports. 
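getInternalViewportData above follows a resolve-now-or-wait pattern: if the subscription already reflects the requested viewport, it resolves immediately with the cached data; otherwise it parks the promise on a one-shot EVENT_UPDATED listener. A JVM analogy using CompletableFuture (the client itself uses LazyPromise and event listeners, not this API):

    import java.util.concurrent.CompletableFuture;

    public class ResolveNowOrWait {
        static final CompletableFuture<String> nextUpdate = new CompletableFuture<>();

        static CompletableFuture<String> viewportData(boolean ready, String current) {
            if (ready) {
                return CompletableFuture.completedFuture(current); // snapshot is already consistent
            }
            return nextUpdate; // completes when the next update event fires
        }

        public static void main(String[] args) {
            System.out.println(viewportData(true, "rows 0-5").join());
            nextUpdate.complete("rows 0-5 (after update)");
            System.out.println(viewportData(false, null).join());
        }
    }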
This can't be done until the CTS has resolved + columns = state().getColumns(); + } else { + // If columns were provided, sort a copy so that we have them in the expected order + columns = Js.>uncheckedCast(columns).slice().asArray(new Column[0]); + Arrays.sort(columns, Comparator.comparing(Column::getIndex)); + } if (updateIntervalMs != null && refresh != updateIntervalMs) { throw new IllegalArgumentException( "Can't change refreshIntervalMs on a later call to setViewport, it must be consistent or omitted"); From 2aa7aa1f4ba7d201c8b4979d807f2e8ec51b30e0 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 5 Jul 2024 08:13:57 -0500 Subject: [PATCH 112/219] Remove unnecessary casts --- .../web/client/api/subscription/ViewportTestGwt.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/web/client-api/src/test/java/io/deephaven/web/client/api/subscription/ViewportTestGwt.java b/web/client-api/src/test/java/io/deephaven/web/client/api/subscription/ViewportTestGwt.java index 8a1959fa671..8177daf620c 100644 --- a/web/client-api/src/test/java/io/deephaven/web/client/api/subscription/ViewportTestGwt.java +++ b/web/client-api/src/test/java/io/deephaven/web/client/api/subscription/ViewportTestGwt.java @@ -374,7 +374,7 @@ public void testRapidChangingViewport() { table.setViewport(6, 14, null); return assertUpdateReceived(table, viewport -> { assertEquals(6d, viewport.getOffset()); - assertEquals(9, (int) viewport.getRows().length); + assertEquals(9, viewport.getRows().length); }, 1009); }) .then(table -> { @@ -389,7 +389,7 @@ public void testRapidChangingViewport() { }, 1010) .then(t -> { t.getViewportData().then(vp -> { - // assertEquals(7, (int) vp.getOffset()); + assertEquals(7d, vp.getOffset()); assertEquals(11, (int) vp.getRows().length); return Promise.resolve(vp); }); From 90b58a43cb6581d53820e0c491c896be081e9bcf Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 5 Jul 2024 12:11:07 -0500 Subject: [PATCH 113/219] Make it easier to manually run GWT tests from the IDE --- web/client-api/client-api.gradle | 40 ++++++++++++++++++++++++++++---- 1 file changed, 36 insertions(+), 4 deletions(-) diff --git a/web/client-api/client-api.gradle b/web/client-api/client-api.gradle index 8ad75e7513e..926ff1506b7 100644 --- a/web/client-api/client-api.gradle +++ b/web/client-api/client-api.gradle @@ -71,7 +71,8 @@ artifacts { def gwtUnitTest = tasks.register('gwtUnitTest', Test) { t -> t.systemProperties = [ - 'gwt.args': ['-runStyle HtmlUnit', + 'gwt.args': ['-sourceLevel auto', + '-runStyle HtmlUnit', '-ea', '-style PRETTY', "-war ${layout.buildDirectory.dir('unitTest-war').get().asFile.absolutePath}" @@ -139,9 +140,10 @@ def stopSelenium = project.tasks.register('stopSelenium', DockerRemoveContainer) def gwtIntegrationTest = tasks.register('gwtIntegrationTest', Test) { t -> t.dependsOn(deephavenDocker.portTask, seleniumHealthy) t.finalizedBy(deephavenDocker.endTask, stopSelenium) - doFirst { + t.doFirst { def webdriverUrl = "http://localhost:${seleniumPort}/" - t.systemProperty('gwt.args', ["-runStyle io.deephaven.web.junit.RunStyleRemoteWebDriver:${webdriverUrl}?firefox", + t.systemProperty('gwt.args', ['-sourceLevel auto', + "-runStyle io.deephaven.web.junit.RunStyleRemoteWebDriver:${webdriverUrl}?firefox", '-ea', '-style PRETTY', "-setProperty dh.server=http://${deephavenDocker.containerName.get()}:10000", @@ -149,7 +151,6 @@ def gwtIntegrationTest = tasks.register('gwtIntegrationTest', Test) { t -> ].join(' ')) t.classpath += tasks.getByName('gwtCompile').src } - 
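The copy-then-sort in setInternalViewport above leaves the caller's array untouched while guaranteeing the subscription sees columns in index order, matching the order in which data comes back. A minimal sketch, with a hypothetical stand-in for the client's Column class:

    import java.util.Arrays;
    import java.util.Comparator;

    public class SortColumnsSketch {
        static final class SampleColumn { // stand-in for io.deephaven.web.client.api.Column
            final String name;
            final int index;
            SampleColumn(String name, int index) { this.name = name; this.index = index; }
        }

        public static void main(String[] args) {
            SampleColumn[] requested = {new SampleColumn("B", 2), new SampleColumn("A", 0)};
            SampleColumn[] sorted = requested.clone(); // defensive copy, as in the patch
            Arrays.sort(sorted, Comparator.comparingInt(c -> c.index));
            System.out.println(sorted[0].name + ", " + sorted[1].name); // A, B
        }
    }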
t.finalizedBy(deephavenDocker.endTask) t.systemProperties = [ 'gwt.persistentunitcachedir':layout.buildDirectory.dir('integrationTest-unitCache').get().asFile.absolutePath, 'webdriver.test.host':'host.docker.internal', @@ -159,6 +160,37 @@ def gwtIntegrationTest = tasks.register('gwtIntegrationTest', Test) { t -> t.scanForTestClasses = false } +tasks.register('manualGwtTest', Test) {t -> + t.description = '''Test wiring to run either unit or integration tests with a manual browser and an already-running server. +This makes it easier to run a tests repeatedly, either one at a time or as a class/suite, without +paying to start/stop selenium and deephaven each time. The port will remain constant at 8888 each +run to let breakpoints continue to work across repeated runs. + +To use this, first start a server on port 10000 with anonymous access enabled. Then, either select +a test in IntelliJ to run using the manualGwtTest task, or invoke from the command line with info +logging enabled and a specific test selected, e.g.: +./gradlew :web-client-api:manualGwtTest --info --tests io.deephaven.web.client.api.NullValueTestGwt + +When the URL appears to run in your browser, click on it, or refresh an existing browser window.''' + t.doFirst { + t.systemProperty 'gwt.args', ['-port 8888', + '-sourceLevel auto', + '-runStyle Manual:1', + '-ea', + '-style PRETTY', + '-setProperty dh.server=http://localhost:10000', + '-setProperty compiler.useSourceMaps=true', + "-war ${layout.buildDirectory.dir('manualTest-war').get().asFile.absolutePath}" + ].join(' ') + t.classpath += tasks.getByName('gwtCompile').src + } + t.systemProperties = [ + 'gwt.persistentunitcachedir':layout.buildDirectory.dir('integrationTest-unitCache').get().asFile.absolutePath, + ] + t.useJUnit() + t.scanForTestClasses = false +} + tasks.named('check').configure { dependsOn(gwtUnitTest, gwtIntegrationTest) } From cb4e5bf17d7f3eb985caf4e16659b0cb7ffd3c32 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 5 Jul 2024 13:40:46 -0500 Subject: [PATCH 114/219] Update web/client-api/client-api.gradle Co-authored-by: Mike Bender --- web/client-api/client-api.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/web/client-api/client-api.gradle b/web/client-api/client-api.gradle index 926ff1506b7..d9f93aecd89 100644 --- a/web/client-api/client-api.gradle +++ b/web/client-api/client-api.gradle @@ -171,7 +171,7 @@ a test in IntelliJ to run using the manualGwtTest task, or invoke from the comma logging enabled and a specific test selected, e.g.: ./gradlew :web-client-api:manualGwtTest --info --tests io.deephaven.web.client.api.NullValueTestGwt -When the URL appears to run in your browser, click on it, or refresh an existing browser window.''' +Click the URL that is printed out to run the test in your browser, or refresh an existing browser window.''' t.doFirst { t.systemProperty 'gwt.args', ['-port 8888', '-sourceLevel auto', From e918cab78de4e844b0da10a0e38ad6ad69f81e18 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 5 Jul 2024 13:41:14 -0500 Subject: [PATCH 115/219] Specify gwt tests go in the verification group --- web/client-api/client-api.gradle | 3 +++ 1 file changed, 3 insertions(+) diff --git a/web/client-api/client-api.gradle b/web/client-api/client-api.gradle index d9f93aecd89..472b28727eb 100644 --- a/web/client-api/client-api.gradle +++ b/web/client-api/client-api.gradle @@ -70,6 +70,7 @@ artifacts { } def gwtUnitTest = tasks.register('gwtUnitTest', Test) { t -> + t.group = 'verification' t.systemProperties = 
[ 'gwt.args': ['-sourceLevel auto', '-runStyle HtmlUnit', @@ -138,6 +139,7 @@ def stopSelenium = project.tasks.register('stopSelenium', DockerRemoveContainer) } def gwtIntegrationTest = tasks.register('gwtIntegrationTest', Test) { t -> + t.group = 'verification' t.dependsOn(deephavenDocker.portTask, seleniumHealthy) t.finalizedBy(deephavenDocker.endTask, stopSelenium) t.doFirst { @@ -161,6 +163,7 @@ def gwtIntegrationTest = tasks.register('gwtIntegrationTest', Test) { t -> } tasks.register('manualGwtTest', Test) {t -> + t.group = 'verification' t.description = '''Test wiring to run either unit or integration tests with a manual browser and an already-running server. This makes it easier to run a tests repeatedly, either one at a time or as a class/suite, without paying to start/stop selenium and deephaven each time. The port will remain constant at 8888 each From 709c928f849a5dda9945ad9afb179253363d87d8 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Mon, 8 Jul 2024 10:24:45 -0500 Subject: [PATCH 116/219] Specify jsinterop exports in tests, restricted to avoid a bug --- buildSrc/src/main/groovy/GwtTools.groovy | 1 + web/client-api/client-api.gradle | 9 +++++++++ 2 files changed, 10 insertions(+) diff --git a/buildSrc/src/main/groovy/GwtTools.groovy b/buildSrc/src/main/groovy/GwtTools.groovy index 23b5711d005..148eacfa0d4 100644 --- a/buildSrc/src/main/groovy/GwtTools.groovy +++ b/buildSrc/src/main/groovy/GwtTools.groovy @@ -63,6 +63,7 @@ class GwtTools { generateJsInteropExports = true // TODO move this down a line when we want to give clients js that is not super strict / rigged to blow checkAssertions = true + setExtraArgs('-includeJsInteropExports', 'io.deephaven.*') if (gwtDev) { saveSource = true extra = extras diff --git a/web/client-api/client-api.gradle b/web/client-api/client-api.gradle index 8aca979ebb6..c7d6bcfaa5f 100644 --- a/web/client-api/client-api.gradle +++ b/web/client-api/client-api.gradle @@ -95,6 +95,9 @@ def gwtUnitTest = tasks.register('gwtUnitTest', Test) { t -> '-runStyle HtmlUnit', '-ea', '-style PRETTY', + '-generateJsInteropExports', + '-includeJsInteropExports io.deephaven.*', + '-excludeJsInteropExports io.deephaven.web.client.api.widget.plot.*', "-war ${layout.buildDirectory.dir('unitTest-war').get().asFile.absolutePath}" ].join(' '), 'gwt.persistentunitcachedir': layout.buildDirectory.dir('unitTest-unitCache').get().asFile.absolutePath, @@ -167,6 +170,9 @@ def gwtIntegrationTest = tasks.register('gwtIntegrationTest', Test) { t -> "-runStyle io.deephaven.web.junit.RunStyleRemoteWebDriver:${webdriverUrl}?firefox", '-ea', '-style PRETTY', + '-generateJsInteropExports', + '-includeJsInteropExports io.deephaven.*', + '-excludeJsInteropExports io.deephaven.web.client.api.widget.plot.*', "-setProperty dh.server=http://${deephavenDocker.containerName.get()}:10000", "-war ${layout.buildDirectory.dir('integrationTest-war').get().asFile.absolutePath}" ].join(' ')) @@ -200,6 +206,9 @@ Click the URL that is printed out to run the test in your browser, or refresh an '-runStyle Manual:1', '-ea', '-style PRETTY', + '-generateJsInteropExports', + '-includeJsInteropExports io.deephaven.*', + '-excludeJsInteropExports io.deephaven.web.client.api.widget.plot.*', '-setProperty dh.server=http://localhost:10000', '-setProperty compiler.useSourceMaps=true', "-war ${layout.buildDirectory.dir('manualTest-war').get().asFile.absolutePath}" From 7835c4466ff980bad04d79ca384fc8169b8a57c3 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Mon, 8 Jul 2024 11:09:36 -0500 Subject: [PATCH 
117/219] Spotless, imports --- .../api/barrage/WebChunkReaderFactory.java | 5 +++-- .../subscription/AbstractTableSubscription.java | 2 +- .../subscription/TableViewportSubscription.java | 3 ++- .../client/api/subscription/ViewportData.java | 16 +--------------- 4 files changed, 7 insertions(+), 19 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java index 5cc12ffaf68..ba0c50f767c 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebChunkReaderFactory.java @@ -95,7 +95,7 @@ public ChunkReader getReader(StreamReaderOptions options, int factor, TypeInfo t is, fieldNodeIter, bufferInfoIter, - (val, off, len) -> new BigIntegerWrapper(new BigInteger(val, off, len)), + (val, off, len) -> new BigIntegerWrapper(new BigInteger(val, off, len)), outChunk, outOffset, totalRows); } if (typeInfo.type() == BigDecimalWrapper.class) { @@ -111,7 +111,8 @@ public ChunkReader getReader(StreamReaderOptions options, int factor, TypeInfo t final byte b3 = buf[offset + 2]; final byte b4 = buf[offset + 3]; final int scale = b4 << 24 | (b3 & 0xFF) << 16 | (b2 & 0xFF) << 8 | (b1 & 0xFF); - BigDecimal bigDecimal = new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale); + BigDecimal bigDecimal = + new BigDecimal(new BigInteger(buf, offset + 4, length - 4), scale); return new BigDecimalWrapper(bigDecimal); }, outChunk, outOffset, totalRows); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java index c3dd45835c8..50f460cbdbd 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java @@ -431,7 +431,7 @@ private void onViewportChange(RangeSet serverViewport, BitSet serverColumns, boo || (serverColumns == null && columnBitSet.cardinality() == state.getTableDef().getColumns().length) || (serverColumns != null && serverColumns.equals(this.columnBitSet))) && (serverViewport == null && this.viewportRowSet == null - || (serverViewport != null && serverViewport.equals(this.viewportRowSet))) + || (serverViewport != null && serverViewport.equals(this.viewportRowSet))) && serverReverseViewport == isReverseViewport; } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java index c822b4fbd9e..643992effa8 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java @@ -219,7 +219,8 @@ public void setInternalViewport(double firstRow, double lastRow, Column[] column return; } if (columns == null) { - // Null columns means the user wants all columns, only supported on viewports. This can't be done until the CTS has resolved + // Null columns means the user wants all columns, only supported on viewports. 
This can't be done until the + // CTS has resolved columns = state().getColumns(); } else { // If columns were provided, sort a copy so that we have them in the expected order diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java index 15894dd6ce5..7d4343a4704 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java @@ -5,22 +5,8 @@ import com.vertispan.tsdefs.annotations.TsInterface; import com.vertispan.tsdefs.annotations.TsName; -import elemental2.core.JsArray; -import elemental2.core.JsObject; -import io.deephaven.web.client.api.*; -import io.deephaven.web.shared.data.*; -import jsinterop.annotations.JsFunction; +import io.deephaven.web.client.api.TableData; import jsinterop.annotations.JsProperty; -import jsinterop.base.Any; -import jsinterop.base.Js; -import jsinterop.base.JsArrayLike; - -import java.math.BigDecimal; -import java.math.BigInteger; -import java.util.HashSet; -import java.util.Iterator; -import java.util.PrimitiveIterator.OfLong; -import java.util.Set; /** * Extends {@link TableData}, but only contains data in the current viewport. The only API change from TableData is that From 3ffcc2797f9a99c6782b377f163023d89b6879bc Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 9 Jul 2024 16:14:15 -0500 Subject: [PATCH 118/219] Restore BBIS.readUTF() --- .../io/streams/ByteBufferInputStream.java | 63 +++++++++---------- .../super/java/io/UTFDataFormatException.java | 11 ++++ 2 files changed, 42 insertions(+), 32 deletions(-) create mode 100644 web/client-api/src/main/resources/io/deephaven/web/super/java/io/UTFDataFormatException.java diff --git a/IO/src/main/java/io/deephaven/io/streams/ByteBufferInputStream.java b/IO/src/main/java/io/deephaven/io/streams/ByteBufferInputStream.java index 7b78bc802a2..0fef7bc5ee7 100644 --- a/IO/src/main/java/io/deephaven/io/streams/ByteBufferInputStream.java +++ b/IO/src/main/java/io/deephaven/io/streams/ByteBufferInputStream.java @@ -299,37 +299,36 @@ public String readLine() throws IOException { @Override public String readUTF() throws IOException { - throw new UnsupportedOperationException("readUTF"); - // int length = 0; - // int total = readUnsignedShort(); - // - // final char[] chars = new char[total]; - // - // while (total > 0) { - // final int b1 = buf.get(); - // if ((b1 & 0x80) == 0) { - // chars[length++] = (char) (b1 & 0xff); - // total--; - // } else if ((b1 & 0xe0) == 0xc0) { - // final int b2 = buf.get(); - // if ((b2 & 0xc0) != 0x80) { - // throw new UTFDataFormatException("malformed second byte " + b2); - // } - // chars[length++] = (char) (((b1 & 0x1F) << 6) | (b2 & 0x3F)); - // total -= 2; - // } else if ((b1 & 0xf0) == 0xe0) { - // final int b2 = buf.get(); - // final int b3 = buf.get(); - // if ((b2 & 0xc0) != 0x80 || (b3 & 0xc0) != 0x80) { - // throw new UTFDataFormatException("malformed second byte " + b2 + " or third byte " + b3); - // } - // chars[length++] = (char) (((b1 & 0x0F) << 12) | ((b2 & 0x3F) << 6) | (b3 & 0x3F)); - // total -= 3; - // } else { - // throw new UTFDataFormatException("malformed first byte " + b1); - // } - // } - // - // return new String(chars, 0, length); + int length = 0; + int total = readUnsignedShort(); + + final char[] chars = new char[total]; + + while (total > 0) { + final int b1 = buf.get(); + if ((b1 & 0x80) 
== 0) { + chars[length++] = (char) (b1 & 0xff); + total--; + } else if ((b1 & 0xe0) == 0xc0) { + final int b2 = buf.get(); + if ((b2 & 0xc0) != 0x80) { + throw new UTFDataFormatException("malformed second byte " + b2); + } + chars[length++] = (char) (((b1 & 0x1F) << 6) | (b2 & 0x3F)); + total -= 2; + } else if ((b1 & 0xf0) == 0xe0) { + final int b2 = buf.get(); + final int b3 = buf.get(); + if ((b2 & 0xc0) != 0x80 || (b3 & 0xc0) != 0x80) { + throw new UTFDataFormatException("malformed second byte " + b2 + " or third byte " + b3); + } + chars[length++] = (char) (((b1 & 0x0F) << 12) | ((b2 & 0x3F) << 6) | (b3 & 0x3F)); + total -= 3; + } else { + throw new UTFDataFormatException("malformed first byte " + b1); + } + } + + return new String(chars, 0, length); } } diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/java/io/UTFDataFormatException.java b/web/client-api/src/main/resources/io/deephaven/web/super/java/io/UTFDataFormatException.java new file mode 100644 index 00000000000..e2c6a3d3bd8 --- /dev/null +++ b/web/client-api/src/main/resources/io/deephaven/web/super/java/io/UTFDataFormatException.java @@ -0,0 +1,11 @@ +package java.io; + +public class UTFDataFormatException extends IOException { + public UTFDataFormatException() { + super(); + } + + public UTFDataFormatException(String message) { + super(message); + } +} From 597ab637a668622a8b0681e1b5083981788b0ff3 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 9 Jul 2024 16:22:08 -0500 Subject: [PATCH 119/219] Add empty impls of flexbuffers, since we can't support wrapping byte[]s --- .../flatbuffers/super/com/google/flatbuffers/FlexBuffers.java | 4 ++++ .../super/com/google/flatbuffers/FlexBuffersBuilder.java | 4 ++++ 2 files changed, 8 insertions(+) create mode 100644 web/client-api/src/main/resources/com/google/flatbuffers/super/com/google/flatbuffers/FlexBuffers.java create mode 100644 web/client-api/src/main/resources/com/google/flatbuffers/super/com/google/flatbuffers/FlexBuffersBuilder.java diff --git a/web/client-api/src/main/resources/com/google/flatbuffers/super/com/google/flatbuffers/FlexBuffers.java b/web/client-api/src/main/resources/com/google/flatbuffers/super/com/google/flatbuffers/FlexBuffers.java new file mode 100644 index 00000000000..63fa60c02af --- /dev/null +++ b/web/client-api/src/main/resources/com/google/flatbuffers/super/com/google/flatbuffers/FlexBuffers.java @@ -0,0 +1,4 @@ +package com.google.flatbuffers; + +public class FlexBuffers { +} \ No newline at end of file diff --git a/web/client-api/src/main/resources/com/google/flatbuffers/super/com/google/flatbuffers/FlexBuffersBuilder.java b/web/client-api/src/main/resources/com/google/flatbuffers/super/com/google/flatbuffers/FlexBuffersBuilder.java new file mode 100644 index 00000000000..479fdbb1b78 --- /dev/null +++ b/web/client-api/src/main/resources/com/google/flatbuffers/super/com/google/flatbuffers/FlexBuffersBuilder.java @@ -0,0 +1,4 @@ +package com.google.flatbuffers; + +public class FlexBuffersBuilder { +} \ No newline at end of file From 2fe21d5f2721a089595f53e3eb09d9a7877748c4 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 9 Jul 2024 20:01:57 -0500 Subject: [PATCH 120/219] Bump to snapshot again --- gradle/libs.versions.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml index 0a130cb8481..7cc90800a52 100644 --- a/gradle/libs.versions.toml +++ b/gradle/libs.versions.toml @@ -76,7 +76,7 @@ tdunning = "3.2" trove = "3.0.3" undercouch = "2.15.1" 
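
A quick round-trip check for the restored decoder: readUTF() consumes the same wire format that java.io.DataOutputStream.writeUTF() produces — a two-byte length prefix followed by modified UTF-8 (one- to three-byte sequences only), with malformed bytes surfacing as the UTFDataFormatException emulated for GWT in the same patch. A minimal sketch, assuming ByteBufferInputStream exposes a ByteBuffer-accepting constructor (an assumption; adapt to the actual API):

    import io.deephaven.io.streams.ByteBufferInputStream;

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.nio.ByteBuffer;

    public class ReadUtfRoundTrip {
        public static void main(String[] args) throws IOException {
            // writeUTF emits an unsigned-short byte count, then modified UTF-8;
            // "é" encodes as two bytes and the snowman as three, exercising
            // both multi-byte branches of readUTF().
            final ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            new DataOutputStream(bytes).writeUTF("héllo \u2603");

            // Hypothetical constructor; the patch only shows readUTF() itself.
            final ByteBufferInputStream in =
                    new ByteBufferInputStream(ByteBuffer.wrap(bytes.toByteArray()));
            System.out.println(in.readUTF()); // héllo ☃
        }
    }
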
univocity = "2.6.0" -vertispan-nio = "1.0-alpha-1" +vertispan-nio = "0.1-SNAPSHOT" vertispan-ts-defs = "1.0.0-RC4" # test versions From 8cbeb842e2b266143bf4aecb204ba1438fac1271 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 9 Jul 2024 21:37:59 -0500 Subject: [PATCH 121/219] refactor: Migrate zero-length arrays from CollectionUtil to ArrayTypeUtils Partial #188 --- .../generator/EnumStringGenerator.java | 3 +- .../datastructures/util/CollectionUtil.java | 13 +- .../plot/util/GenerateFigureImmutable.java | 2 +- .../main/java/io/deephaven/plot/AxesImpl.java | 7 +- .../io/deephaven/plot/BaseFigureImpl.java | 3 +- .../java/io/deephaven/plot/FigureImpl.java | 180 +++++++++--------- .../java/io/deephaven/plot/FigureWidget.java | 3 +- .../io/deephaven/plot/util/PlotUtils.java | 3 +- .../deephaven/util/codec/BigDecimalCodec.java | 4 +- .../io/deephaven/util/codec/MapCodec.java | 4 +- .../io/deephaven/util/codec/ObjectCodec.java | 4 +- .../util/codec/SimpleByteArrayCodec.java | 4 +- .../util/codec/ZonedDateTimeCodec.java | 4 +- .../util/text/SplitIgnoreQuotes.java | 4 +- .../deephaven/util/type/ArrayTypeUtils.java | 1 + .../util/files/TestDirWatchService.java | 11 +- .../engine/table/ModifiedColumnSet.java | 3 +- .../benchmark/engine/LastByBenchmark.java | 4 +- .../engine/PercentileByBenchmark.java | 4 +- .../benchmark/engine/SumByBenchmark.java | 4 +- .../io/deephaven/stringset/HashStringSet.java | 3 +- .../engine/context/QueryCompilerImpl.java | 3 +- .../engine/table/impl/DeferredViewTable.java | 4 +- .../engine/table/impl/KeyedTableListener.java | 3 +- .../engine/table/impl/NaturalJoinHelper.java | 3 +- .../engine/table/impl/QueryTable.java | 9 +- .../engine/table/impl/SortHelpers.java | 4 +- .../engine/table/impl/SortListener.java | 4 +- .../engine/table/impl/SparseSelect.java | 10 +- .../engine/table/impl/TableDefaults.java | 11 +- .../engine/table/impl/WhereListener.java | 3 +- .../table/impl/WouldMatchOperation.java | 3 +- .../table/impl/by/AggregationProcessor.java | 18 +- .../table/impl/by/GroupByChunkedOperator.java | 3 +- .../IterativeChunkedAggregationOperator.java | 3 +- .../locations/impl/AbstractTableLocation.java | 3 +- .../perf/QueryPerformanceRecorderState.java | 3 +- .../impl/select/analyzers/BaseLayer.java | 4 +- .../analyzers/SelectAndViewAnalyzer.java | 3 +- .../impl/select/codegen/FormulaAnalyzer.java | 7 +- .../select/python/FormulaColumnPython.java | 9 +- .../impl/util/ColumnsToRowsTransform.java | 5 +- .../io/deephaven/engine/util/TableDiff.java | 5 +- .../deephaven/engine/util/TableShowTools.java | 3 +- .../io/deephaven/engine/util/TableTools.java | 7 +- .../engine/util/TotalsTableBuilder.java | 5 +- .../table/impl/DeferredViewTableTest.java | 10 +- .../table/impl/QueryTableAggregationTest.java | 3 +- .../engine/table/impl/QueryTableAjTest.java | 8 +- .../impl/QueryTableCrossJoinTestBase.java | 5 +- .../engine/table/impl/QueryTableJoinTest.java | 7 +- .../impl/QueryTableLeftOuterJoinTestBase.java | 5 +- .../table/impl/QueryTableMultiJoinTest.java | 20 +- .../table/impl/QueryTableNaturalJoinTest.java | 4 +- ...QueryTableStaticNaturalJoinRandomTest.java | 3 +- .../engine/table/impl/QueryTableTest.java | 24 +-- .../table/impl/SelectOverheadLimiter.java | 2 - .../impl/TestPartitionAwareSourceTable.java | 4 +- .../table/impl/updateby/BaseUpdateByTest.java | 6 +- .../table/impl/updateby/TestCumMinMax.java | 4 +- .../table/impl/updateby/TestCumProd.java | 4 +- .../table/impl/updateby/TestCumSum.java | 4 +- .../impl/updateby/TestRollingProduct.java | 3 +- 
.../deephaven/engine/util/TestTableTools.java | 6 +- .../stream/TestStreamToBlinkTableAdapter.java | 4 +- .../engine/testutil/GenerateTableUpdates.java | 3 +- .../generator/UniqueStringArrayGenerator.java | 3 +- .../java/io/deephaven/tuple/ArrayTuple.java | 4 +- .../barrage/util/BarrageProtoUtil.java | 4 +- .../main/java/io/deephaven/csv/CsvTools.java | 5 +- .../base/BigDecimalParquetBytesCodec.java | 4 +- .../base/BigIntegerParquetBytesCodec.java | 4 +- .../table/ops/CreateInputTableGrpcImpl.java | 3 +- .../table/ops/HeadOrTailByGrpcImpl.java | 3 +- .../table/ops/RunChartDownsampleGrpcImpl.java | 3 +- .../ops/UnstructuredFilterTableGrpcImpl.java | 3 +- .../table/ops/UpdateOrSelectGrpcImpl.java | 3 +- .../validation/ColumnExpressionValidator.java | 3 +- 78 files changed, 268 insertions(+), 306 deletions(-) diff --git a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/EnumStringGenerator.java b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/EnumStringGenerator.java index 0217bdd9040..b313b8c1d80 100644 --- a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/EnumStringGenerator.java +++ b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/EnumStringGenerator.java @@ -3,7 +3,6 @@ // package io.deephaven.benchmarking.generator; -import io.deephaven.datastructures.util.CollectionUtil; import io.deephaven.benchmarking.generator.random.ExtendedRandom; import org.jetbrains.annotations.NotNull; @@ -50,7 +49,7 @@ public void init(@NotNull ExtendedRandom random) { enums.add(super.get()); } - enumVals = enums.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + enumVals = enums.toArray(String[]::new); } public String get() { diff --git a/DataStructures/src/main/java/io/deephaven/datastructures/util/CollectionUtil.java b/DataStructures/src/main/java/io/deephaven/datastructures/util/CollectionUtil.java index a6b3f6b99df..629860e6c02 100644 --- a/DataStructures/src/main/java/io/deephaven/datastructures/util/CollectionUtil.java +++ b/DataStructures/src/main/java/io/deephaven/datastructures/util/CollectionUtil.java @@ -16,16 +16,27 @@ */ public class CollectionUtil { - public static final byte[] ZERO_LENGTH_BYTE_ARRAY = new byte[0]; + @Deprecated(forRemoval = true) public static final short[] ZERO_LENGTH_SHORT_ARRAY = new short[0]; + @Deprecated(forRemoval = true) + public static final byte[] ZERO_LENGTH_BYTE_ARRAY = new byte[0]; + @Deprecated(forRemoval = true) public static final int[] ZERO_LENGTH_INT_ARRAY = new int[0]; + @Deprecated(forRemoval = true) public static final int[][] ZERO_LENGTH_INT_ARRAY_ARRAY = new int[0][]; + @Deprecated(forRemoval = true) public static final long[] ZERO_LENGTH_LONG_ARRAY = new long[0]; + @Deprecated(forRemoval = true) public static final float[] ZERO_LENGTH_FLOAT_ARRAY = new float[0]; + @Deprecated(forRemoval = true) public static final double[] ZERO_LENGTH_DOUBLE_ARRAY = new double[0]; + @Deprecated(forRemoval = true) public static final double[][] ZERO_LENGTH_DOUBLE_ARRAY_ARRAY = new double[0][]; + @Deprecated(forRemoval = true) public static final Object[] ZERO_LENGTH_OBJECT_ARRAY = new Object[0]; + @Deprecated(forRemoval = true) public static final String[] ZERO_LENGTH_STRING_ARRAY = new String[0]; + @Deprecated(forRemoval = true) public static final String[][] ZERO_LENGTH_STRING_ARRAY_ARRAY = new String[0][]; // ---------------------------------------------------------------- diff --git a/Generators/src/main/java/io/deephaven/plot/util/GenerateFigureImmutable.java 
b/Generators/src/main/java/io/deephaven/plot/util/GenerateFigureImmutable.java index 8731b1c7969..f8d685549b6 100644 --- a/Generators/src/main/java/io/deephaven/plot/util/GenerateFigureImmutable.java +++ b/Generators/src/main/java/io/deephaven/plot/util/GenerateFigureImmutable.java @@ -834,7 +834,7 @@ private static String createMultiSeriesArgs(JavaFunction f) { final String[] names = f.getParameterNames(); String args = String.join(", ", names); if (!names[names.length - 1].equals("keys")) { - args += ", io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY"; + args += ", io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY"; } return args; diff --git a/Plot/src/main/java/io/deephaven/plot/AxesImpl.java b/Plot/src/main/java/io/deephaven/plot/AxesImpl.java index dab4dbe75ce..235c18b943c 100644 --- a/Plot/src/main/java/io/deephaven/plot/AxesImpl.java +++ b/Plot/src/main/java/io/deephaven/plot/AxesImpl.java @@ -5,7 +5,6 @@ import io.deephaven.api.ColumnName; import io.deephaven.api.agg.Aggregation; -import io.deephaven.datastructures.util.CollectionUtil; import io.deephaven.engine.table.impl.MemoizedOperationKey; import io.deephaven.plot.axisformatters.AxisFormat; import io.deephaven.plot.axisformatters.NanosAxisFormat; @@ -1583,7 +1582,7 @@ public IntervalXYDataSeriesArray histPlot(final Comparable seriesName, final Sel final List allCols = new ArrayList<>(byCols); allCols.add(x); final SwappableTable ht = sds.getSwappableTable(seriesName, chart, tableTransform, - allCols.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); + allCols.toArray(String[]::new)); return histPlot(seriesName, ht); } @@ -1607,7 +1606,7 @@ public IntervalXYDataSeriesArray histPlot(final Comparable seriesName, final Sel final List allCols = new ArrayList<>(byCols); allCols.add(x); final SwappableTable ht = sds.getSwappableTable(seriesName, chart, tableTransform, - allCols.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); + allCols.toArray(String[]::new)); return histPlot(seriesName, ht); } @@ -1653,7 +1652,7 @@ public CategoryDataSeriesSwappablePartitionedTable catHistPlot(final Comparable } final Function tableTransform = (Function & Serializable) t -> PlotUtils - .createCategoryHistogramTable(t, cols.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); + .createCategoryHistogramTable(t, cols.toArray(String[]::new)); final SwappableTable counts = sds.getSwappableTable(seriesName, chart, tableTransform, categories, CategoryDataSeries.CAT_SERIES_ORDER_COLUMN); final CategoryDataSeriesSwappablePartitionedTable ds = new CategoryDataSeriesSwappablePartitionedTable(this, diff --git a/Plot/src/main/java/io/deephaven/plot/BaseFigureImpl.java b/Plot/src/main/java/io/deephaven/plot/BaseFigureImpl.java index 28faaf0f4a7..278d52c0639 100644 --- a/Plot/src/main/java/io/deephaven/plot/BaseFigureImpl.java +++ b/Plot/src/main/java/io/deephaven/plot/BaseFigureImpl.java @@ -5,7 +5,6 @@ import io.deephaven.api.Selectable; import io.deephaven.configuration.Configuration; -import io.deephaven.datastructures.util.CollectionUtil; import io.deephaven.engine.table.PartitionedTable; import io.deephaven.plot.errors.*; import io.deephaven.plot.util.functions.FigureImplFunction; @@ -491,7 +490,7 @@ public void consolidatePartitionedTables() { final Map, PartitionedTable> byColMap = new HashMap<>(); for (final PartitionedTableHandle h : hs) { final Set keyColumns = h.getKeyColumns(); - final String[] keyColumnsArray = keyColumns.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + final String[] keyColumnsArray = 
keyColumns.toArray(String[]::new); final PartitionedTable partitionedTable = byColMap.computeIfAbsent(keyColumns, x -> { diff --git a/Plot/src/main/java/io/deephaven/plot/FigureImpl.java b/Plot/src/main/java/io/deephaven/plot/FigureImpl.java index c6e8a2613f9..ebbbf0b6b72 100644 --- a/Plot/src/main/java/io/deephaven/plot/FigureImpl.java +++ b/Plot/src/main/java/io/deephaven/plot/FigureImpl.java @@ -3172,7 +3172,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).errorBarColor( errorBarColor); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).errorBarColor(errorBarColor, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).errorBarColor(errorBarColor, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl errorBarColor( int errorBarColor )'", figure); @@ -3186,7 +3186,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).errorBarColor( errorBarColor); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).errorBarColor(errorBarColor, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).errorBarColor(errorBarColor, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl errorBarColor( io.deephaven.gui.color.Paint errorBarColor )'", figure); @@ -3200,7 +3200,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).errorBarColor( errorBarColor); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).errorBarColor(errorBarColor, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).errorBarColor(errorBarColor, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl errorBarColor( java.lang.String errorBarColor )'", figure); @@ -3214,7 +3214,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).gradientVisible( gradientVisible); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).gradientVisible(gradientVisible, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).gradientVisible(gradientVisible, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. 
seriesType=" + series.getClass() + " method='@Override public FigureImpl gradientVisible( boolean gradientVisible )'", figure); @@ -3228,7 +3228,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).lineColor( color); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).lineColor(color, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).lineColor(color, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl lineColor( int color )'", figure); @@ -3242,7 +3242,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).lineColor( color); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).lineColor(color, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).lineColor(color, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl lineColor( io.deephaven.gui.color.Paint color )'", figure); @@ -3256,7 +3256,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).lineColor( color); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).lineColor(color, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).lineColor(color, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl lineColor( java.lang.String color )'", figure); @@ -3270,7 +3270,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).lineStyle( lineStyle); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).lineStyle(lineStyle, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).lineStyle(lineStyle, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. 
seriesType=" + series.getClass() + " method='@Override public FigureImpl lineStyle( io.deephaven.plot.LineStyle lineStyle )'", figure); @@ -3284,7 +3284,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).linesVisible( visible); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).linesVisible(visible, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).linesVisible(visible, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl linesVisible( java.lang.Boolean visible )'", figure); @@ -3298,7 +3298,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).pointColor( pointColor); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointColor(pointColor, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointColor(pointColor, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointColor( int pointColor )'", figure); @@ -3312,7 +3312,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).pointColor( pointColor); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointColor(pointColor, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointColor(pointColor, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointColor( io.deephaven.gui.color.Paint pointColor )'", figure); @@ -3326,7 +3326,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).pointColor( pointColor); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointColor(pointColor, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointColor(pointColor, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. 
seriesType=" + series.getClass() + " method='@Override public FigureImpl pointColor( java.lang.String pointColor )'", figure); @@ -3340,7 +3340,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).pointLabel( pointLabel); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointLabel(pointLabel, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointLabel(pointLabel, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointLabel( java.lang.Object pointLabel )'", figure); @@ -3354,7 +3354,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).pointLabelFormat( pointLabelFormat); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointLabelFormat(pointLabelFormat, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointLabelFormat(pointLabelFormat, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointLabelFormat( java.lang.String pointLabelFormat )'", figure); @@ -3368,7 +3368,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).pointShape( pointShape); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointShape(pointShape, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointShape(pointShape, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointShape( io.deephaven.gui.shape.Shape pointShape )'", figure); @@ -3382,7 +3382,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).pointShape( pointShape); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointShape(pointShape, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointShape(pointShape, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. 
seriesType=" + series.getClass() + " method='@Override public FigureImpl pointShape( java.lang.String pointShape )'", figure); @@ -3396,7 +3396,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).pointSize( pointSize); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointSize(pointSize, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointSize(pointSize, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointSize( double pointSize )'", figure); @@ -3410,7 +3410,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).pointSize( pointSize); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointSize(pointSize, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointSize(pointSize, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointSize( int pointSize )'", figure); @@ -3424,7 +3424,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).pointSize( pointSize); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointSize(pointSize, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointSize(pointSize, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointSize( java.lang.Number pointSize )'", figure); @@ -3438,7 +3438,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).pointSize( pointSize); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointSize(pointSize, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointSize(pointSize, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. 
seriesType=" + series.getClass() + " method='@Override public FigureImpl pointSize( long pointSize )'", figure); @@ -3452,7 +3452,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).pointsVisible( visible); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointsVisible(visible, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointsVisible(visible, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointsVisible( java.lang.Boolean visible )'", figure); @@ -3466,7 +3466,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).seriesColor( color); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).seriesColor(color, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).seriesColor(color, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl seriesColor( int color )'", figure); @@ -3480,7 +3480,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).seriesColor( color); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).seriesColor(color, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).seriesColor(color, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl seriesColor( io.deephaven.gui.color.Paint color )'", figure); @@ -3494,7 +3494,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).seriesColor( color); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).seriesColor(color, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).seriesColor(color, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. 
seriesType=" + series.getClass() + " method='@Override public FigureImpl seriesColor( java.lang.String color )'", figure); @@ -3508,7 +3508,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).toolTipPattern( toolTipPattern); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).toolTipPattern(toolTipPattern, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).toolTipPattern(toolTipPattern, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl toolTipPattern( java.lang.String toolTipPattern )'", figure); @@ -3522,7 +3522,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).xToolTipPattern( xToolTipPattern); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).xToolTipPattern(xToolTipPattern, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).xToolTipPattern(xToolTipPattern, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl xToolTipPattern( java.lang.String xToolTipPattern )'", figure); @@ -3536,7 +3536,7 @@ private FigureImpl applyFunctionalProperties() { DataSeries result = ((DataSeries) series).yToolTipPattern( yToolTipPattern); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).yToolTipPattern(yToolTipPattern, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).yToolTipPattern(yToolTipPattern, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl yToolTipPattern( java.lang.String yToolTipPattern )'", figure); @@ -3550,7 +3550,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).group( group); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).group(group, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).group(group, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. 
seriesType=" + series.getClass() + " method='@Override public FigureImpl group( int group )'", figure); @@ -3564,7 +3564,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).piePercentLabelFormat( pieLabelFormat); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).piePercentLabelFormat(pieLabelFormat, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).piePercentLabelFormat(pieLabelFormat, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl piePercentLabelFormat( java.lang.String pieLabelFormat )'", figure); @@ -3578,7 +3578,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointColor( t, category, pointColor); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointColor(t, category, pointColor, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointColor(t, category, pointColor, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointColor( io.deephaven.engine.table.Table t, java.lang.String category, java.lang.String pointColor )'", figure); @@ -3592,7 +3592,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointColor( sds, category, pointColor); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointColor(sds, category, pointColor, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointColor(sds, category, pointColor, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointColor( io.deephaven.plot.filters.SelectableDataSet sds, java.lang.String category, java.lang.String pointColor )'", figure); @@ -3606,7 +3606,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointColor( category, pointColor); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointColor(category, pointColor, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointColor(category, pointColor, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. 
seriesType=" + series.getClass() + " method='@Override public FigureImpl pointColor( java.lang.Comparable category, int pointColor )'", figure); @@ -3620,7 +3620,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointColor( category, pointColor); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointColor(category, pointColor, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointColor(category, pointColor, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointColor( java.lang.Comparable category, io.deephaven.gui.color.Paint pointColor )'", figure); @@ -3634,7 +3634,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointColor( category, pointColor); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointColor(category, pointColor, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointColor(category, pointColor, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointColor( java.lang.Comparable category, java.lang.String pointColor )'", figure); @@ -3648,7 +3648,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointLabel( t, category, pointLabel); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointLabel(t, category, pointLabel, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointLabel(t, category, pointLabel, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointLabel( io.deephaven.engine.table.Table t, java.lang.String category, java.lang.String pointLabel )'", figure); @@ -3662,7 +3662,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointLabel( sds, category, pointLabel); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointLabel(sds, category, pointLabel, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointLabel(sds, category, pointLabel, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. 
seriesType=" + series.getClass() + " method='@Override public FigureImpl pointLabel( io.deephaven.plot.filters.SelectableDataSet sds, java.lang.String category, java.lang.String pointLabel )'", figure); @@ -3676,7 +3676,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointLabel( category, pointLabel); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointLabel(category, pointLabel, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointLabel(category, pointLabel, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointLabel( java.lang.Comparable category, java.lang.Object pointLabel )'", figure); @@ -3690,7 +3690,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointShape( pointShapes); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointShape(pointShapes, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointShape(pointShapes, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointShape( groovy.lang.Closure pointShapes )'", figure); @@ -3704,7 +3704,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointShape( t, category, pointShape); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointShape(t, category, pointShape, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointShape(t, category, pointShape, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointShape( io.deephaven.engine.table.Table t, java.lang.String category, java.lang.String pointShape )'", figure); @@ -3718,7 +3718,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointShape( sds, category, pointShape); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointShape(sds, category, pointShape, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointShape(sds, category, pointShape, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. 
seriesType=" + series.getClass() + " method='@Override public FigureImpl pointShape( io.deephaven.plot.filters.SelectableDataSet sds, java.lang.String category, java.lang.String pointShape )'", figure); @@ -3732,7 +3732,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointShape( category, pointShape); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointShape(category, pointShape, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointShape(category, pointShape, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointShape( java.lang.Comparable category, io.deephaven.gui.shape.Shape pointShape )'", figure); @@ -3746,7 +3746,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointShape( category, pointShape); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointShape(category, pointShape, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointShape(category, pointShape, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointShape( java.lang.Comparable category, java.lang.String pointShape )'", figure); @@ -3760,7 +3760,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointShape( pointShapes); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointShape(pointShapes, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointShape(pointShapes, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointShape( java.util.function.Function pointShapes )'", figure); @@ -3774,7 +3774,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointSize( t, category, pointSize); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointSize(t, category, pointSize, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointSize(t, category, pointSize, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. 
seriesType=" + series.getClass() + " method='@Override public FigureImpl pointSize( io.deephaven.engine.table.Table t, java.lang.String category, java.lang.String pointSize )'", figure); @@ -3788,7 +3788,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointSize( sds, category, pointSize); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointSize(sds, category, pointSize, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointSize(sds, category, pointSize, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointSize( io.deephaven.plot.filters.SelectableDataSet sds, java.lang.String category, java.lang.String pointSize )'", figure); @@ -3802,7 +3802,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointSize( category, pointSize); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointSize(category, pointSize, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointSize(category, pointSize, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointSize( java.lang.Comparable category, double pointSize )'", figure); @@ -3816,7 +3816,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointSize( category, pointSize); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointSize(category, pointSize, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointSize(category, pointSize, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointSize( java.lang.Comparable category, int pointSize )'", figure); @@ -3830,7 +3830,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointSize( category, pointSize); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointSize(category, pointSize, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointSize(category, pointSize, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. 
seriesType=" + series.getClass() + " method='@Override public FigureImpl pointSize( java.lang.Comparable category, java.lang.Number pointSize )'", figure); @@ -3844,7 +3844,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointSize( category, pointSize); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointSize(category, pointSize, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointSize(category, pointSize, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointSize( java.lang.Comparable category, long pointSize )'", figure); @@ -4628,7 +4628,7 @@ private FigureImpl applyFunctionalProperties() { XYDataSeries result = ((XYDataSeries) series).pointColor( pointColors); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointColor(pointColors, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointColor(pointColors, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointColor( int... pointColors )'", figure); @@ -4642,7 +4642,7 @@ private FigureImpl applyFunctionalProperties() { XYDataSeries result = ((XYDataSeries) series).pointColor( t, pointColors); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointColor(t, pointColors, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointColor(t, pointColors, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointColor( io.deephaven.engine.table.Table t, java.lang.String pointColors )'", figure); @@ -4656,7 +4656,7 @@ private FigureImpl applyFunctionalProperties() { XYDataSeries result = ((XYDataSeries) series).pointColor( pointColor); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointColor(pointColor, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointColor(pointColor, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointColor( io.deephaven.gui.color.Paint... 
pointColor )'", figure); @@ -4670,7 +4670,7 @@ private FigureImpl applyFunctionalProperties() { XYDataSeries result = ((XYDataSeries) series).pointColor( sds, pointColors); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointColor(sds, pointColors, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointColor(sds, pointColors, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointColor( io.deephaven.plot.filters.SelectableDataSet sds, java.lang.String pointColors )'", figure); @@ -4684,7 +4684,7 @@ private FigureImpl applyFunctionalProperties() { XYDataSeries result = ((XYDataSeries) series).pointColor( pointColors); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointColor(pointColors, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointColor(pointColors, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointColor( java.lang.Integer... pointColors )'", figure); @@ -4698,7 +4698,7 @@ private FigureImpl applyFunctionalProperties() { XYDataSeries result = ((XYDataSeries) series).pointColor( pointColors); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointColor(pointColors, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointColor(pointColors, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointColor( java.lang.String... pointColors )'", figure); @@ -4712,7 +4712,7 @@ private FigureImpl applyFunctionalProperties() { XYDataSeries result = ((XYDataSeries) series).pointColorInteger( colors); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointColorInteger(colors, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointColorInteger(colors, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. 
seriesType=" + series.getClass() + " method='@Override public FigureImpl pointColorInteger( io.deephaven.plot.datasets.data.IndexableData colors )'", figure); @@ -4726,7 +4726,7 @@ private FigureImpl applyFunctionalProperties() { XYDataSeries result = ((XYDataSeries) series).pointLabel( t, pointLabel); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointLabel(t, pointLabel, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointLabel(t, pointLabel, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointLabel( io.deephaven.engine.table.Table t, java.lang.String pointLabel )'", figure); @@ -4740,7 +4740,7 @@ private FigureImpl applyFunctionalProperties() { XYDataSeries result = ((XYDataSeries) series).pointLabel( pointLabels); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointLabel(pointLabels, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointLabel(pointLabels, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointLabel( io.deephaven.plot.datasets.data.IndexableData pointLabels )'", figure); @@ -4754,7 +4754,7 @@ private FigureImpl applyFunctionalProperties() { XYDataSeries result = ((XYDataSeries) series).pointLabel( sds, pointLabel); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointLabel(sds, pointLabel, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointLabel(sds, pointLabel, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointLabel( io.deephaven.plot.filters.SelectableDataSet sds, java.lang.String pointLabel )'", figure); @@ -4768,7 +4768,7 @@ private FigureImpl applyFunctionalProperties() { XYDataSeries result = ((XYDataSeries) series).pointLabel( pointLabels); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointLabel(pointLabels, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointLabel(pointLabels, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointLabel( java.lang.Object... 
pointLabels )'", figure); @@ -4782,7 +4782,7 @@ private FigureImpl applyFunctionalProperties() { XYDataSeries result = ((XYDataSeries) series).pointShape( t, pointShape); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointShape(t, pointShape, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointShape(t, pointShape, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointShape( io.deephaven.engine.table.Table t, java.lang.String pointShape )'", figure); @@ -4796,7 +4796,7 @@ private FigureImpl applyFunctionalProperties() { XYDataSeries result = ((XYDataSeries) series).pointShape( pointShapes); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointShape(pointShapes, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointShape(pointShapes, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointShape( io.deephaven.gui.shape.Shape... pointShapes )'", figure); @@ -4810,7 +4810,7 @@ private FigureImpl applyFunctionalProperties() { XYDataSeries result = ((XYDataSeries) series).pointShape( pointShapes); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointShape(pointShapes, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointShape(pointShapes, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointShape( io.deephaven.plot.datasets.data.IndexableData pointShapes )'", figure); @@ -4824,7 +4824,7 @@ private FigureImpl applyFunctionalProperties() { XYDataSeries result = ((XYDataSeries) series).pointShape( sds, pointShape); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointShape(sds, pointShape, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointShape(sds, pointShape, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. 
seriesType=" + series.getClass() + " method='@Override public FigureImpl pointShape( io.deephaven.plot.filters.SelectableDataSet sds, java.lang.String pointShape )'", figure); @@ -4838,7 +4838,7 @@ private FigureImpl applyFunctionalProperties() { XYDataSeries result = ((XYDataSeries) series).pointShape( pointShapes); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointShape(pointShapes, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointShape(pointShapes, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointShape( java.lang.String... pointShapes )'", figure); @@ -4852,7 +4852,7 @@ private FigureImpl applyFunctionalProperties() { XYDataSeries result = ((XYDataSeries) series).pointSize( pointSizes); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointSize(pointSizes, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointSize(pointSizes, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointSize( double... pointSizes )'", figure); @@ -4866,7 +4866,7 @@ private FigureImpl applyFunctionalProperties() { XYDataSeries result = ((XYDataSeries) series).pointSize( pointSizes); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointSize(pointSizes, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointSize(pointSizes, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointSize( int... pointSizes )'", figure); @@ -4880,7 +4880,7 @@ private FigureImpl applyFunctionalProperties() { XYDataSeries result = ((XYDataSeries) series).pointSize( t, pointSizes); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointSize(t, pointSizes, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointSize(t, pointSizes, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. 
seriesType=" + series.getClass() + " method='@Override public FigureImpl pointSize( io.deephaven.engine.table.Table t, java.lang.String pointSizes )'", figure); @@ -4894,7 +4894,7 @@ private FigureImpl applyFunctionalProperties() { XYDataSeries result = ((XYDataSeries) series).pointSize( pointSizes); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointSize(pointSizes, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointSize(pointSizes, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointSize( io.deephaven.plot.datasets.data.IndexableData pointSizes )'", figure); @@ -4908,7 +4908,7 @@ private FigureImpl applyFunctionalProperties() { XYDataSeries result = ((XYDataSeries) series).pointSize( sds, pointSize); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointSize(sds, pointSize, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointSize(sds, pointSize, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointSize( io.deephaven.plot.filters.SelectableDataSet sds, java.lang.String pointSize )'", figure); @@ -4922,7 +4922,7 @@ private FigureImpl applyFunctionalProperties() { XYDataSeries result = ((XYDataSeries) series).pointSize( pointSizes); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointSize(pointSizes, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointSize(pointSizes, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointSize( long... pointSizes )'", figure); @@ -4969,7 +4969,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointColor( pointColor); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointColor(pointColor, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointColor(pointColor, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. 
seriesType=" + series.getClass() + " method='@Override public FigureImpl pointColor( java.util.Map pointColor )'", figure); @@ -4994,7 +4994,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointColorInteger( colors); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointColorInteger(colors, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointColorInteger(colors, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointColorInteger( java.util.Map colors )'", figure); @@ -5019,7 +5019,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointLabel( pointLabels); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointLabel(pointLabels, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointLabel(pointLabels, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointLabel( java.util.Map pointLabels )'", figure); @@ -5044,7 +5044,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointSize( categories, pointSizes); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointSize(categories, pointSizes, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointSize(categories, pointSizes, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointSize( CATEGORY[] categories, NUMBER[] pointSizes )'", figure); @@ -5058,7 +5058,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointSize( pointSizes); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointSize(pointSizes, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointSize(pointSizes, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. 
seriesType=" + series.getClass() + " method='@Override public FigureImpl pointSize( java.util.Map pointSizes )'", figure); @@ -5094,7 +5094,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointShape( pointShapes); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointShape(pointShapes, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointShape(pointShapes, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointShape( java.util.Map pointShapes )'", figure); @@ -5108,7 +5108,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointSize( categories, pointSizes); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointSize(categories, pointSizes, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointSize(categories, pointSizes, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointSize( CATEGORY[] categories, double[] pointSizes )'", figure); @@ -5122,7 +5122,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointSize( categories, pointSizes); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointSize(categories, pointSizes, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointSize(categories, pointSizes, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointSize( CATEGORY[] categories, int[] pointSizes )'", figure); @@ -5136,7 +5136,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointSize( categories, pointSizes); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointSize(categories, pointSizes, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointSize(categories, pointSizes, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. 
seriesType=" + series.getClass() + " method='@Override public FigureImpl pointSize( CATEGORY[] categories, long[] pointSizes )'", figure); @@ -5194,7 +5194,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointColor( pointColor); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointColor(pointColor, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointColor(pointColor, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointColor( groovy.lang.Closure pointColor )'", figure); @@ -5208,7 +5208,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointColor( pointColor); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointColor(pointColor, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointColor(pointColor, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointColor( java.util.function.Function pointColor )'", figure); @@ -5244,7 +5244,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointColorInteger( colors); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointColorInteger(colors, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointColorInteger(colors, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public FigureImpl pointColorInteger( groovy.lang.Closure colors )'", figure); @@ -5258,7 +5258,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointColorInteger( colors); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointColorInteger(colors, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointColorInteger(colors, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. 
seriesType=" + series.getClass() + " method='@Override public FigureImpl pointColorInteger( java.util.function.Function colors )'", figure); @@ -5294,7 +5294,7 @@ private FigureImpl applyFunctionalProperties() { CategoryDataSeries result = ((CategoryDataSeries) series).pointLabel( pointLabels); return make((DataSeriesInternal)result); } else if(series instanceof MultiSeries) { - final MultiSeries result = ((MultiSeries) series).pointLabel(pointLabels, io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final MultiSeries result = ((MultiSeries) series).pointLabel(pointLabels, io.deephaven.util.type.ArrayTypeUtils.EMPTY_OBJECT_ARRAY); return make((SeriesInternal) result); } else { throw new PlotUnsupportedOperationException("Series type does not support this method. seriesType=" + series.getClass() + " method='@Override public getViewportData() { + public Promise getViewportData() { return wrappedTable.getViewportData(); } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/TableData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/TableData.java index 87ea4eaa16a..9ba1e7f3608 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/TableData.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/TableData.java @@ -8,7 +8,7 @@ import com.vertispan.tsdefs.annotations.TsUnion; import com.vertispan.tsdefs.annotations.TsUnionMember; import elemental2.core.JsArray; -import io.deephaven.web.client.api.subscription.AbstractTableSubscription; +import jsinterop.annotations.JsIgnore; import jsinterop.annotations.JsMethod; import jsinterop.annotations.JsOverlay; import jsinterop.annotations.JsPackage; @@ -29,8 +29,9 @@ * Java note: this interface contains some extra overloads that aren't available in JS. Implementations are expected to * implement only abstract methods, and default methods present in this interface will dispatch accordingly. */ -@TsName(namespace = "dh") +@JsType(namespace = "dh") public interface TableData { + @JsIgnore int NO_ROW_FORMAT_COLUMN = -1; /** @@ -66,9 +67,7 @@ default int asInt() { JsArray getColumns(); /** - * A lazily computed array of all rows in the entire table - * - * @return {@link AbstractTableSubscription.SubscriptionRow} array. + * A lazily computed array of all rows available on the client. */ @JsProperty JsArray<@TsTypeRef(Row.class) ? extends Row> getRows(); @@ -87,8 +86,10 @@ default Row get(RowPositionUnion index) { return get(Js.coerceToInt(index)); } + @JsIgnore Row get(long index); + @JsIgnore Row get(int index); /** @@ -106,8 +107,10 @@ default Any getData(RowPositionUnion index, Column column) { return getData(index.asInt(), column); } + @JsIgnore Any getData(int index, Column column); + @JsIgnore Any getData(long index, Column column); /** @@ -125,11 +128,18 @@ default Format getFormat(RowPositionUnion index, Column column) { return getFormat(index.asInt(), column); } + @JsIgnore Format getFormat(int index, Column column); + @JsIgnore Format getFormat(long index, Column column); - @TsName(namespace = "dh") + /** + * Represents a row available in a subscription/snapshot on the client. Do not retain references to rows - they will + * not function properly when the event isn't actively going off (or promise resolving). Instead, wait for the next + * event, or re-request the viewport data. 
+ */ + @JsType(namespace = "dh") interface Row { @JsProperty LongWrapper getIndex(); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java index b78f4a4c7b5..dd51f565104 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java @@ -4,9 +4,7 @@ package io.deephaven.web.client.api.subscription; import com.google.flatbuffers.FlatBufferBuilder; -import com.vertispan.tsdefs.annotations.TsInterface; -import com.vertispan.tsdefs.annotations.TsName; -import com.vertispan.tsdefs.annotations.TsTypeRef; +import com.vertispan.tsdefs.annotations.TsIgnore; import elemental2.core.JsArray; import elemental2.dom.CustomEventInit; import io.deephaven.barrage.flatbuf.BarrageMessageType; @@ -32,6 +30,7 @@ import io.deephaven.web.client.state.ClientTableState; import io.deephaven.web.shared.data.RangeSet; import io.deephaven.web.shared.data.ShiftedRange; +import jsinterop.annotations.JsProperty; import jsinterop.base.Any; import jsinterop.base.Js; import org.jetbrains.annotations.Nullable; @@ -227,7 +226,7 @@ private void onDataChanged(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet to protected void notifyUpdate(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet totalMods, ShiftedRange[] shifted) { // TODO (deephaven-core#2435) Rewrite shifts as adds/removed/modifies - UpdateEventData detail = new UpdateEventData( + UpdateEventData detail = new SubscriptionEventData( barrageSubscription, rowStyleColumn, columns, @@ -243,8 +242,6 @@ protected void notifyUpdate(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet t fireEvent(TableSubscription.EVENT_UPDATED, event); } - @TsInterface - @TsName(namespace = "dh") public static class SubscriptionRow implements TableData.Row { private final WebBarrageSubscription subscription; private final int rowStyleColumn; @@ -294,15 +291,49 @@ public Format getFormat(Column column) { } } + /** + * TableData type for both viewports and full table subscriptions. + */ + @TsIgnore + public static class SubscriptionEventData extends UpdateEventData implements ViewportData, SubscriptionTableData { + public SubscriptionEventData(WebBarrageSubscription subscription, int rowStyleColumn, JsArray columns, + RangeSet added, RangeSet removed, RangeSet modified, ShiftedRange[] shifted) { + super(subscription, rowStyleColumn, columns, added, removed, modified, shifted); + } + + @Override + public JsRangeSet getAdded() { + return added; + } + + @Override + public JsRangeSet getRemoved() { + return removed; + } + + @Override + public JsRangeSet getModified() { + return modified; + } - public static class UpdateEventData implements SubscriptionTableData, ViewportData { + @Override + public JsRangeSet getFullIndex() { + return fullRowSet; + } + } + + /** + * Base type to allow trees to extend from here separately from tables. 
+ */ + @TsIgnore + public abstract static class UpdateEventData implements TableData { protected final WebBarrageSubscription subscription; private final int rowStyleColumn; private final JsArray columns; - private final JsRangeSet added; - private final JsRangeSet removed; - private final JsRangeSet modified; - private final JsRangeSet fullRowSet; + protected final JsRangeSet added; + protected final JsRangeSet removed; + protected final JsRangeSet modified; + protected final JsRangeSet fullRowSet; // cached copy in case it was requested, could be requested again private JsArray allRows; @@ -321,11 +352,8 @@ public UpdateEventData(WebBarrageSubscription subscription, int rowStyleColumn, subscription.getServerViewport(), subscription.isReversed())); } - /** - * The position of the first returned row. - * - * @return double - */ + // for ViewportData + @JsProperty public Double getOffset() { return offset; } @@ -335,7 +363,7 @@ public void setOffset(double offset) { } @Override - public JsArray<@TsTypeRef(SubscriptionRow.class) ? extends SubscriptionRow> getRows() { + public JsArray getRows() { if (allRows == null) { allRows = new JsArray<>(); RangeSet rowSet = subscription.getCurrentRowSet(); @@ -348,7 +376,7 @@ public void setOffset(double offset) { assert allRows.length == positions.size(); } } - return allRows; + return (JsArray) (JsArray) allRows; } protected SubscriptionRow makeRow(long index) { @@ -361,7 +389,7 @@ public Row get(int index) { } @Override - public SubscriptionRow get(long index) { + public Row get(long index) { return makeRow(index); } @@ -408,26 +436,6 @@ public Format getFormat(long index, Column column) { public JsArray getColumns() { return columns; } - - @Override - public JsRangeSet getAdded() { - return added; - } - - @Override - public JsRangeSet getRemoved() { - return removed; - } - - @Override - public JsRangeSet getModified() { - return modified; - } - - @Override - public JsRangeSet getFullIndex() { - return fullRowSet; - } } /** diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java index 26fe32ed820..2fb32a80d27 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java @@ -5,6 +5,7 @@ import com.google.flatbuffers.FlatBufferBuilder; import com.vertispan.tsdefs.annotations.TsName; +import com.vertispan.tsdefs.annotations.TsTypeRef; import elemental2.core.JsArray; import elemental2.dom.CustomEvent; import elemental2.dom.CustomEventInit; @@ -110,7 +111,7 @@ protected void notifyUpdate(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet t if (rowsAdded.size() != rowsRemoved.size() && originalActive) { fireEventWithDetail(JsTable.EVENT_SIZECHANGED, size()); } - UpdateEventData detail = new UpdateEventData(barrageSubscription, rowStyleColumn, getColumns(), rowsAdded, + UpdateEventData detail = new SubscriptionEventData(barrageSubscription, rowStyleColumn, getColumns(), rowsAdded, rowsRemoved, totalMods, shifted); detail.setOffset(this.viewportRowSet.getFirstRow()); @@ -147,7 +148,7 @@ private void fireLegacyEventOnRowsetEntries(String eventName, UpdateEventData up if (hasListeners(eventName)) { rowset.indexIterator().forEachRemaining((long row) -> { CustomEventInit> addedEvent = CustomEventInit.create(); - 
addedEvent.setDetail(wrap(updateEventData.getRows().getAt((int) row), (int) row)); + addedEvent.setDetail(wrap((SubscriptionRow) updateEventData.getRows().getAt((int) row), (int) row)); fireEvent(eventName, addedEvent); }); } @@ -301,16 +302,16 @@ public void internalClose() { * @return Promise of {@link TableData}. */ @JsMethod - public Promise getViewportData() { + public Promise<@TsTypeRef(ViewportData.class) UpdateEventData> getViewportData() { retainForExternalUse(); return getInternalViewportData(); } - public Promise getInternalViewportData() { + public Promise<@TsTypeRef(ViewportData.class) UpdateEventData> getInternalViewportData() { if (isSubscriptionReady()) { return Promise.resolve(viewportData); } - final LazyPromise promise = new LazyPromise<>(); + final LazyPromise promise = new LazyPromise<>(); addEventListenerOneShot(EVENT_UPDATED, ignored -> promise.succeed(viewportData)); return promise.asPromise(); } @@ -389,7 +390,7 @@ public Promise snapshot(JsRangeSet rows, Column[] columns) { doExchange.onEnd(status -> { if (status.isOk()) { // notify the caller that the snapshot is finished - resolve.onInvoke(new UpdateEventData(snapshot, rowStyleColumn, Js.uncheckedCast(columns), + resolve.onInvoke(new SubscriptionEventData(snapshot, rowStyleColumn, Js.uncheckedCast(columns), RangeSet.ofRange(0, rowsReceived.get() - 1), RangeSet.empty(), RangeSet.empty(), diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java index dd3793a9566..04831819539 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java @@ -3,10 +3,11 @@ // package io.deephaven.web.client.api.subscription; -import com.vertispan.tsdefs.annotations.TsInterface; -import com.vertispan.tsdefs.annotations.TsName; +import com.vertispan.tsdefs.annotations.TsTypeRef; +import elemental2.core.JsArray; import io.deephaven.web.client.api.TableData; import jsinterop.annotations.JsProperty; +import jsinterop.annotations.JsType; /** * Extends {@link TableData}, but only contains data in the current viewport. The only API change from TableData is that @@ -17,9 +18,7 @@ * Do not assume that the first row in `rows` is the first visible row, because extra rows may be provided for easier * scrolling without going to the server. */ -// TODO re-add dh.ViewportRow -@TsInterface -@TsName(namespace = "dh") +@JsType(namespace = "dh") public interface ViewportData extends TableData { /** @@ -27,4 +26,25 @@ public interface ViewportData extends TableData { */ @JsProperty Double getOffset(); + + @JsProperty + @Override + JsArray getRows(); + + /** + * Reads a row object from the viewport, based on its position in the table. + */ + @Override + @TsTypeRef(ViewportRow.class) + default TableData.Row get(RowPositionUnion index) { + return TableData.super.get(index); + } + + /** + * This object may be pooled internally or discarded and not updated. Do not retain references to it. Instead, + * request the viewport again. 
+ */ + @JsType(namespace = "dh") + interface ViewportRow extends TableData.Row { + } } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java index 673d43532ae..2dc7ca3a682 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java @@ -3,8 +3,8 @@ // package io.deephaven.web.client.api.tree; -import com.vertispan.tsdefs.annotations.TsInterface; -import com.vertispan.tsdefs.annotations.TsName; +import com.vertispan.tsdefs.annotations.TsIgnore; +import com.vertispan.tsdefs.annotations.TsTypeRef; import com.vertispan.tsdefs.annotations.TsUnion; import com.vertispan.tsdefs.annotations.TsUnionMember; import elemental2.core.JsArray; @@ -93,7 +93,7 @@ * roll-up table, the totals only include leaf nodes (as non-leaf nodes are generated through grouping the contents of * the original table). Roll-ups also have the {@link JsRollupConfig#includeConstituents} property, indicating that a * {@link Column} in the tree may have a {@link Column#getConstituentType()} property reflecting that the type of cells - * where {@link TreeSubscription.TreeRow#hasChildren()} is false will be different from usual. + * where {@link TreeSubscription.TreeRowImpl#hasChildren()} is false will be different from usual. * */ @JsType(namespace = "dh", name = "TreeTable") @@ -163,7 +163,7 @@ private enum RebuildStep { private Column[] columns; private int updateInterval = 1000; - private TreeSubscription.TreeViewportData currentViewportData; + private TreeSubscription.TreeViewportDataImpl currentViewportData; private boolean alwaysFireNextEvent = false; @@ -379,46 +379,22 @@ private int constituentDepth() { } public class TreeSubscription extends AbstractTableSubscription { - @TsName(namespace = "dh") - public class TreeViewportData extends AbstractTableSubscription.UpdateEventData { + @TsIgnore + public class TreeViewportDataImpl extends AbstractTableSubscription.UpdateEventData + implements TreeViewportData { private final double treeSize; private final JsArray columns; - private TreeViewportData(WebBarrageSubscription subscription, int rowStyleColumn, JsArray columns, + private TreeViewportDataImpl(WebBarrageSubscription subscription, int rowStyleColumn, + JsArray columns, RangeSet added, RangeSet removed, RangeSet modified, ShiftedRange[] shifted) { super(subscription, rowStyleColumn, columns, added, removed, modified, shifted); - - // this.offset = offset; this.treeSize = barrageSubscription.getCurrentRowSet().size(); this.columns = JsObject.freeze(Js.cast(Js.>uncheckedCast(columns).slice())); } - /** - * Always returns empty for TreeTable. - */ - @Override - public JsRangeSet getAdded() { - return new JsRangeSet(RangeSet.empty()); - } - - /** - * Always returns empty for TreeTable. - */ - @Override - public JsRangeSet getRemoved() { - return new JsRangeSet(RangeSet.empty()); - } - - /** - * Always returns empty for TreeTable. 
- */ - @Override - public JsRangeSet getModified() { - return new JsRangeSet(RangeSet.empty()); - } - @Override public Any getData(int index, Column column) { Column sourceColumn = sourceColumns.get(column.getName()); @@ -479,70 +455,40 @@ public Format getFormat(long index, Column column) { return super.getFormat(index, sourceColumn); } - @JsProperty + @Override public JsArray getColumns() { // This looks like its superclass, but we're actually returning a different field return columns; } - // TODO need to restore this so the ts types make sense here - // @JsProperty - // @Override - // public JsArray getRows() { - // return (JsArray) super.getRows(); - // } - @Override protected SubscriptionRow makeRow(long index) { - return new TreeRow(subscription, index); + return new TreeRowImpl(subscription, index); } + @JsProperty public double getTreeSize() { return treeSize; } } - /** - * Row implementation that also provides additional read-only properties. represents visible rows in the table, - * but with additional properties to reflect the tree structure. - */ - @TsInterface - @TsName(namespace = "dh") - public class TreeRow extends SubscriptionRow { + public class TreeRowImpl extends SubscriptionRow implements TreeViewportData.TreeRow { - public TreeRow(WebBarrageSubscription subscription, long index) { + public TreeRowImpl(WebBarrageSubscription subscription, long index) { super(subscription, rowStyleColumn, index); } - /** - * True if this node is currently expanded to show its children; false otherwise. Those children will be the - * rows below this one with a greater depth than this one. - * - * @return boolean - */ - @JsProperty(name = "isExpanded") + @Override public boolean isExpanded() { return barrageSubscription.getData(index, rowExpandedCol.getIndex()).uncheckedCast() == Boolean.TRUE; } - /** - * True if this node has children and can be expanded; false otherwise. Note that this value may change when - * the table updates, depending on the table's configuration. - * - * @return boolean - */ - @JsProperty(name = "hasChildren") + @Override public boolean hasChildren() { return barrageSubscription.getData(index, rowExpandedCol.getIndex()).uncheckedCast() != null; } - /** - * The number of levels above this node; zero for top level nodes. Generally used by the UI to indent the - * row and its expand/collapse icon. 
- * - * @return int - */ - @JsProperty(name = "depth") + @Override public int depth() { return Js.coerceToInt(barrageSubscription.getData(index, rowDepthCol.getIndex())); } @@ -625,8 +571,9 @@ public void setViewport(double firstRow, double lastRow, JsArray columns @Override protected void notifyUpdate(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet totalMods, ShiftedRange[] shifted) { - TreeViewportData detail = new TreeViewportData(barrageSubscription, rowStyleColumn, getColumns(), rowsAdded, - rowsRemoved, totalMods, shifted); + TreeViewportDataImpl detail = + new TreeViewportDataImpl(barrageSubscription, rowStyleColumn, getColumns(), rowsAdded, + rowsRemoved, totalMods, shifted); detail.setOffset(this.serverViewport.getFirstRow()); CustomEventInit event = CustomEventInit.create(); event.setDetail(detail); @@ -706,8 +653,8 @@ private void replaceSubscription(RebuildStep step) { subscription.addEventListener(TreeSubscription.EVENT_UPDATED, (CustomEvent data) -> { - TreeSubscription.TreeViewportData detail = - (TreeSubscription.TreeViewportData) data.detail; + TreeSubscription.TreeViewportDataImpl detail = + (TreeSubscription.TreeViewportDataImpl) data.detail; handleUpdate(nextSort, nextFilters, detail, alwaysFireEvent); }); @@ -734,7 +681,7 @@ private IThenable.ThenOnFulfilledCallbackFn defer() { } private void handleUpdate(List nextSort, List nextFilters, - TreeSubscription.TreeViewportData viewportData, boolean alwaysFireEvent) { + TreeSubscription.TreeViewportDataImpl viewportData, boolean alwaysFireEvent) { JsLog.debug("tree table response arrived", viewportData); if (closed) { // ignore @@ -750,7 +697,7 @@ private void handleUpdate(List nextSort, List nextFilters this.filters = nextFilters; if (fireEvent) { - CustomEventInit updatedEvent = CustomEventInit.create(); + CustomEventInit updatedEvent = CustomEventInit.create(); updatedEvent.setDetail(viewportData); fireEvent(EVENT_UPDATED, updatedEvent); } @@ -832,7 +779,7 @@ static RowReferenceUnion of(@DoNotAutobox Object o) { @JsOverlay default boolean isTreeRow() { - return this instanceof TreeSubscription.TreeRow; + return this instanceof TreeSubscription.TreeRowImpl; } @JsOverlay @@ -842,7 +789,7 @@ default boolean isNumber() { @JsOverlay @TsUnionMember - default TreeSubscription.TreeRow asTreeRow() { + default TreeViewportData.TreeRow asTreeRow() { return Js.cast(this); } @@ -873,11 +820,11 @@ public void setExpanded(RowReferenceUnion row, boolean isExpanded, @JsOptional B action = ACTION_EXPAND; } - final TreeSubscription.TreeRow r; + final TreeSubscription.TreeRowImpl r; if (row.isNumber()) { - r = (TreeSubscription.TreeRow) currentViewportData.getRows().getAt((int) (row.asNumber())); + r = (TreeSubscription.TreeRowImpl) currentViewportData.getRows().getAt((int) (row.asNumber())); } else if (row.isTreeRow()) { - r = row.asTreeRow(); + r = (TreeSubscription.TreeRowImpl) row.asTreeRow(); } else { throw new IllegalArgumentException("row parameter must be an index or a row"); } @@ -902,11 +849,11 @@ public void collapseAll() { * @return boolean */ public boolean isExpanded(RowReferenceUnion row) { - final TreeSubscription.TreeRow r; + final TreeSubscription.TreeRowImpl r; if (row.isNumber()) { - r = (TreeSubscription.TreeRow) currentViewportData.getRows().getAt((int) (row.asNumber())); + r = (TreeSubscription.TreeRowImpl) currentViewportData.getRows().getAt((int) (row.asNumber())); } else if (row.isTreeRow()) { - r = row.asTreeRow(); + r = (TreeSubscription.TreeRowImpl) row.asTreeRow(); } else { throw new 
IllegalArgumentException("row parameter must be an index or a row"); } @@ -925,8 +872,8 @@ public void setViewport(double firstRow, double lastRow, @JsOptional @JsNullable replaceSubscription(RebuildStep.SUBSCRIPTION); } - public Promise getViewportData() { - LazyPromise promise = new LazyPromise<>(); + public Promise<@TsTypeRef(TreeViewportData.class) Object> getViewportData() { + LazyPromise promise = new LazyPromise<>(); if (currentViewportData == null) { // only one of these two will fire, and when they do, they'll remove both handlers. diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/TreeViewportData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/TreeViewportData.java new file mode 100644 index 00000000000..6e00b17bf39 --- /dev/null +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/TreeViewportData.java @@ -0,0 +1,60 @@ +// +// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending +// +package io.deephaven.web.client.api.tree; + +import com.vertispan.tsdefs.annotations.TsTypeRef; +import elemental2.core.JsArray; +import io.deephaven.web.client.api.TableData; +import jsinterop.annotations.JsProperty; +import jsinterop.annotations.JsType; + +@JsType(namespace = "dh") +public interface TreeViewportData extends TableData { + @JsProperty + double getTreeSize(); + + @Override + @TsTypeRef(TreeRow.class) + default TableData.Row get(RowPositionUnion index) { + return TableData.super.get(index); + } + + @JsProperty + @Override + JsArray getRows(); + + /** + * Row implementation that also provides additional read-only properties. represents visible rows in the table, but + * with additional properties to reflect the tree structure. + */ + @JsType + interface TreeRow extends TableData.Row { + /** + * True if this node is currently expanded to show its children; false otherwise. Those children will be the + * rows below this one with a greater depth than this one. + * + * @return boolean + */ + @JsProperty(name = "isExpanded") + boolean isExpanded(); + + /** + * True if this node has children and can be expanded; false otherwise. Note that this value may change when the + * table updates, depending on the table's configuration. + * + * @return boolean + */ + @JsProperty(name = "hasChildren") + boolean hasChildren(); + + /** + * The number of levels above this node; zero for top level nodes. Generally used by the UI to indent the row + * and its expand/collapse icon. 
+ * + * @return int + */ + @JsProperty(name = "depth") + int depth(); + } +} diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/ChartData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/ChartData.java index 5c7ef400cb1..ac70fd09128 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/ChartData.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/ChartData.java @@ -9,6 +9,7 @@ import io.deephaven.web.client.api.JsTable; import io.deephaven.web.client.api.TableData; import io.deephaven.web.client.api.subscription.AbstractTableSubscription; +import io.deephaven.web.client.api.subscription.SubscriptionTableData; import io.deephaven.web.client.fu.JsSettings; import io.deephaven.web.shared.data.Range; import io.deephaven.web.shared.fu.JsFunction; @@ -36,9 +37,10 @@ public ChartData(JsTable table) { } public void update(AbstractTableSubscription.UpdateEventData tableData) { - Iterator addedIterator = tableData.getAdded().getRange().rangeIterator(); - Iterator removedIterator = tableData.getRemoved().getRange().rangeIterator(); - Iterator modifiedIterator = tableData.getModified().getRange().rangeIterator(); + SubscriptionTableData data = (SubscriptionTableData) tableData; + Iterator addedIterator = data.getAdded().getRange().rangeIterator(); + Iterator removedIterator = data.getRemoved().getRange().rangeIterator(); + Iterator modifiedIterator = data.getModified().getRange().rangeIterator(); Range nextAdded = addedIterator.hasNext() ? addedIterator.next() : null; Range nextRemoved = removedIterator.hasNext() ? removedIterator.next() : null; @@ -130,7 +132,7 @@ public void update(AbstractTableSubscription.UpdateEventData tableData) { assert cachedData.values().stream().flatMap(m -> m.values().stream()).allMatch(arr -> arr .reduce((Object val, Any p1, int p2) -> ((Integer) val) + 1, 0) == indexes.length); - JsRangeSet fullIndex = tableData.getFullIndex(); + JsRangeSet fullIndex = ((SubscriptionTableData) tableData).getFullIndex(); PrimitiveIterator.OfLong iter = fullIndex.getRange().indexIterator(); for (int j = 0; j < indexes.length; j++) { assert indexes[j] == iter.nextLong(); diff --git a/web/client-api/src/test/java/io/deephaven/web/client/api/HierarchicalTableTestGwt.java b/web/client-api/src/test/java/io/deephaven/web/client/api/HierarchicalTableTestGwt.java index a4b3fb9304e..54ace6db598 100644 --- a/web/client-api/src/test/java/io/deephaven/web/client/api/HierarchicalTableTestGwt.java +++ b/web/client-api/src/test/java/io/deephaven/web/client/api/HierarchicalTableTestGwt.java @@ -6,6 +6,7 @@ import elemental2.dom.CustomEvent; import elemental2.promise.Promise; import io.deephaven.web.client.api.tree.JsTreeTable; +import io.deephaven.web.client.api.tree.TreeViewportData; public class HierarchicalTableTestGwt extends AbstractAsyncGwtTestCase { private static final Format red = new Format(0x1ff000001e0e0e0L, 0, null, null); @@ -39,20 +40,22 @@ public void testStaticTreeTable() { assertEquals("Parent", treeTable.getColumns().getAt(1).getName()); treeTable.setViewport(0, 99, treeTable.getColumns(), null); - return treeTable.getViewportData().then(data -> { - assertEquals(1d, data.getTreeSize()); + return treeTable.getViewportData() + .then(data -> Promise.resolve((TreeViewportData) data)) + .then(data -> { + assertEquals(1d, data.getTreeSize()); - treeTable.expand(JsTreeTable.RowReferenceUnion.of(0), null); - return treeTable.nextEvent( - JsTreeTable.EVENT_UPDATED, 2001d); - 
}).then(event -> { - assertEquals(10d, event.detail.getTreeSize()); + treeTable.expand(JsTreeTable.RowReferenceUnion.of(0), null); + return treeTable.nextEvent( + JsTreeTable.EVENT_UPDATED, 2001d); + }).then(event -> { + assertEquals(10d, event.detail.getTreeSize()); - treeTable.close(); + treeTable.close(); - assertTrue(treeTable.isClosed()); - return null; - }); + assertTrue(treeTable.isClosed()); + return null; + }); }) .then(this::finish).catch_(this::report); } @@ -77,16 +80,16 @@ public void testRefreshingTreeTable() { // Wait for the table to tick such that the first row has children // Read values from the one returned row return waitForEventWhere(treeTable, JsTreeTable.EVENT_UPDATED, - (CustomEvent d) -> d.detail + (CustomEvent d) -> d.detail .getTreeSize() == 1 - && d.detail.getRows().getAtAsAny(0).cast() + && d.detail.getRows().getAtAsAny(0).cast() .hasChildren(), 10001) .then(JsTreeTable::getViewportData) + .then(data -> Promise.resolve((TreeViewportData) data)) .then(data -> { assertEquals(1.0, data.getTreeSize()); - JsTreeTable.TreeSubscription.TreeRow row1 = - (JsTreeTable.TreeSubscription.TreeRow) data.getRows().getAt(0); + TreeViewportData.TreeRow row1 = (TreeViewportData.TreeRow) data.getRows().getAt(0); Column timestampCol = treeTable.findColumn("Timestamp"); assertEquals(Format.EMPTY, data.getFormat(0, timestampCol)); assertEquals(Format.EMPTY, row1.getFormat(timestampCol)); @@ -109,14 +112,13 @@ public void testRefreshingTreeTable() { // Wait for the expand to occur and table to show all 10 rows return waitForEventWhere(treeTable, JsTreeTable.EVENT_UPDATED, - (CustomEvent d) -> d.detail - .getTreeSize() == 10, + (CustomEvent d) -> d.detail.getTreeSize() == 10, 14004); }) .then(JsTreeTable::getViewportData) + .then(data -> Promise.resolve((TreeViewportData) data)) .then(data -> { - JsTreeTable.TreeSubscription.TreeRow row2 = - (JsTreeTable.TreeSubscription.TreeRow) data.getRows().getAt(1); + TreeViewportData.TreeRow row2 = (TreeViewportData.TreeRow) data.getRows().getAt(1); Column timestampCol = treeTable.findColumn("Timestamp"); assertEquals(Format.EMPTY, data.getFormat(1, timestampCol)); @@ -163,13 +165,12 @@ public void testTickingRollup() { // Wait for the table to tick such that we have at least 4 rows (root, three children) return waitForEventWhere(rollup, JsTreeTable.EVENT_UPDATED, - (CustomEvent d) -> d.detail - .getTreeSize() == 4, + (CustomEvent d) -> d.detail.getTreeSize() == 4, 10002) .then(JsTreeTable::getViewportData) + .then(data -> Promise.resolve((TreeViewportData) data)) .then(data -> { - JsTreeTable.TreeSubscription.TreeRow row1 = - (JsTreeTable.TreeSubscription.TreeRow) data.getRows().getAt(0); + TreeViewportData.TreeRow row1 = (TreeViewportData.TreeRow) data.getRows().getAt(0); assertEquals(Format.EMPTY, data.getFormat(0, xCol)); assertEquals(Format.EMPTY, row1.getFormat(xCol)); @@ -187,8 +188,7 @@ public void testTickingRollup() { assertEquals(0d, row1.get(yCol).asDouble()); assertEquals(0d, yCol.get(row1).asDouble()); - JsTreeTable.TreeSubscription.TreeRow row2 = - (JsTreeTable.TreeSubscription.TreeRow) data.getRows().getAt(1); + TreeViewportData.TreeRow row2 = (TreeViewportData.TreeRow) data.getRows().getAt(1); assertEquals(Format.EMPTY, data.getFormat(1, xCol)); assertEquals(Format.EMPTY, row2.getFormat(xCol)); assertEquals(Format.EMPTY, xCol.getFormat(row2)); @@ -210,16 +210,13 @@ public void testTickingRollup() { // Wait for the expand to occur and table to show all 10 rows return waitForEventWhere(rollup, JsTreeTable.EVENT_UPDATED, - 
(CustomEvent d) -> { - return d.detail - .getTreeSize() > 4; - }, + (CustomEvent d) -> d.detail.getTreeSize() > 4, 14008); }) .then(JsTreeTable::getViewportData) + .then(data -> Promise.resolve((TreeViewportData) data)) .then(data -> { - JsTreeTable.TreeSubscription.TreeRow row3 = - (JsTreeTable.TreeSubscription.TreeRow) data.getRows().getAt(2); + TreeViewportData.TreeRow row3 = (TreeViewportData.TreeRow) data.getRows().getAt(2); assertEquals(Format.EMPTY, data.getFormat(2, xCol)); assertEquals(Format.EMPTY, row3.getFormat(xCol)); @@ -240,10 +237,7 @@ public void testTickingRollup() { // Collapse row 2, wait until back to 4 rows rollup.collapse(JsTreeTable.RowReferenceUnion.of(1)); return waitForEventWhere(rollup, JsTreeTable.EVENT_UPDATED, - (CustomEvent d) -> { - return d.detail - .getTreeSize() == 4; - }, + (CustomEvent d) -> d.detail.getTreeSize() == 4, 14009); }) .then(event -> { From ffc35b0560890351ffd424707a667662085df102 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Thu, 22 Aug 2024 16:35:14 -0500 Subject: [PATCH 190/219] Add trailing newlines --- .github/CODEOWNERS | 2 +- Base/src/main/resources/io/deephaven/base/Base.gwt.xml | 2 +- IO/src/main/resources/io/deephaven/io/IO.gwt.xml | 2 +- Util/src/main/resources/io/deephaven/Util.gwt.xml | 2 +- .../chunk/src/main/resources/io/deephaven/chunk/Chunk.gwt.xml | 2 +- .../io/deephaven/engine/primitive/function/Function.gwt.xml | 2 +- .../resources/io/deephaven/extensions/barrage/Barrage.gwt.xml | 2 +- .../io/deephaven/barrage/flatbuf/BarrageFlatbufFormat.gwt.xml | 2 +- .../web/super/io/deephaven/engine/rowset/RowSequence.java | 2 +- .../super/io/deephaven/engine/rowset/RowSequenceFactory.java | 2 +- .../deephaven/web/super/io/deephaven/engine/rowset/RowSet.java | 2 +- .../io/deephaven/engine/rowset/RowSetBuilderSequential.java | 2 +- .../web/super/io/deephaven/engine/rowset/RowSetFactory.java | 2 +- .../deephaven/engine/rowset/WebRowSetBuilderSequentialImpl.java | 2 +- .../web/super/io/deephaven/engine/rowset/WebRowSetImpl.java | 2 +- .../web/super/io/deephaven/engine/rowset/WritableRowSet.java | 2 +- .../deephaven/web/super/io/deephaven/util/MultiException.java | 2 +- .../resources/io/deephaven/web/super/java/io/DataInput.java | 2 +- .../resources/io/deephaven/web/super/java/io/DataOutput.java | 2 +- .../io/deephaven/web/super/java/io/DataOutputStream.java | 2 +- .../resources/io/deephaven/web/super/java/io/EOFException.java | 2 +- .../io/deephaven/web/super/java/lang/ref/Reference.java | 2 +- .../io/deephaven/web/super/java/lang/ref/ReferenceQueue.java | 2 +- web/client-api/src/main/resources/io/grpc/Grpc.gwt.xml | 2 +- .../org/apache/arrow/flatbuf/FlightFlatbufFormat.gwt.xml | 2 +- .../src/main/resources/org/immutables/value/Immutables.gwt.xml | 2 +- 26 files changed, 26 insertions(+), 26 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index e46eaff7a38..f9b7f12a289 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -24,4 +24,4 @@ /py @chipkent @jmao-denver @rcaudy /R @chipkent @alexpeters1208 @rcaudy *.proto @devinrsmith @nbauernfeind @niloc132 @rcaudy -*.gwt.xml @niloc132 @rcaudy @nbauernfeind \ No newline at end of file +*.gwt.xml @niloc132 @rcaudy @nbauernfeind diff --git a/Base/src/main/resources/io/deephaven/base/Base.gwt.xml b/Base/src/main/resources/io/deephaven/base/Base.gwt.xml index 15d72861b67..6cf87067eae 100644 --- a/Base/src/main/resources/io/deephaven/base/Base.gwt.xml +++ b/Base/src/main/resources/io/deephaven/base/Base.gwt.xml @@ -1,4 +1,4 @@ - \ No newline at end of file + diff --git 
a/IO/src/main/resources/io/deephaven/io/IO.gwt.xml b/IO/src/main/resources/io/deephaven/io/IO.gwt.xml index 268e5cadc63..51000b6cea3 100644 --- a/IO/src/main/resources/io/deephaven/io/IO.gwt.xml +++ b/IO/src/main/resources/io/deephaven/io/IO.gwt.xml @@ -1,3 +1,3 @@ - \ No newline at end of file + diff --git a/Util/src/main/resources/io/deephaven/Util.gwt.xml b/Util/src/main/resources/io/deephaven/Util.gwt.xml index aa09927115e..0a6e96c6463 100644 --- a/Util/src/main/resources/io/deephaven/Util.gwt.xml +++ b/Util/src/main/resources/io/deephaven/Util.gwt.xml @@ -7,4 +7,4 @@ - \ No newline at end of file + diff --git a/engine/chunk/src/main/resources/io/deephaven/chunk/Chunk.gwt.xml b/engine/chunk/src/main/resources/io/deephaven/chunk/Chunk.gwt.xml index 4747791ddba..1f53956d9ae 100644 --- a/engine/chunk/src/main/resources/io/deephaven/chunk/Chunk.gwt.xml +++ b/engine/chunk/src/main/resources/io/deephaven/chunk/Chunk.gwt.xml @@ -3,4 +3,4 @@ - \ No newline at end of file + diff --git a/engine/primitive/src/main/resources/io/deephaven/engine/primitive/function/Function.gwt.xml b/engine/primitive/src/main/resources/io/deephaven/engine/primitive/function/Function.gwt.xml index ec945398599..c78151ef021 100644 --- a/engine/primitive/src/main/resources/io/deephaven/engine/primitive/function/Function.gwt.xml +++ b/engine/primitive/src/main/resources/io/deephaven/engine/primitive/function/Function.gwt.xml @@ -1,3 +1,3 @@ - \ No newline at end of file + diff --git a/extensions/barrage/src/main/resources/io/deephaven/extensions/barrage/Barrage.gwt.xml b/extensions/barrage/src/main/resources/io/deephaven/extensions/barrage/Barrage.gwt.xml index 15892b03e4c..a29af5b6ca8 100644 --- a/extensions/barrage/src/main/resources/io/deephaven/extensions/barrage/Barrage.gwt.xml +++ b/extensions/barrage/src/main/resources/io/deephaven/extensions/barrage/Barrage.gwt.xml @@ -11,4 +11,4 @@ - \ No newline at end of file + diff --git a/web/client-api/src/main/resources/io/deephaven/barrage/flatbuf/BarrageFlatbufFormat.gwt.xml b/web/client-api/src/main/resources/io/deephaven/barrage/flatbuf/BarrageFlatbufFormat.gwt.xml index 98ceaadd944..9fd0a379083 100644 --- a/web/client-api/src/main/resources/io/deephaven/barrage/flatbuf/BarrageFlatbufFormat.gwt.xml +++ b/web/client-api/src/main/resources/io/deephaven/barrage/flatbuf/BarrageFlatbufFormat.gwt.xml @@ -1,4 +1,4 @@ - \ No newline at end of file + diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSequence.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSequence.java index 5b74e3c7a09..1ba218f9a73 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSequence.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSequence.java @@ -21,4 +21,4 @@ default void forAllRowKeys(java.util.function.LongConsumer lc) { } void forAllRowKeyRanges(LongRangeConsumer lrc); -} \ No newline at end of file +} diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSequenceFactory.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSequenceFactory.java index 90ad679f7cf..9175bd144c4 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSequenceFactory.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSequenceFactory.java @@ -11,4 +11,4 
@@ public class RowSequenceFactory { public static RowSequence forRange(final long firstRowKey, final long lastRowKey) { return new WebRowSetImpl(RangeSet.ofRange(firstRowKey, lastRowKey)); } -} \ No newline at end of file +} diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSet.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSet.java index fd2d63c1b40..a86d285d230 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSet.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSet.java @@ -11,4 +11,4 @@ public interface RowSet extends RowSequence, LongSizedDataStructure, SafeCloseab long get(long rowPosition); WritableRowSet intersect(RowSet rowSet); -} \ No newline at end of file +} diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSetBuilderSequential.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSetBuilderSequential.java index c7580427bc4..e14ffa21d9f 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSetBuilderSequential.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSetBuilderSequential.java @@ -8,4 +8,4 @@ public interface RowSetBuilderSequential extends LongRangeConsumer { void appendRange(long rangeFirstRowKey, long rangeLastRowKey); RowSet build(); -} \ No newline at end of file +} diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSetFactory.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSetFactory.java index c5a8716ce96..4dfe25e44e6 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSetFactory.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/RowSetFactory.java @@ -20,4 +20,4 @@ public static RowSet fromRange(long first, long last) { public static RowSet flat(long size) { return size <= 0 ? 
empty() : new WebRowSetImpl(RangeSet.ofRange(0, size - 1)); } -} \ No newline at end of file +} diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WebRowSetBuilderSequentialImpl.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WebRowSetBuilderSequentialImpl.java index 871bc6b1b65..7b2673269d0 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WebRowSetBuilderSequentialImpl.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WebRowSetBuilderSequentialImpl.java @@ -18,4 +18,4 @@ public void accept(long first, long last) { public RowSet build() { return new WebRowSetImpl(rangeSet); } -} \ No newline at end of file +} diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WebRowSetImpl.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WebRowSetImpl.java index 99670fee366..aa026c56a9d 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WebRowSetImpl.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WebRowSetImpl.java @@ -86,4 +86,4 @@ public boolean equals(Object obj) { public int hashCode() { return rangeSet.hashCode(); } -} \ No newline at end of file +} diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WritableRowSet.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WritableRowSet.java index 2b19f8847ca..85572b5939c 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WritableRowSet.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WritableRowSet.java @@ -5,4 +5,4 @@ public interface WritableRowSet extends RowSet { WritableRowSet shift(long shiftAmount); -} \ No newline at end of file +} diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/util/MultiException.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/util/MultiException.java index f093435f499..46578c399b5 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/util/MultiException.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/util/MultiException.java @@ -71,4 +71,4 @@ public String getMessage() { public String toString() { return getMessage(); } -} \ No newline at end of file +} diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataInput.java b/web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataInput.java index 5e00eeb89ef..8b8eb52520e 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataInput.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataInput.java @@ -16,4 +16,4 @@ public interface DataInput { int readUnsignedShort() throws IOException; String readUTF() throws IOException; int skipBytes(int n) throws IOException; -} \ No newline at end of file +} diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataOutput.java b/web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataOutput.java index 1cbdbc50265..0534189f69a 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataOutput.java +++ 
b/web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataOutput.java @@ -15,4 +15,4 @@ public interface DataOutput { void writeLong(long v) throws IOException; void writeShort(int v) throws IOException; void writeUTF(String s) throws IOException; -} \ No newline at end of file +} diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataOutputStream.java b/web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataOutputStream.java index 1e8accf6be3..74d1ee6b27d 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataOutputStream.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataOutputStream.java @@ -101,4 +101,4 @@ public void writeChars(String s) throws IOException { public void writeUTF(String s) throws IOException { throw new UnsupportedOperationException("modified utf-8"); } -} \ No newline at end of file +} diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/java/io/EOFException.java b/web/client-api/src/main/resources/io/deephaven/web/super/java/io/EOFException.java index cb5a6fa2207..ac8a7959205 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/java/io/EOFException.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/java/io/EOFException.java @@ -7,4 +7,4 @@ public EOFException() { public EOFException(String s) { super(s); } -} \ No newline at end of file +} diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/Reference.java b/web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/Reference.java index 7c83a2f8987..2f9dc276b00 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/Reference.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/Reference.java @@ -41,4 +41,4 @@ protected Object clone() throws CloneNotSupportedException { throw new CloneNotSupportedException(); } -} \ No newline at end of file +} diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/ReferenceQueue.java b/web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/ReferenceQueue.java index e288d1c35a0..1e2dfd25483 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/ReferenceQueue.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/java/lang/ref/ReferenceQueue.java @@ -2,4 +2,4 @@ public class ReferenceQueue { -} \ No newline at end of file +} diff --git a/web/client-api/src/main/resources/io/grpc/Grpc.gwt.xml b/web/client-api/src/main/resources/io/grpc/Grpc.gwt.xml index b10ac639bc3..187ac264524 100644 --- a/web/client-api/src/main/resources/io/grpc/Grpc.gwt.xml +++ b/web/client-api/src/main/resources/io/grpc/Grpc.gwt.xml @@ -1,3 +1,3 @@ - \ No newline at end of file + diff --git a/web/client-api/src/main/resources/org/apache/arrow/flatbuf/FlightFlatbufFormat.gwt.xml b/web/client-api/src/main/resources/org/apache/arrow/flatbuf/FlightFlatbufFormat.gwt.xml index 98ceaadd944..9fd0a379083 100644 --- a/web/client-api/src/main/resources/org/apache/arrow/flatbuf/FlightFlatbufFormat.gwt.xml +++ b/web/client-api/src/main/resources/org/apache/arrow/flatbuf/FlightFlatbufFormat.gwt.xml @@ -1,4 +1,4 @@ - \ No newline at end of file + diff --git a/web/client-api/src/main/resources/org/immutables/value/Immutables.gwt.xml b/web/client-api/src/main/resources/org/immutables/value/Immutables.gwt.xml index ec945398599..c78151ef021 100644 --- 
a/web/client-api/src/main/resources/org/immutables/value/Immutables.gwt.xml +++ b/web/client-api/src/main/resources/org/immutables/value/Immutables.gwt.xml @@ -1,3 +1,3 @@ - \ No newline at end of file + From 1e05eb3fb6d7eab1acc002dd076836b5fa0f5549 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Thu, 22 Aug 2024 19:40:48 -0500 Subject: [PATCH 191/219] DataOutputStream consistency --- .../web/super/java/io/DataOutputStream.java | 38 +++++++++---------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataOutputStream.java b/web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataOutputStream.java index 74d1ee6b27d..29f380a4a12 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataOutputStream.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/java/io/DataOutputStream.java @@ -35,37 +35,37 @@ public void writeByte(int i) throws IOException { @Override public void writeShort(int i) throws IOException { - out.write((i >> 8) & 0xFF); - out.write((i >> 0) & 0xFF); + super.write((i >> 8) & 0xFF); + super.write((i >> 0) & 0xFF); written += 2; } @Override public void writeChar(int i) throws IOException { - out.write((i >> 8) & 0xFF); - out.write((i >> 0) & 0xFF); + super.write((i >> 8) & 0xFF); + super.write((i >> 0) & 0xFF); written += 2; } @Override public void writeInt(int i) throws IOException { - out.write((i >> 24) & 0xFF); - out.write((i >> 16) & 0xFF); - out.write((i >> 8) & 0xFF); - out.write((i >> 0) & 0xFF); + super.write((i >> 24) & 0xFF); + super.write((i >> 16) & 0xFF); + super.write((i >> 8) & 0xFF); + super.write((i >> 0) & 0xFF); written += 4; } @Override public void writeLong(long l) throws IOException { - out.write((int) (l >> 56) & 0xFF); - out.write((int) (l >> 48) & 0xFF); - out.write((int) (l >> 40) & 0xFF); - out.write((int) (l >> 32) & 0xFF); - out.write((int) (l >> 24) & 0xFF); - out.write((int) (l >> 16) & 0xFF); - out.write((int) (l >> 8) & 0xFF); - out.write((int) (l >> 0) & 0xFF); + super.write((int) (l >> 56) & 0xFF); + super.write((int) (l >> 48) & 0xFF); + super.write((int) (l >> 40) & 0xFF); + super.write((int) (l >> 32) & 0xFF); + super.write((int) (l >> 24) & 0xFF); + super.write((int) (l >> 16) & 0xFF); + super.write((int) (l >> 8) & 0xFF); + super.write((int) (l >> 0) & 0xFF); written += 8; } @@ -82,7 +82,7 @@ public void writeDouble(double v) throws IOException { @Override public void writeBytes(String s) throws IOException { for (int i = 0; i < s.length(); i++) { - out.write(s.charAt(i) & 0xFF); + super.write(s.charAt(i) & 0xFF); } written += s.length(); } @@ -91,8 +91,8 @@ public void writeBytes(String s) throws IOException { public void writeChars(String s) throws IOException { for (int i = 0; i < s.length(); i++) { char c = s.charAt(i); - out.write((c >> 8) & 0xFF); - out.write(c & 0xFF); + super.write((c >> 8) & 0xFF); + super.write(c & 0xFF); } written += s.length() * 2; } From 7dd204f60829bdb31dbbb0e610bb2f3e923f8991 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 23 Aug 2024 11:22:30 -0500 Subject: [PATCH 192/219] Improve position lookup impl --- .../deephaven/web/shared/data/RangeSet.java | 182 ++++++++++-------- 1 file changed, 106 insertions(+), 76 deletions(-) diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java index 226e1e585cc..65692622eaf 100644 --- 
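The DataOutputStream patch above touches the GWT super-source stand-in for java.io.DataOutputStream; routing every byte through super.write keeps all of the primitive writers on a single write path while preserving the big-endian byte order that the java.io.DataOutput contract specifies. A standalone round-trip check against the real JDK classes, independent of this patch:

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    public class BigEndianCheck {
        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(bytes);
            out.writeInt(0x01020304);
            byte[] b = bytes.toByteArray();
            // DataOutput mandates big-endian: most significant byte first
            assert b[0] == 0x01 && b[1] == 0x02 && b[2] == 0x03 && b[3] == 0x04;
            // The same layout the emulated writeInt produces by hand
            int v = 0x01020304;
            assert b[0] == (byte) ((v >> 24) & 0xFF) && b[3] == (byte) (v & 0xFF);
        }
    }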
a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java @@ -3,8 +3,10 @@ // package io.deephaven.web.shared.data; +import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; +import java.util.List; import java.util.PrimitiveIterator; import java.util.stream.LongStream; @@ -56,6 +58,9 @@ private static boolean orderedAndNonOverlapping(Range[] sortedRanges) { private Range[] sortedRanges = new Range[0]; + private int firstWrongCacheEntry = 0; + private long[] cardinality = new long[0]; + public void addRange(Range range) { // if empty, add as the only entry if (sortedRanges.length == 0) { @@ -68,11 +73,14 @@ public void addRange(Range range) { Range overlap = range.overlap(existing); if (overlap != null) { sortedRanges = new Range[] {overlap}; + poisonCache(0); } else if (existing.compareTo(range) < 0) { sortedRanges = new Range[] {existing, range}; + poisonCache(1); } else { assert existing.compareTo(range) > 0; sortedRanges = new Range[] {range, existing}; + poisonCache(0); } return; } @@ -112,6 +120,7 @@ public void addRange(Range range) { System.arraycopy(sortedRanges, 0, newArray, 0, index); } newArray[index] = merged; + poisonCache(index); if (end < sortedRanges.length - 1) { System.arraycopy(sortedRanges, end + 1, newArray, index + 1, sortedRanges.length - 1 - end); } @@ -155,6 +164,7 @@ public void addRange(Range range) { System.arraycopy(sortedRanges, 0, newArray, 0, proposedIndex); } newArray[proposedIndex] = merged; + poisonCache(proposedIndex); if (end < sortedRanges.length - 1) { System.arraycopy(sortedRanges, end + 1, newArray, proposedIndex + 1, sortedRanges.length - (end + 1)); } @@ -210,6 +220,7 @@ public void removeRange(Range range) { } replacement[index] = remaining[0]; replacement[index + 1] = remaining[1]; + poisonCache(index); System.arraycopy(sortedRanges, index + 1, replacement, index + 2, sortedRanges.length - (index + 1)); sortedRanges = replacement; @@ -219,6 +230,7 @@ public void removeRange(Range range) { if (remaining.length == 1) { // swap shortened item and move on sortedRanges[index] = remaining[0]; + poisonCache(index); } else { assert remaining.length == 0 : "Array contains a surprising number of items: " + remaining.length; @@ -235,6 +247,7 @@ public void removeRange(Range range) { System.arraycopy(sortedRanges, 0, replacement, 0, beforeCount); System.arraycopy(sortedRanges, beforeCount + toRemove, replacement, beforeCount, sortedRanges.length - beforeCount - toRemove); + poisonCache(beforeCount + 1); sortedRanges = replacement; } else { @@ -269,7 +282,11 @@ public int rangeCount() { * @return long */ public long size() { - return Arrays.stream(sortedRanges).mapToLong(Range::size).sum(); + ensureCardinalityCache(); + if (cardinality.length == 0) { + return 0; + } + return cardinality[cardinality.length - 1]; } public boolean isEmpty() { @@ -355,13 +372,31 @@ public int hashCode() { return Arrays.hashCode(sortedRanges); } - - Range[] getSortedRanges() { - return sortedRanges; + /** + * Indicates that this item has been changed, and should be recomputed. Stores the earliest offset that should + * be recomputed. + */ + private void poisonCache(int rangeIndex) { + firstWrongCacheEntry = Math.min(rangeIndex, firstWrongCacheEntry); } - void setSortedRanges(Range[] sortedRanges) { - this.sortedRanges = sortedRanges; + /** + * Ensures that the cardinality cache is correct, by correcting any values after the first wrong entry. 
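The poisonCache/ensureCardinalityCache pair above is a lazy-invalidation pattern: every mutation only records the smallest stale index (a single Math.min), and the next reader pays for repairing the suffix exactly once. The same idea in a minimal generic form, with hypothetical names and plain arrays in place of Range objects:

    class PrefixSums {
        private final long[] sizes; // source data: the size of each range
        private final long[] sums;  // cache: sums[i] = sizes[0] + ... + sizes[i]
        private int firstStale = 0; // entries at or after this index are suspect

        PrefixSums(long[] sizes) {
            this.sizes = sizes;
            this.sums = new long[sizes.length];
        }

        // Writer side: O(1), just lower the low-water mark
        void onChanged(int index) {
            firstStale = Math.min(firstStale, index);
        }

        // Reader side: repair the stale suffix up to `index`, then answer
        long totalUpTo(int index) {
            long running = firstStale == 0 ? 0 : sums[firstStale - 1];
            for (int i = firstStale; i <= index; i++) {
                running += sizes[i];
                sums[i] = running;
            }
            firstStale = Math.max(firstStale, index + 1);
            return sums[index];
        }
    }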
+ */ + private void ensureCardinalityCache() { + assert firstWrongCacheEntry >= 0;// && firstWrongCacheEntry < cardinality.length; + if (cardinality.length < sortedRanges.length) { + long[] replacement = new long[sortedRanges.length]; + System.arraycopy(cardinality, 0, replacement, 0, cardinality.length); + cardinality = replacement; + } + long cumulative = firstWrongCacheEntry == 0 ? 0 : cardinality[firstWrongCacheEntry - 1]; + for (int i = firstWrongCacheEntry; i < sortedRanges.length; i++) { + cumulative += sortedRanges[i].size(); + this.cardinality[i] = cumulative; + } + firstWrongCacheEntry = sortedRanges.length; + assert cardinality.length == sortedRanges.length; } public RangeSet subsetForPositions(RangeSet positions, boolean reversed) { @@ -371,84 +406,79 @@ public RangeSet subsetForPositions(RangeSet positions, boolean reversed) { if (positions.isEmpty() || isEmpty()) { return empty(); } - // if (positions.sortedRanges.length == 1) { - // // Desired range is contiguous - // List ranges = new ArrayList<>(); - // final long offset = positions.getFirstRow(); - // final long limit = positions.getLastRow(); - // int i = 0; - // long position = 0; - // for (; i < sortedRanges.length; i++) { - // Range r = sortedRanges[i]; - // if (offset < position + r.size()) { - // // Haven't hit the first range yet - // position += r.size(); - // continue; - // } - // // This range is part of the desired range, take some/all of it - // //TODO wrong, we want the min to measure the index of the range to take - // ranges.add(new Range(position, Math.min(r.getLast(), limit))); - // position += r.size(); - // i++; - // break; - // } - // for (; i < sortedRanges.length; i++) { - // Range r = sortedRanges[i]; - // if (limit > position + r.size()) { - // // Past the end of the desired positions - // break; - // } - //// ranges.add(new Range(r.getFirst(), Math.)) - // - // - // } - // - // - // return RangeSet.fromSortedRanges(ranges.toArray(Range[]::new)); - // } - - - PrimitiveIterator.OfLong positionIter = positions.indexIterator(); - PrimitiveIterator.OfLong valueIter = indexIterator(); - int i = 0; - RangeSet result = new RangeSet(); - - // There must be at least one of each - long position = positionIter.nextLong(); - long val = valueIter.nextLong(); - - done: do { - while (i != position) { - if (!valueIter.hasNext()) { - break done; - } - i++; - val = valueIter.nextLong(); - } + ensureCardinalityCache(); + + List ranges = new ArrayList<>(); - result.addRange(new Range(val, val)); + Iterator positionsIter = positions.rangeIterator(); - if (!positionIter.hasNext() || !valueIter.hasNext()) { + int from = 0; + while (positionsIter.hasNext()) { + Range nextPosRange = positionsIter.next(); + if (nextPosRange.getFirst() > size()) { + // Entire range is past the end - since ranges are sorted, we're done break; } - position = positionIter.nextLong(); - i++; - val = valueIter.nextLong(); - } while (true); + long rangeToTake = nextPosRange.size(); + + System.out.println("cardinality.length=" + cardinality.length); + int pos = Arrays.binarySearch(cardinality, from, cardinality.length, nextPosRange.getFirst()); + + long first; + Range target; + long offset; + if (pos >= 0) { + // Position matches the last item in the current range + pos++; + target = sortedRanges[pos]; +// first = target.getFirst(); + offset = 1; + } else { + // Position matches an earlier item in + pos = -pos - 1; + target = sortedRanges[pos]; + long c = cardinality[pos]; + offset = c - nextPosRange.getFirst();// positive value to offset backwards 
from the end of target + } + assert offset >= 0; + first = target.getLast() - offset + 1; + + + while (rangeToTake > 0) { + long count = Math.min(offset, target.size()); + Range res = new Range(first, first + count - 1); + assert count == res.size(); + ranges.add(res); + + rangeToTake -= count; + pos++; + if (pos >= sortedRanges.length) { + break; + } + target = sortedRanges[pos]; + first = target.getFirst(); + } - return result; + from = pos; + } + return RangeSet.fromSortedRanges(ranges.toArray(Range[]::new)); } public long get(long key) { - Iterator rangeIterator = rangeIterator(); - long position = 0; - while (rangeIterator.hasNext()) { - Range next = rangeIterator.next(); - if (key < position + next.size()) { - return next.getFirst() + (key - position); - } - position += next.size(); + if (key == 0) { + return sortedRanges[0].getFirst(); + } + ensureCardinalityCache(); + + int pos = Arrays.binarySearch(cardinality, key); + + if (pos >= 0) { + return sortedRanges[pos + 1].getFirst(); } - return -1; + Range target = sortedRanges[-pos - 1]; + long c = cardinality[-pos - 1]; + long offset = c - key;// positive value to offset backwards from the end of target + assert offset >= 0; + return target.getLast() - offset + 1; } } From 3db315ef1c785439f103c7505a92490ca9c1e85d Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Mon, 26 Aug 2024 10:37:46 -0500 Subject: [PATCH 193/219] More tests, fixes for rangeset position support --- .../deephaven/web/shared/data/RangeSet.java | 33 +++++++++++-------- .../web/shared/data/RangeSetTest.java | 21 +++++++++++- 2 files changed, 40 insertions(+), 14 deletions(-) diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java index 65692622eaf..889c0beed90 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java @@ -65,6 +65,7 @@ public void addRange(Range range) { // if empty, add as the only entry if (sortedRanges.length == 0) { sortedRanges = new Range[] {range}; + poisonCache(0); return; } // if one other entry, test if before, after, or overlapping @@ -282,10 +283,10 @@ public int rangeCount() { * @return long */ public long size() { - ensureCardinalityCache(); - if (cardinality.length == 0) { + if (sortedRanges.length == 0) { return 0; } + ensureCardinalityCache(); return cardinality[cardinality.length - 1]; } @@ -373,8 +374,8 @@ public int hashCode() { } /** - * Indicates that this item has been changed, and should be recomputed. Stores the earliest offset that should - * be recomputed. + * Indicates that this item has been changed, and should be recomputed. Stores the earliest offset that should be + * recomputed. */ private void poisonCache(int rangeIndex) { firstWrongCacheEntry = Math.min(rangeIndex, firstWrongCacheEntry); @@ -384,19 +385,22 @@ private void poisonCache(int rangeIndex) { * Ensures that the cardinality cache is correct, by correcting any values after the first wrong entry. 
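Taken together, the rewritten size() and get() turn the cardinality array into a prefix-sum index over the sorted ranges: cardinality[i] caches the total number of keys in ranges 0..i, so a position lookup becomes a binary search instead of the previous linear walk over a rangeIterator. A condensed standalone sketch of the lookup half, with hypothetical names and sample data:

    import java.util.Arrays;

    /** Prefix-sum lookup over disjoint sorted ranges [first[i], last[i]]. */
    class RangeIndex {
        final long[] first = {2, 10, 100}; // sample ranges: [2,3], [10,14], [100,100]
        final long[] last = {3, 14, 100};
        final long[] card = new long[first.length];

        RangeIndex() {
            long total = 0;
            for (int i = 0; i < first.length; i++) {
                total += last[i] - first[i] + 1;
                card[i] = total; // card = {2, 7, 8}
            }
        }

        /** Returns the key at a 0-based position, like RangeSet.get(long). */
        long get(long position) {
            // Search for position + 1 so a position at the very end of range i lands on i
            int pos = Arrays.binarySearch(card, position + 1);
            if (pos < 0) {
                pos = -pos - 1; // first range whose cumulative size exceeds position
            }
            long before = pos == 0 ? 0 : card[pos - 1]; // keys preceding this range
            return first[pos] + (position - before);    // get(2) == 10, get(6) == 14
        }
    }

Searching for position + 1 lets an exact hit and an insertion point resolve to the same containing range; the follow-up patch below applies the same trick to subsetForPositions, which is why its special-cased pos++ branch can go away.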
*/ private void ensureCardinalityCache() { - assert firstWrongCacheEntry >= 0;// && firstWrongCacheEntry < cardinality.length; + if (firstWrongCacheEntry == sortedRanges.length) { + return; + } if (cardinality.length < sortedRanges.length) { long[] replacement = new long[sortedRanges.length]; System.arraycopy(cardinality, 0, replacement, 0, cardinality.length); cardinality = replacement; } + assert firstWrongCacheEntry >= 0 : this; long cumulative = firstWrongCacheEntry == 0 ? 0 : cardinality[firstWrongCacheEntry - 1]; for (int i = firstWrongCacheEntry; i < sortedRanges.length; i++) { cumulative += sortedRanges[i].size(); this.cardinality[i] = cumulative; } firstWrongCacheEntry = sortedRanges.length; - assert cardinality.length == sortedRanges.length; + assert cardinality.length == sortedRanges.length : this; } public RangeSet subsetForPositions(RangeSet positions, boolean reversed) { @@ -418,10 +422,12 @@ public RangeSet subsetForPositions(RangeSet positions, boolean reversed) { if (nextPosRange.getFirst() > size()) { // Entire range is past the end - since ranges are sorted, we're done break; + } else if (nextPosRange.getFirst() == size()) { + ranges.add(new Range(getLastRow(), getLastRow())); + break; } long rangeToTake = nextPosRange.size(); - System.out.println("cardinality.length=" + cardinality.length); int pos = Arrays.binarySearch(cardinality, from, cardinality.length, nextPosRange.getFirst()); long first; @@ -431,7 +437,6 @@ public RangeSet subsetForPositions(RangeSet positions, boolean reversed) { // Position matches the last item in the current range pos++; target = sortedRanges[pos]; -// first = target.getFirst(); offset = 1; } else { // Position matches an earlier item in @@ -440,12 +445,11 @@ public RangeSet subsetForPositions(RangeSet positions, boolean reversed) { long c = cardinality[pos]; offset = c - nextPosRange.getFirst();// positive value to offset backwards from the end of target } - assert offset >= 0; + assert offset >= 0 && offset <= target.size() : offset; first = target.getLast() - offset + 1; - while (rangeToTake > 0) { - long count = Math.min(offset, target.size()); + long count = Math.min(offset, rangeToTake); Range res = new Range(first, first + count - 1); assert count == res.size(); ranges.add(res); @@ -457,11 +461,14 @@ public RangeSet subsetForPositions(RangeSet positions, boolean reversed) { } target = sortedRanges[pos]; first = target.getFirst(); + offset = target.size(); } - from = pos; + from = pos - 1; } - return RangeSet.fromSortedRanges(ranges.toArray(Range[]::new)); + RangeSet result = RangeSet.fromSortedRanges(ranges.toArray(new Range[0])); + assert result.size() <= positions.size(); + return result; } public long get(long key) { diff --git a/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java b/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java index 631fbafdfb2..3dba8512774 100644 --- a/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java +++ b/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java @@ -488,6 +488,25 @@ public void testSubsetForPostions() { assertEquals(RangeSet.ofItems(2, 4, 6), initialRange.subsetForPositions(RangeSet.ofRange(0, 2), false)); assertEquals(initialRange, initialRange.subsetForPositions(RangeSet.ofRange(0, 3), false)); assertEquals(initialRange, initialRange.subsetForPositions(RangeSet.ofRange(0, 9), false)); + + initialRange = RangeSet.ofRange(10, 109); + assertEquals(RangeSet.ofItems(12, 14), 
initialRange.subsetForPositions(RangeSet.ofItems(2, 4), false)); + assertEquals(RangeSet.ofItems(12, 14), initialRange.subsetForPositions(RangeSet.ofItems(2, 4, 101), false)); + + assertEquals(RangeSet.empty(), RangeSet.empty().subsetForPositions(RangeSet.ofItems(0), false)); + assertEquals(RangeSet.ofItems(99), + RangeSet.ofRange(0, 99).subsetForPositions(RangeSet.ofRange(100, 104), false)); + + initialRange = RangeSet.empty(); + assertEquals(0, initialRange.size()); + initialRange.addRange(new Range(0, 1)); + assertEquals(2, initialRange.size()); + initialRange.addRange(new Range(2, 3)); + assertEquals(4, initialRange.size()); + initialRange.removeRange(new Range(0, 3)); + assertEquals(0, initialRange.size()); + initialRange.addRange(new Range(0, 1)); + assertEquals(2, initialRange.size()); } @Test @@ -496,7 +515,7 @@ public void testGet() { RangeSet initialRange = RangeSet.ofItems(rows); for (int i = 0; i < rows.length; i++) { - assertEquals(rows[i], initialRange.get(i)); + assertEquals("i=" + i, rows[i], initialRange.get(i)); } initialRange.removeRange(new Range(0, 1)); From a5872943b662167907448c6e753c32ca27abefc0 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Mon, 26 Aug 2024 13:19:30 -0500 Subject: [PATCH 194/219] Add a more efficient impl for bulk adding ranges --- .../web/client/api/barrage/WebBarrageUtils.java | 2 +- .../api/barrage/data/WebBarrageSubscription.java | 6 +++--- .../java/io/deephaven/web/shared/data/RangeSet.java | 10 ++++++++++ 3 files changed, 14 insertions(+), 4 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java index 90419c70057..648025723a5 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/WebBarrageUtils.java @@ -115,7 +115,7 @@ public static ByteBuffer serializeRanges(Set rangeSets) { } else { s = new RangeSet(); for (RangeSet rangeSet : rangeSets) { - rangeSet.rangeIterator().forEachRemaining(s::addRange); + s.addRangeSet(rangeSet); } } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java index 520fe642b89..46fac4ed611 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java @@ -202,7 +202,7 @@ public void applyUpdates(WebBarrageMessage message) { } message.rowsRemoved.rangeIterator().forEachRemaining(currentRowSet::removeRange); - message.rowsAdded.rangeIterator().forEachRemaining(currentRowSet::addRange); + currentRowSet.addRangeSet(message.rowsAdded); state.setSize(message.rowsAdded.size()); dataChangedHandler.onDataChanged(message.rowsAdded, message.rowsRemoved, RangeSet.empty(), message.shifted, new BitSet(0)); @@ -305,12 +305,12 @@ public void applyUpdates(WebBarrageMessage message) { populatedRowsetShifter.flush(); } - message.rowsAdded.rangeIterator().forEachRemaining(currentRowSet::addRange); + currentRowSet.addRangeSet(message.rowsAdded); RangeSet totalMods = new RangeSet(); for (int i = 0; i < message.modColumnData.length; i++) { WebBarrageMessage.ModColumnData column = message.modColumnData[i]; - 
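The added assertions pin down the intended position semantics: positions index the set's keys in ascending order, one contiguous run of positions may split across several key ranges, and positions strictly past the last key are dropped rather than failing. In miniature, using the same calls the tests exercise:

    // Keys {1, 4, 5, 6}: position 0 is key 1, 1 is 4, 2 is 5, 3 is 6
    RangeSet keys = RangeSet.ofItems(1, 4, 5, 6);

    // One contiguous position range can split across key ranges:
    assert keys.subsetForPositions(RangeSet.ofRange(1, 2), false)
            .equals(RangeSet.ofItems(4, 5));

    // Positions beyond the end are ignored:
    assert keys.subsetForPositions(RangeSet.ofItems(0, 2, 100), false)
            .equals(RangeSet.ofItems(1, 5));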
column.rowsModified.rangeIterator().forEachRemaining(totalMods::addRange); + totalMods.addRangeSet(column.rowsModified); } if (!message.rowsIncluded.isEmpty()) { diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java index 889c0beed90..4633289aa55 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java @@ -61,6 +61,16 @@ private static boolean orderedAndNonOverlapping(Range[] sortedRanges) { private int firstWrongCacheEntry = 0; private long[] cardinality = new long[0]; + public void addRangeSet(RangeSet rangeSet) { + if (sortedRanges.length == 0 && rangeSet.sortedRanges.length != 0) { + sortedRanges = new Range[rangeSet.sortedRanges.length]; + System.arraycopy(rangeSet.sortedRanges, 0, sortedRanges, 0, sortedRanges.length); + poisonCache(0); + } else { + rangeSet.rangeIterator().forEachRemaining(this::addRange); + } + } + public void addRange(Range range) { // if empty, add as the only entry if (sortedRanges.length == 0) { From a9f8d2fb3ec60c68695a7d6cf325cb693dd1db10 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Mon, 26 Aug 2024 17:06:37 -0500 Subject: [PATCH 195/219] Add tests, RangeSet fix from call with Nate Remove unless subsetForPosition calls for viewports Correctly free viewport rows when scrolling Avoid extra RangeSet copies when making rows --- .../barrage/data/WebBarrageSubscription.java | 14 +++++++------- .../AbstractTableSubscription.java | 18 ++++++------------ .../io/deephaven/web/shared/data/RangeSet.java | 3 +-- .../web/shared/data/RangeSetTest.java | 10 ++++++++++ 4 files changed, 24 insertions(+), 21 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java index 46fac4ed611..3a11b37158f 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java @@ -228,6 +228,10 @@ public RedirectedImpl(ClientTableState state, ViewportChangedHandler viewportCha @Override public void applyUpdates(WebBarrageMessage message) { + RangeSet populatedRows = serverViewport != null + ? currentRowSet.subsetForPositions(serverViewport, serverReverseViewport) + : null; + if (message.isSnapshot) { updateServerViewport(message.snapshotRowSet, message.snapshotColumns, message.snapshotRowSetIsReversed); viewportChangedHandler.onServerViewportChanged(serverViewport, serverColumns, serverReverseViewport); @@ -235,10 +239,6 @@ public void applyUpdates(WebBarrageMessage message) { final boolean mightBeInitialSnapshot = getCurrentRowSet().isEmpty() && message.isSnapshot; - RangeSet populatedRows = - serverViewport != null ? 
currentRowSet.subsetForPositions(serverViewport, serverReverseViewport) - : null; - // Apply removes to our local rowset message.rowsRemoved.rangeIterator().forEachRemaining(currentRowSet::removeRange); if (serverViewport != null) { @@ -346,7 +346,8 @@ public void applyUpdates(WebBarrageMessage message) { PrimitiveIterator.OfLong destIter = destinationRowSet.indexIterator(); while (srcIter.hasNext()) { assert destIter.hasNext(); - redirectedIndexes.put(srcIter.next(), destIter.next()); + Long old = redirectedIndexes.put(srcIter.next(), destIter.next()); + assert old == null; } assert !destIter.hasNext(); } @@ -367,8 +368,7 @@ public void applyUpdates(WebBarrageMessage message) { } assert !destIterator.hasNext(); } - if (serverViewport != null) { - assert populatedRows != null; + if (serverViewport != null && populatedRows != null) { RangeSet newPopulated = currentRowSet.subsetForPositions(serverViewport, serverReverseViewport); newPopulated.rangeIterator().forEachRemaining(populatedRows::removeRange); freeRows(populatedRows); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java index dd51f565104..d77b05c68e1 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java @@ -230,13 +230,10 @@ protected void notifyUpdate(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet t barrageSubscription, rowStyleColumn, columns, - transformRowsetForConsumer(rowsAdded, barrageSubscription.getServerViewport(), - barrageSubscription.isReversed()), - transformRowsetForConsumer(rowsRemoved, barrageSubscription.getServerViewport(), - barrageSubscription.isReversed()), - transformRowsetForConsumer(totalMods, barrageSubscription.getServerViewport(), - barrageSubscription.isReversed()), - barrageSubscription.getServerViewport() != null ? 
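The relocation at the top of applyUpdates is an ordering fix: the previously populated rows must be captured from the pre-update rowset, before snapshot handling can replace the server viewport, and re-derived afterwards so that the difference is exactly the set of rows whose data can be freed. The same capture-first pattern with bare RangeSets (the final free step stands in for the real cleanup):

    RangeSet current = RangeSet.ofRange(0, 99);  // keys before the update
    RangeSet viewport = RangeSet.ofRange(0, 9);  // positions the client watches
    RangeSet wasPopulated = current.subsetForPositions(viewport, false); // capture first

    current.removeRange(new Range(0, 4));        // server removes some rows

    RangeSet nowPopulated = current.subsetForPositions(viewport, false); // keys 5..14
    nowPopulated.rangeIterator().forEachRemaining(wasPopulated::removeRange);
    // wasPopulated is now {0..4}: exactly the rows that are no longer visible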
null : shifted); + rowsAdded, + rowsRemoved, + totalMods, + shifted); CustomEventInit event = CustomEventInit.create(); event.setDetail(detail); fireEvent(TableSubscription.EVENT_UPDATED, event); @@ -366,14 +363,11 @@ public void setOffset(double offset) { public JsArray getRows() { if (allRows == null) { allRows = new JsArray<>(); - RangeSet rowSet = subscription.getCurrentRowSet(); - RangeSet positions = - transformRowsetForConsumer(rowSet, subscription.getServerViewport(), subscription.isReversed()); - positions.indexIterator().forEachRemaining((long index) -> { + fullRowSet.getRange().indexIterator().forEachRemaining((long index) -> { allRows.push(makeRow(index)); }); if (JsSettings.isDevMode()) { - assert allRows.length == positions.size(); + assert allRows.length == fullRowSet.getSize(); } } return (JsArray) (JsArray) allRows; diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java index 4633289aa55..1ada17919b2 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java @@ -438,14 +438,13 @@ public RangeSet subsetForPositions(RangeSet positions, boolean reversed) { } long rangeToTake = nextPosRange.size(); - int pos = Arrays.binarySearch(cardinality, from, cardinality.length, nextPosRange.getFirst()); + int pos = Arrays.binarySearch(cardinality, from, cardinality.length, nextPosRange.getFirst() + 1); long first; Range target; long offset; if (pos >= 0) { // Position matches the last item in the current range - pos++; target = sortedRanges[pos]; offset = 1; } else { diff --git a/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java b/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java index 3dba8512774..cc7354a0f3e 100644 --- a/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java +++ b/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java @@ -507,6 +507,16 @@ public void testSubsetForPostions() { assertEquals(0, initialRange.size()); initialRange.addRange(new Range(0, 1)); assertEquals(2, initialRange.size()); + + initialRange = RangeSet.ofItems(1, 4, 5, 6); + assertEquals(RangeSet.ofItems(1, 4, 5, 6), initialRange.subsetForPositions(RangeSet.ofRange(0, 3), false)); + assertEquals(RangeSet.ofItems(1, 5, 6), initialRange.subsetForPositions(RangeSet.ofItems(0, 2, 3), false)); + assertEquals(RangeSet.ofItems(1, 4, 6), initialRange.subsetForPositions(RangeSet.ofItems(0, 1, 3), false)); + assertEquals(RangeSet.ofItems(1, 4, 5), initialRange.subsetForPositions(RangeSet.ofItems(0, 1, 2), false)); + assertEquals(RangeSet.ofItems(1, 5), initialRange.subsetForPositions(RangeSet.ofItems(0, 2), false)); + assertEquals(RangeSet.ofItems(4, 5), initialRange.subsetForPositions(RangeSet.ofRange(1, 2), false)); + assertEquals(RangeSet.ofItems(4, 5, 6), initialRange.subsetForPositions(RangeSet.ofRange(1, 3), false)); + assertEquals(RangeSet.ofItems(5, 6), initialRange.subsetForPositions(RangeSet.ofRange(2, 3), false)); } @Test From 1ffd8f3d5330644b272575e7cdb1746df99db900 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Mon, 26 Aug 2024 18:57:30 -0500 Subject: [PATCH 196/219] Remove overzealous assert, spotless --- .../client/api/barrage/data/WebBarrageSubscription.java | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git 
a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java index 3a11b37158f..712132f2a12 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java @@ -229,8 +229,8 @@ public RedirectedImpl(ClientTableState state, ViewportChangedHandler viewportCha @Override public void applyUpdates(WebBarrageMessage message) { RangeSet populatedRows = serverViewport != null - ? currentRowSet.subsetForPositions(serverViewport, serverReverseViewport) - : null; + ? currentRowSet.subsetForPositions(serverViewport, serverReverseViewport) + : null; if (message.isSnapshot) { updateServerViewport(message.snapshotRowSet, message.snapshotColumns, message.snapshotRowSetIsReversed); @@ -346,8 +346,7 @@ public void applyUpdates(WebBarrageMessage message) { PrimitiveIterator.OfLong destIter = destinationRowSet.indexIterator(); while (srcIter.hasNext()) { assert destIter.hasNext(); - Long old = redirectedIndexes.put(srcIter.next(), destIter.next()); - assert old == null; + redirectedIndexes.put(srcIter.next(), destIter.next()); } assert !destIter.hasNext(); } From 9d7046d0fbccfbdc859f0f9cacb11960863f65ae Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Mon, 26 Aug 2024 20:34:22 -0500 Subject: [PATCH 197/219] Review feedback, remove dead code, tidy TODOs --- .../java/io/deephaven/util/MultiException.java | 2 +- .../io/deephaven/web/client/api/TableData.java | 2 +- .../subscription/AbstractTableSubscription.java | 2 -- .../subscription/TableViewportSubscription.java | 2 +- .../web/client/api/tree/JsTreeTable.java | 3 --- .../web/client/state/ClientTableState.java | 17 ++++++++--------- .../deephaven/engine/rowset/WebRowSetImpl.java | 6 +++--- .../super/io/deephaven/util/MultiException.java | 2 +- 8 files changed, 15 insertions(+), 21 deletions(-) diff --git a/Util/src/main/java/io/deephaven/util/MultiException.java b/Util/src/main/java/io/deephaven/util/MultiException.java index f2dcc13a90c..e5ce94740c8 100644 --- a/Util/src/main/java/io/deephaven/util/MultiException.java +++ b/Util/src/main/java/io/deephaven/util/MultiException.java @@ -105,7 +105,7 @@ public void printStackTrace(PrintStream s) { @Override public String getMessage() { StringBuilder sb = new StringBuilder(); - sb.append(super.getMessage()).append(": \n"); + sb.append(super.getMessage()).append(":\n"); for (int i = 0; i < causes.length; i++) { sb.append("Cause ").append(i).append(": "); sb.append(causes[i].toString()); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/TableData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/TableData.java index 9ba1e7f3608..19df98c4221 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/TableData.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/TableData.java @@ -3,7 +3,6 @@ // package io.deephaven.web.client.api; -import com.vertispan.tsdefs.annotations.TsName; import com.vertispan.tsdefs.annotations.TsTypeRef; import com.vertispan.tsdefs.annotations.TsUnion; import com.vertispan.tsdefs.annotations.TsUnionMember; @@ -99,6 +98,7 @@ default Row get(RowPositionUnion index) { * @param column the column to read * @return the value in the table */ + // TODO (deephaven-core#5927) Consider a get/fillChunk API as an efficient alternative @JsMethod default Any 
getData(RowPositionUnion index, Column column) { if (index.isLongWrapper()) { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java index d77b05c68e1..0dbb3646cca 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java @@ -219,8 +219,6 @@ private void onDataChanged(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet to return; } - // TODO if this was a snapshot (or subscriptionReady was false for some interval), we probably need to - // notify of the entire table as a single big change notifyUpdate(rowsAdded, rowsRemoved, totalMods, shifted); } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java index 2fb32a80d27..60c76eaf8c2 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java @@ -177,7 +177,7 @@ public void fireEvent(String type, CustomEventInit init) { @Override public void fireEvent(String type, CustomEvent e) { - if (type.equals(e.type)) { + if (!type.equals(e.type)) { throw new IllegalArgumentException(type + " != " + e.type); } refire(e); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java index 2dc7ca3a682..7163ae034d9 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java @@ -533,7 +533,6 @@ public Format getFormat(Column column) { } } - // TODO move to superclass and check on viewport change private RangeSet serverViewport; public TreeSubscription(ClientTableState state, WorkerConnection connection) { @@ -626,8 +625,6 @@ private void replaceSubscription(RebuildStep step) { BitSet columnsBitset = makeColumnSubscriptionBitset(); RangeSet range = RangeSet.ofRange((long) (double) firstRow, (long) (double) lastRow); - // Column[] queryColumns = this.columns; - boolean alwaysFireEvent = this.alwaysFireNextEvent; this.alwaysFireNextEvent = false; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java b/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java index a4698bf1dd9..7eff8266775 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java @@ -301,19 +301,18 @@ public Class[] columnTypes() { } /** - * Returns the Java Class to represent the component type in any list/array type. Only used to detect if a byte[] - * should be used for holding data at this time. + * Returns the Java Class to represent the component type in any list/array type. At this time, this value is not + * used by the chunk reading implementation. 
*/ public Class[] componentTypes() { - // The only componentType that matters is byte.class return Arrays.stream(tableDef.getColumns()).map(ColumnDefinition::getType).map(t -> { - if (!t.endsWith("[]")) { - return null; + // All arrays and vectors will be handled as objects for now. + // TODO (deephaven-core#2102) clarify if we need to handle these cases at all + if (t.endsWith("[]") || t.endsWith("Vector")) { + return Object.class; } - if (t.equals("io.deephaven.vector.ByteVector[]")) { - return byte.class; - } - return Object.class; + // Non-arrays or vectors should return null + return null; }).toArray(Class[]::new); } diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WebRowSetImpl.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WebRowSetImpl.java index aa026c56a9d..abd578086a6 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WebRowSetImpl.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/engine/rowset/WebRowSetImpl.java @@ -44,15 +44,15 @@ public void forAllRowKeyRanges(LongRangeConsumer lrc) { @Override public long get(long position) { - return -1; + return rangeSet.get(position); } @Override public WritableRowSet intersect(RowSet rowSet) { - return this; + throw new UnsupportedOperationException("intersect"); } @Override public WritableRowSet shift(long shiftAmount) { - return this; + throw new UnsupportedOperationException("shift"); } diff --git a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/util/MultiException.java b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/util/MultiException.java index 46578c399b5..25c80ae422b 100644 --- a/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/util/MultiException.java +++ b/web/client-api/src/main/resources/io/deephaven/web/super/io/deephaven/util/MultiException.java @@ -57,7 +57,7 @@ public Throwable[] getCauses() { @Override public String getMessage() { StringBuilder sb = new StringBuilder(); - sb.append(super.getMessage()).append(": \n"); + sb.append(super.getMessage()).append(":\n"); for (int i = 0; i < causes.length; i++) { sb.append("Cause ").append(i).append(": "); sb.append(causes[i].toString()); From edf658a8d511a31da35bd146bd8a780577131676 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 30 Aug 2024 11:42:15 -0500 Subject: [PATCH 198/219] Avoid leaking unsubscribed tables --- .../web/client/api/subscription/AbstractTableSubscription.java | 1 + 1 file changed, 1 insertion(+) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java index 0dbb3646cca..f0e5741b01e 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java @@ -490,6 +490,7 @@ public JsArray getColumns() { * Stops the subscription on the server. 
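The componentTypes rewrite above inverts the old special-casing: instead of singling out io.deephaven.vector.ByteVector[] and treating every other array as Object, any array or Vector column now reports Object.class (elements handled as opaque objects) and scalar columns report null. The rule, isolated for illustration:

    static Class<?> componentTypeFor(String type) {
        // Arrays and Vectors are handled as objects for now; scalars have no component type
        return (type.endsWith("[]") || type.endsWith("Vector")) ? Object.class : null;
    }

    // componentTypeFor("int")                            -> null
    // componentTypeFor("java.lang.String[]")             -> Object.class
    // componentTypeFor("io.deephaven.vector.ByteVector") -> Object.class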
*/ public void close() { + state.unretain(this); if (doExchange != null) { doExchange.end(); doExchange.cancel(); From 82b57f777e57ba071bc18199f2941836c7ea8d24 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 30 Aug 2024 11:42:28 -0500 Subject: [PATCH 199/219] If a table is closed but viewport sub is used, remove reconnect --- .../api/subscription/TableViewportSubscription.java | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java index 60c76eaf8c2..182e2904c32 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java @@ -30,6 +30,7 @@ import io.deephaven.web.shared.data.Range; import io.deephaven.web.shared.data.RangeSet; import io.deephaven.web.shared.data.ShiftedRange; +import io.deephaven.web.shared.fu.RemoverFn; import jsinterop.annotations.JsMethod; import jsinterop.annotations.JsNullable; import jsinterop.annotations.JsOptional; @@ -63,6 +64,7 @@ public class TableViewportSubscription extends AbstractTableSubscription { private double refresh; private final JsTable original; + private final RemoverFn reconnectSubscription; /** * true if the sub is set up to not close the underlying table once the original table is done with it, otherwise @@ -74,7 +76,6 @@ public class TableViewportSubscription extends AbstractTableSubscription { */ private boolean retained; - private UpdateEventData viewportData; public TableViewportSubscription(double firstRow, double lastRow, Column[] columns, Double updateIntervalMs, @@ -84,10 +85,10 @@ public TableViewportSubscription(double firstRow, double lastRow, Column[] colum this.lastRow = lastRow; this.columns = columns; - refresh = updateIntervalMs == null ? 1000.0 : updateIntervalMs; + this.refresh = updateIntervalMs == null ? 1000.0 : updateIntervalMs; this.original = existingTable; - existingTable.addEventListener(JsTable.EVENT_RECONNECT, e -> { + this.reconnectSubscription = existingTable.addEventListener(JsTable.EVENT_RECONNECT, e -> { if (existingTable.state() == state()) { revive(); } @@ -285,6 +286,8 @@ public void internalClose() { // indicate that the base table shouldn't get events anymore, even if it is still retained elsewhere originalActive = false; + reconnectSubscription.remove(); + if (retained || status == Status.DONE) { // the JsTable has indicated it is no longer interested in this viewport, but other calling // code has retained it, keep it open for now. 
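The two leak fixes above follow one rule: everything acquired when the subscription is built must be released in close(), including the event registration, since the RemoverFn returned by addEventListener is the only way to sever the back-reference that keeps the subscription reachable from the table. The shape of the pattern, reduced to a runnable sketch with hypothetical names:

    import java.util.ArrayList;
    import java.util.List;

    class EventSource {
        interface RemoverFn { void remove(); }
        private final List<Runnable> listeners = new ArrayList<>();

        RemoverFn addEventListener(Runnable listener) {
            listeners.add(listener);
            return () -> listeners.remove(listener); // handle that undoes the registration
        }
    }

    class Subscription {
        private final EventSource.RemoverFn reconnectSubscription;

        Subscription(EventSource table) {
            // Keep the handle so close() can unhook us later
            this.reconnectSubscription = table.addEventListener(this::revive);
        }

        void revive() {}

        void close() {
            reconnectSubscription.remove(); // without this, the source pins the subscription
        }
    }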
From ad0a79abe814facd5412b951e24507c803808c67 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 30 Aug 2024 12:08:12 -0500 Subject: [PATCH 200/219] Correctly grow populated rows while shifting --- .../web/client/api/barrage/data/WebBarrageSubscription.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java index 712132f2a12..f12dcd31321 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java @@ -276,6 +276,9 @@ public void applyUpdates(WebBarrageMessage message) { Long oldValue = redirectedIndexes.put(shiftedKey, redirectedIndexes.remove(key)); assert oldValue == null : shiftedKey + " already has a value, " + oldValue; currentRowsetShifter.append(shiftedKey); + if (populatedRowsetShifter != null) { + populatedRowsetShifter.append(shiftedKey); + } } } if (hasReverseShift) { @@ -297,6 +300,9 @@ public void applyUpdates(WebBarrageMessage message) { Long oldValue = redirectedIndexes.put(shiftedKey, redirectedIndexes.remove(key)); assert oldValue == null : shiftedKey + " already has a value, " + oldValue; currentRowsetShifter.append(shiftedKey); + if (populatedRowsetShifter != null) { + populatedRowsetShifter.append(shiftedKey); + } } } } From d50c8e46b56868810956a94e5498b6b99b2d5c2e Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 30 Aug 2024 15:33:13 -0500 Subject: [PATCH 201/219] Correct assert in RangeSet, and limit range that is searched --- .../src/main/java/io/deephaven/web/shared/data/RangeSet.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java index 1ada17919b2..eba9f5461d4 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java @@ -410,7 +410,7 @@ private void ensureCardinalityCache() { this.cardinality[i] = cumulative; } firstWrongCacheEntry = sortedRanges.length; - assert cardinality.length == sortedRanges.length : this; + assert cardinality.length >= sortedRanges.length : this; } public RangeSet subsetForPositions(RangeSet positions, boolean reversed) { @@ -438,7 +438,7 @@ public RangeSet subsetForPositions(RangeSet positions, boolean reversed) { } long rangeToTake = nextPosRange.size(); - int pos = Arrays.binarySearch(cardinality, from, cardinality.length, nextPosRange.getFirst() + 1); + int pos = Arrays.binarySearch(cardinality, from, sortedRanges.length, nextPosRange.getFirst() + 1); long first; Range target; From 3cb16b4424697d5a86718fa29678734acd75f246 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 3 Sep 2024 15:57:43 -0500 Subject: [PATCH 202/219] Create an intermediate flattened state before viewport sub --- .../io/deephaven/web/client/api/JsTable.java | 2 +- .../TableViewportSubscription.java | 45 ++++++++++++++----- 2 files changed, 35 insertions(+), 12 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java index b31f2c97808..21fd3ca9eff 100644 --- 
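Both shift loops in the patch above move entries with put(shiftedKey, remove(key)), which is only collision-free if a positive delta is applied from the highest key downward and a negative delta from the lowest key upward; the fix itself is narrower, ensuring the populated-rows shifter records the same moves as the current-rowset shifter so the two sets cannot drift apart. The ordering half of that contract, as a runnable sketch (hypothetical ShiftDemo):

    import java.util.ArrayList;
    import java.util.NavigableMap;
    import java.util.NavigableSet;
    import java.util.TreeMap;

    class ShiftDemo {
        /** Shift all keys in [first, last] by delta without clobbering neighbors. */
        static void shift(NavigableMap<Long, String> map, long first, long last, long delta) {
            NavigableSet<Long> range = map.subMap(first, true, last, true).navigableKeySet();
            // Positive delta: visit high-to-low so each destination is already vacated
            for (Long key : new ArrayList<>(delta > 0 ? range.descendingSet() : range)) {
                String old = map.put(key + delta, map.remove(key));
                assert old == null : "destination already occupied";
            }
        }

        public static void main(String[] args) {
            NavigableMap<Long, String> m = new TreeMap<>();
            m.put(5L, "a"); m.put(6L, "b"); m.put(7L, "c");
            shift(m, 5, 7, 2); // {7=a, 8=b, 9=c}; low-to-high would collide at key 7
        }
    }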
a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java @@ -715,7 +715,7 @@ public TableViewportSubscription setViewport(double firstRow, double lastRow, // rewrap current state in a new one, when ready the viewport will be applied TableViewportSubscription replacement = - new TableViewportSubscription(firstRow, lastRow, columnsCopy, updateIntervalMs, this); + TableViewportSubscription.make(firstRow, lastRow, columnsCopy, updateIntervalMs, this); subscriptions.put(currentState.getHandle(), replacement); return replacement; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java index 182e2904c32..d04d8b95ad0 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java @@ -15,11 +15,13 @@ import io.deephaven.extensions.barrage.BarrageSnapshotOptions; import io.deephaven.extensions.barrage.ColumnConversionMode; import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb.FlightData; +import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.table_pb.FlattenRequest; import io.deephaven.util.mutable.MutableLong; import io.deephaven.web.client.api.Column; import io.deephaven.web.client.api.JsRangeSet; import io.deephaven.web.client.api.JsTable; import io.deephaven.web.client.api.TableData; +import io.deephaven.web.client.api.WorkerConnection; import io.deephaven.web.client.api.barrage.WebBarrageMessage; import io.deephaven.web.client.api.barrage.WebBarrageStreamReader; import io.deephaven.web.client.api.barrage.WebBarrageUtils; @@ -27,6 +29,7 @@ import io.deephaven.web.client.api.barrage.stream.BiDiStream; import io.deephaven.web.client.fu.JsLog; import io.deephaven.web.client.fu.LazyPromise; +import io.deephaven.web.client.state.ClientTableState; import io.deephaven.web.shared.data.Range; import io.deephaven.web.shared.data.RangeSet; import io.deephaven.web.shared.data.ShiftedRange; @@ -66,6 +69,9 @@ public class TableViewportSubscription extends AbstractTableSubscription { private final JsTable original; private final RemoverFn reconnectSubscription; + /** The initial state of the provided table, before flattening. */ + private final ClientTableState initialState; + /** * true if the sub is set up to not close the underlying table once the original table is done with it, otherwise * false. 
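The hunk below replaces the public constructor with a static make() factory: before any subscription is created, the source table is flattened on the server, so viewport positions always address a flat, contiguous key space. Condensed to its core (names as in the hunk below; error handling and the setInternalViewport wiring elided), the flow is:

    // Sketch: export a flattened copy of the source table, then subscribe to that copy.
    ClientTableState tableState = existingTable.state();
    WorkerConnection connection = existingTable.getConnection();
    ClientTableState flattenedState = connection.newState((callback, newState, metadata) -> {
        FlattenRequest flatten = new FlattenRequest();
        flatten.setSourceId(tableState.getHandle().makeTableReference()); // table to flatten
        flatten.setResultId(newState.getHandle().makeTicket());           // ticket for the result
        connection.tableServiceClient().flatten(flatten, metadata, callback::apply);
    }, "flatten");
    flattenedState.refetch(null, connection.metadata()); // kicks off the server call

The original, unflattened state is remembered as initialState so that reconnect handling and event refiring can still be matched against the table the caller actually holds.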
@@ -78,18 +84,35 @@ public class TableViewportSubscription extends AbstractTableSubscription { private UpdateEventData viewportData; - public TableViewportSubscription(double firstRow, double lastRow, Column[] columns, Double updateIntervalMs, - JsTable existingTable) { - super(existingTable.state(), existingTable.getConnection()); - this.firstRow = firstRow; - this.lastRow = lastRow; - this.columns = columns; + public static TableViewportSubscription make(double firstRow, double lastRow, Column[] columns, + Double updateIntervalMs, JsTable existingTable) { + ClientTableState tableState = existingTable.state(); + WorkerConnection connection = existingTable.getConnection(); + + ClientTableState flattenedState = connection.newState((callback, newState, metadata) -> { + FlattenRequest flatten = new FlattenRequest(); + flatten.setSourceId(tableState.getHandle().makeTableReference()); + flatten.setResultId(newState.getHandle().makeTicket()); + connection.tableServiceClient().flatten(flatten, metadata, callback::apply); + }, "flatten"); + flattenedState.refetch(null, connection.metadata()).then(result -> { + return null; + }, err -> { + return null; + }); + + TableViewportSubscription sub = new TableViewportSubscription(flattenedState, connection, existingTable); + sub.setInternalViewport(firstRow, lastRow, columns, updateIntervalMs, false); + return sub; + } - this.refresh = updateIntervalMs == null ? 1000.0 : updateIntervalMs; + public TableViewportSubscription(ClientTableState state, WorkerConnection connection, JsTable existingTable) { + super(state, connection); this.original = existingTable; + initialState = existingTable.state(); this.reconnectSubscription = existingTable.addEventListener(JsTable.EVENT_RECONNECT, e -> { - if (existingTable.state() == state()) { + if (existingTable.state() == initialState) { revive(); } }); @@ -186,7 +209,7 @@ public void fireEvent(String type, CustomEvent e) { @Override public boolean hasListeners(String name) { - if (originalActive && state() == original.state()) { + if (originalActive && initialState == original.state()) { if (original.hasListeners(name)) { return true; } @@ -204,12 +227,12 @@ public boolean hasListeners(String name) { private void refire(CustomEvent e) { // explicitly calling super.fireEvent to avoid calling ourselves recursively super.fireEvent(e.type, e); - if (originalActive && state() == original.state()) { + if (originalActive && initialState == original.state()) { // When these fail to match, it probably means that the original's state was paused, but we're still // holding on to it. Since we haven't been internalClose()d yet, that means we're still waiting for // the new state to resolve or fail, so we can be restored, or stopped. In theory, we should put this // assert back, and make the pause code also tell us to pause. 
- // assert state() == original.state() : "Table owning this viewport subscription forgot to release it"; + // assert initialState == original.state() : "Table owning this viewport subscription forgot to release it"; original.fireEvent(e.type, e); } } From 4137015d8d21c1e0a553a32c63cb0e3814315575 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 3 Sep 2024 16:01:00 -0500 Subject: [PATCH 203/219] Correctly subset the removed rows when there is a viewport --- .../barrage/data/WebBarrageSubscription.java | 20 +++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java index f12dcd31321..70adc4700d2 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java @@ -241,12 +241,18 @@ public void applyUpdates(WebBarrageMessage message) { // Apply removes to our local rowset message.rowsRemoved.rangeIterator().forEachRemaining(currentRowSet::removeRange); - if (serverViewport != null) { + + RangeSet removed = message.rowsRemoved; + if (populatedRows != null) { // limit the removed rows to what intersect the viewport - serverViewport.rangeIterator().forEachRemaining(r -> message.rowsRemoved.removeRange(r)); + RangeSet populatedCopy = populatedRows.copy(); + message.rowsRemoved.rangeIterator().forEachRemaining(populatedCopy::removeRange); + removed = populatedRows.copy(); + populatedCopy.rangeIterator().forEachRemaining(removed::removeRange); + removed.rangeIterator().forEachRemaining(populatedRows::removeRange); } // free rows that are no longer needed - freeRows(message.rowsRemoved); + freeRows(removed); // Apply shifts @@ -380,7 +386,7 @@ public void applyUpdates(WebBarrageMessage message) { } state.setSize(currentRowSet.size()); - dataChangedHandler.onDataChanged(message.rowsAdded, message.rowsRemoved, totalMods, message.shifted, + dataChangedHandler.onDataChanged(message.rowsAdded, removed, totalMods, message.shifted, modifiedColumnSet); } @@ -449,8 +455,10 @@ private RangeSet getFreeRows(long size) { private void freeRows(RangeSet removed) { RangeSetBulkHelper reusableHelper = new RangeSetBulkHelper(freeset, RangeSetBulkHelper.Operation.APPEND); removed.indexIterator().forEachRemaining((long index) -> { - long dest = redirectedIndexes.remove(index); - reusableHelper.append(dest); + Long dest = redirectedIndexes.remove(index); + if (dest != null) { + reusableHelper.append(dest); + } }); reusableHelper.flush(); } From 93220f4508415fed6ac2a4d09ef2e512f319ab61 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 3 Sep 2024 16:02:11 -0500 Subject: [PATCH 204/219] Try to more efficiently shift when there is a viewport --- .../barrage/data/WebBarrageSubscription.java | 46 ++++++++++++++++--- .../deephaven/web/shared/data/RangeSet.java | 34 ++++++++++++++ .../web/shared/data/ShiftedRange.java | 12 +++++ .../web/shared/data/RangeSetTest.java | 35 ++++++++++++++ 4 files changed, 121 insertions(+), 6 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java index 70adc4700d2..bc8679be58f 100644 --- 
a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java @@ -259,7 +259,9 @@ public void applyUpdates(WebBarrageMessage message) { // Shift moved rows in the redir index boolean hasReverseShift = COLUMNS_AS_LIST; final ShiftedRange[] shiftedRanges = message.shifted; - RangeSetBulkHelper currentRowsetShifter = + RangeSetBulkHelper currentRowsetAddShifter = + new RangeSetBulkHelper(currentRowSet, RangeSetBulkHelper.Operation.APPEND); + RangeSetBulkHelper currentRowsetRemoveShifter = new RangeSetBulkHelper(currentRowSet, RangeSetBulkHelper.Operation.APPEND); RangeSetBulkHelper populatedRowsetShifter = populatedRows == null ? null : new RangeSetBulkHelper(populatedRows, RangeSetBulkHelper.Operation.APPEND); @@ -270,8 +272,15 @@ public void applyUpdates(WebBarrageMessage message) { hasReverseShift = true; continue; } - currentRowSet.removeRange(shiftedRange.getRange()); + currentRowsetRemoveShifter.appendRange(shiftedRange.getRange()); + currentRowsetAddShifter.appendRange(shiftedRange.getResultRange()); + + // test if shift is in populatedRows before continuing if (populatedRows != null) { + if (!populatedRows.includesAnyOf(shiftedRange.getRange())) { + // no rows were included, we can skip updating populatedRows and redirectedIndexes + continue; + } populatedRows.removeRange(shiftedRange.getRange()); } final NavigableSet toMove = redirectedIndexes.navigableKeySet() @@ -281,12 +290,12 @@ public void applyUpdates(WebBarrageMessage message) { long shiftedKey = key + offset; Long oldValue = redirectedIndexes.put(shiftedKey, redirectedIndexes.remove(key)); assert oldValue == null : shiftedKey + " already has a value, " + oldValue; - currentRowsetShifter.append(shiftedKey); if (populatedRowsetShifter != null) { populatedRowsetShifter.append(shiftedKey); } } } + if (hasReverseShift) { for (int i = 0; i < shiftedRanges.length; ++i) { final ShiftedRange shiftedRange = shiftedRanges[i]; @@ -294,8 +303,14 @@ public void applyUpdates(WebBarrageMessage message) { if (offset > 0) { continue; } - currentRowSet.removeRange(shiftedRange.getRange()); + currentRowsetRemoveShifter.appendRange(shiftedRange.getRange()); + currentRowsetAddShifter.appendRange(shiftedRange.getResultRange()); + if (populatedRows != null) { + if (!populatedRows.includesAnyOf(shiftedRange.getRange())) { + // no rows were included, we can skip updating populatedRows and redirectedIndexes + continue; + } populatedRows.removeRange(shiftedRange.getRange()); } final NavigableSet toMove = redirectedIndexes.navigableKeySet() @@ -305,14 +320,14 @@ public void applyUpdates(WebBarrageMessage message) { long shiftedKey = key + offset; Long oldValue = redirectedIndexes.put(shiftedKey, redirectedIndexes.remove(key)); assert oldValue == null : shiftedKey + " already has a value, " + oldValue; - currentRowsetShifter.append(shiftedKey); if (populatedRowsetShifter != null) { populatedRowsetShifter.append(shiftedKey); } } } } - currentRowsetShifter.flush(); + currentRowsetAddShifter.flush(); + currentRowsetRemoveShifter.flush(); if (populatedRowsetShifter != null) { populatedRowsetShifter.flush(); } @@ -510,6 +525,25 @@ public void append(long key) { } } + public void appendRange(Range range) { + if (currentFirst == -1) { + currentFirst = range.getFirst(); + currentLast = range.getLast(); + } else if (range.getFirst() == currentLast + 1) { + currentLast = range.getLast(); + } else if (range.getLast() == 
currentFirst - 1) { + currentFirst = range.getFirst(); + } else { + if (operation == Operation.APPEND) { + rangeSet.addRange(new Range(currentFirst, currentLast)); + } else { + rangeSet.removeRange(new Range(currentFirst, currentLast)); + } + currentFirst = range.getFirst(); + currentLast = range.getLast(); + } + } + public void flush() { if (currentFirst != -1) { if (operation == Operation.APPEND) { diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java index eba9f5461d4..80d7196c33c 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java @@ -346,6 +346,39 @@ public boolean includesAllOf(RangeSet other) { return true; } + public boolean includesAnyOf(Range range) { + if (isEmpty()) { + return false; + } + // search the sorted list of ranges and find where the current range starts. two case here when using + // binarySearch, either the removed range starts in the same place as an existing range starts, or + // it starts before an item (and so we check the item before and the item after) + int index = Arrays.binarySearch(sortedRanges, range); + if (index >= 0) { + // matching start position + return true; + } + // adjusted index notes where the item would be if it were added, minus _one more_ to see if + // it overlaps the item before it. To compute "the position where the new item belongs", we + // would do (-index - 1), so to examine one item prior to that we'll subtract one more. Then, + // to confirm that we are inserting in a valid position, take the max of that value and zero. + index = Math.max(0, -index - 2); + + // Check if there is any overlap with the prev item + Range target = sortedRanges[index]; + if (range.getFirst() <= target.getLast() && range.getLast() >= target.getFirst()) { + return true; + } + + // Check if there is a later item, and if there is an overlap with it + index++; + if (index >= sortedRanges.length) { + return false; + } + target = sortedRanges[index]; + return range.getFirst() <= target.getLast() && range.getLast() >= target.getFirst(); + } + @Override public String toString() { return "RangeSet{" + @@ -454,6 +487,7 @@ public RangeSet subsetForPositions(RangeSet positions, boolean reversed) { long c = cardinality[pos]; offset = c - nextPosRange.getFirst();// positive value to offset backwards from the end of target } + assert target != null : this + ".subsetForPositions(" + positions + ")"; assert offset >= 0 && offset <= target.size() : offset; first = target.getLast() - offset + 1; diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/ShiftedRange.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/ShiftedRange.java index 4b1b587be97..0319f443394 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/ShiftedRange.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/ShiftedRange.java @@ -31,4 +31,16 @@ public long getDelta() { public void setDelta(final long delta) { this.delta = delta; } + + public Range getResultRange() { + return new Range(range.getFirst() + delta, range.getLast() + delta); + } + + @Override + public String toString() { + return "ShiftedRange{" + + "range=" + range + + ", delta=" + delta + + '}'; + } } diff --git a/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java 
b/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java index cc7354a0f3e..54fe89c4855 100644 --- a/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java +++ b/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java @@ -212,6 +212,41 @@ public void testIncludesAllOf() { assertFalse(rangeSet.includesAllOf(RangeSet.ofRange(54, 60))); } + @Test + public void testIncludesAnyOf() { + RangeSet rangeSet = new RangeSet(); + rangeSet.addRange(new Range(0, 19)); + rangeSet.addRange(new Range(50, 54)); + + assertTrue(rangeSet.includesAnyOf(new Range(0, 19))); + assertTrue(rangeSet.includesAnyOf(new Range(50, 54))); + + rangeSet.indexIterator().forEachRemaining((LongConsumer) l -> { + assertTrue(rangeSet.includesAnyOf(new Range(l, l))); + }); + + assertTrue(rangeSet.includesAnyOf(new Range(0, 20))); + assertTrue(rangeSet.includesAnyOf(new Range(10, 20))); + assertTrue(rangeSet.includesAnyOf(new Range(19, 20))); + + assertTrue(rangeSet.includesAnyOf(new Range(19, 30))); + assertFalse(rangeSet.includesAnyOf(new Range(20, 30))); + assertFalse(rangeSet.includesAnyOf(new Range(21, 30))); + + assertFalse(rangeSet.includesAnyOf(new Range(30, 40))); + + assertFalse(rangeSet.includesAnyOf(new Range(40, 49))); + assertTrue(rangeSet.includesAnyOf(new Range(40, 50))); + assertFalse(rangeSet.includesAnyOf(new Range(40, 41))); + assertTrue(rangeSet.includesAnyOf(new Range(40, 54))); + + assertTrue(rangeSet.includesAnyOf(new Range(49, 54))); + assertTrue(rangeSet.includesAnyOf(new Range(50, 55))); + assertTrue(rangeSet.includesAnyOf(new Range(50, 60))); + + assertTrue(rangeSet.includesAnyOf(new Range(54, 60))); + assertFalse(rangeSet.includesAnyOf(new Range(55, 60))); + } @Test public void testRemove() { From 39a759a85ffeed05317697d5c0d3f41188aa53ca Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 3 Sep 2024 19:39:27 -0500 Subject: [PATCH 205/219] Extract two more methods to RangeSet --- .../barrage/data/WebBarrageSubscription.java | 24 ++++++------------- .../TableViewportSubscription.java | 2 +- .../deephaven/web/shared/data/RangeSet.java | 18 ++++++++++++++ 3 files changed, 26 insertions(+), 18 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java index bc8679be58f..ea5e9862458 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java @@ -201,7 +201,8 @@ public void applyUpdates(WebBarrageMessage message) { } } - message.rowsRemoved.rangeIterator().forEachRemaining(currentRowSet::removeRange); + currentRowSet.removeRangeSet(message.rowsRemoved); + currentRowSet.addRangeSet(message.rowsAdded); state.setSize(message.rowsAdded.size()); dataChangedHandler.onDataChanged(message.rowsAdded, message.rowsRemoved, RangeSet.empty(), message.shifted, @@ -240,16 +241,12 @@ public void applyUpdates(WebBarrageMessage message) { final boolean mightBeInitialSnapshot = getCurrentRowSet().isEmpty() && message.isSnapshot; // Apply removes to our local rowset - message.rowsRemoved.rangeIterator().forEachRemaining(currentRowSet::removeRange); + currentRowSet.removeRangeSet(message.rowsRemoved); RangeSet removed = message.rowsRemoved; if (populatedRows != null) { // limit the removed rows to what intersect the viewport - RangeSet 
populatedCopy = populatedRows.copy(); - message.rowsRemoved.rangeIterator().forEachRemaining(populatedCopy::removeRange); - removed = populatedRows.copy(); - populatedCopy.rangeIterator().forEachRemaining(removed::removeRange); - removed.rangeIterator().forEachRemaining(populatedRows::removeRange); + removed = populatedRows.extract(message.rowsRemoved); } // free rows that are no longer needed freeRows(removed); @@ -257,7 +254,7 @@ public void applyUpdates(WebBarrageMessage message) { // Apply shifts // Shift moved rows in the redir index - boolean hasReverseShift = COLUMNS_AS_LIST; + boolean hasReverseShift = false; final ShiftedRange[] shiftedRanges = message.shifted; RangeSetBulkHelper currentRowsetAddShifter = new RangeSetBulkHelper(currentRowSet, RangeSetBulkHelper.Operation.APPEND); @@ -341,9 +338,6 @@ public void applyUpdates(WebBarrageMessage message) { } if (!message.rowsIncluded.isEmpty()) { - // int addBatchSize = (int) Math.min(message.rowsIncluded.size(), 1 << 16);//reexamine this constant in - // light of browsers being browsers - if (mightBeInitialSnapshot) { capacity = message.rowsIncluded.size(); Arrays.stream(destSources).forEach(s -> s.ensureCapacity(capacity)); @@ -351,10 +345,6 @@ public void applyUpdates(WebBarrageMessage message) { } RangeSet destinationRowSet = getFreeRows(message.rowsIncluded.size()); - // RangeSet destinationRowSet = new RangeSet(); - // message.rowsIncluded.indexIterator().forEachRemaining((long row) -> { - // destinationRowSet.addRange(new Range(row, row)); - // }); for (int ii = 0; ii < message.addColumnData.length; ii++) { if (isSubscribedColumn(ii)) { @@ -396,7 +386,7 @@ public void applyUpdates(WebBarrageMessage message) { } if (serverViewport != null && populatedRows != null) { RangeSet newPopulated = currentRowSet.subsetForPositions(serverViewport, serverReverseViewport); - newPopulated.rangeIterator().forEachRemaining(populatedRows::removeRange); + populatedRows.removeRangeSet(newPopulated); freeRows(populatedRows); } @@ -417,7 +407,7 @@ private RangeSet getFreeRows(long size) { if (size <= 0) { return RangeSet.empty(); } - boolean needsResizing = COLUMNS_AS_LIST; + boolean needsResizing = false; final RangeSet result; if (capacity == 0) { capacity = Long.highestOneBit(Math.max(size * 2, 8)); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java index d04d8b95ad0..e764f9adfe8 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java @@ -147,7 +147,7 @@ protected void notifyUpdate(RangeSet rowsAdded, RangeSet rowsRemoved, RangeSet t if (hasListeners(EVENT_ROWADDED) || hasListeners(EVENT_ROWREMOVED) || hasListeners(EVENT_ROWUPDATED)) { RangeSet modifiedCopy = totalMods.copy(); // exclude added items from being marked as modified, since we're hiding shifts from api consumers - rowsAdded.rangeIterator().forEachRemaining(modifiedCopy::removeRange); + modifiedCopy.removeRangeSet(rowsAdded); RangeSet removedCopy = rowsRemoved.copy(); RangeSet addedCopy = rowsAdded.copy(); diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java index 80d7196c33c..802836223dc 100644 --- 
a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java @@ -183,6 +183,10 @@ public void addRange(Range range) { } } + public void removeRangeSet(RangeSet rangeSet) { + rangeSet.rangeIterator().forEachRemaining(this::removeRange); + } + public void removeRange(Range range) { // if empty, nothing to do if (sortedRanges.length == 0) { @@ -531,4 +535,18 @@ public long get(long key) { assert offset >= 0; return target.getLast() - offset + 1; } + + /** + * Removes all keys in the provided rangeset that are present in this. + * @param other the rows to remove + * @return any removed keys + */ + public RangeSet extract(RangeSet other) { + RangeSet populatedCopy = copy(); + populatedCopy.removeRangeSet(other); + RangeSet removed = copy(); + removed.removeRangeSet(populatedCopy); + removeRangeSet(removed); + return removed; + } } From 0f4d0b68d3a46dbc4646a43759ec92fa3d3911df Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Wed, 4 Sep 2024 10:03:55 -0500 Subject: [PATCH 206/219] Use JVM impl for getFreeRows --- .../barrage/data/WebBarrageSubscription.java | 37 ++++--------------- 1 file changed, 7 insertions(+), 30 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java index ea5e9862458..83bc108f3ba 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java @@ -411,49 +411,26 @@ private RangeSet getFreeRows(long size) { final RangeSet result; if (capacity == 0) { capacity = Long.highestOneBit(Math.max(size * 2, 8)); - freeset.addRange(new Range(size, capacity - 1)); - result = new RangeSet(); - result.addRange(new Range(0, size - 1)); + freeset.addRange(new Range(0, capacity - 1)); needsResizing = true; } else { - result = new RangeSet(); - Iterator iterator = freeset.rangeIterator(); - int required = (int) size; - while (required > 0 && iterator.hasNext()) { - Range next = iterator.next(); - Range range = - next.size() < required ? 
next : new Range(next.getFirst(), next.getFirst() + required - 1); - result.addRange(range); - freeset.removeRange(range); - required -= (int) next.size(); - } - - if (required > 0) { - // we need more, allocate extra, return some, grow the freeset for next time + if (freeset.size() < size) { long usedSlots = capacity - freeset.size(); long prevCapacity = capacity; do { capacity *= 2; - } while ((capacity - usedSlots) < required); - - result.addRange(new Range(prevCapacity, prevCapacity + required - 1)); - - freeset = new RangeSet(); - if (capacity - prevCapacity > required) { - // extra was allocated for next time - freeset.addRange(new Range(prevCapacity + required, capacity - 1)); - } + } while ((capacity - usedSlots) < size); + freeset.addRange(new Range(prevCapacity, capacity - 1)); needsResizing = true; } } - if (needsResizing) { Arrays.stream(destSources).forEach(s -> s.ensureCapacity(capacity)); } - - assert result.size() == size; - + result = freeset.subsetForPositions(RangeSet.ofRange(0, size - 1), false); + freeset.removeRange(new Range(0, result.getLastRow())); + assert result.size() == size : result.size() + " == " + size; return result; } From bb62b5e94b4e967f1c95f9223d5bec7b02e033b8 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Wed, 4 Sep 2024 12:09:51 -0500 Subject: [PATCH 207/219] Rework RangeSet to use ArrayList instead of arrays --- .../barrage/data/WebBarrageSubscription.java | 1 - .../deephaven/web/shared/data/RangeSet.java | 165 ++++++++---------- .../web/shared/data/RangeSetTest.java | 17 +- 3 files changed, 83 insertions(+), 100 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java index 83bc108f3ba..5f8fcc9a120 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java @@ -17,7 +17,6 @@ import java.util.Arrays; import java.util.BitSet; -import java.util.Iterator; import java.util.NavigableSet; import java.util.NoSuchElementException; import java.util.PrimitiveIterator; diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java index 802836223dc..f25fe2057d4 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java @@ -5,6 +5,7 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.PrimitiveIterator; @@ -41,7 +42,7 @@ public static RangeSet ofItems(long... 
items) { public static RangeSet fromSortedRanges(Range[] sortedRanges) { assert orderedAndNonOverlapping(sortedRanges) : Arrays.toString(sortedRanges); RangeSet rangeSet = new RangeSet(); - rangeSet.sortedRanges = sortedRanges; + rangeSet.sortedRanges.addAll(Arrays.asList(sortedRanges)); return rangeSet; } @@ -56,15 +57,14 @@ private static boolean orderedAndNonOverlapping(Range[] sortedRanges) { return true; } - private Range[] sortedRanges = new Range[0]; + private List sortedRanges = new ArrayList<>(); private int firstWrongCacheEntry = 0; private long[] cardinality = new long[0]; public void addRangeSet(RangeSet rangeSet) { - if (sortedRanges.length == 0 && rangeSet.sortedRanges.length != 0) { - sortedRanges = new Range[rangeSet.sortedRanges.length]; - System.arraycopy(rangeSet.sortedRanges, 0, sortedRanges, 0, sortedRanges.length); + if (rangeCount() == 0 && rangeSet.rangeCount() != 0) { + sortedRanges = new ArrayList<>(rangeSet.sortedRanges); poisonCache(0); } else { rangeSet.rangeIterator().forEachRemaining(this::addRange); @@ -73,40 +73,40 @@ public void addRangeSet(RangeSet rangeSet) { public void addRange(Range range) { // if empty, add as the only entry - if (sortedRanges.length == 0) { - sortedRanges = new Range[] {range}; + if (rangeCount() == 0) { + sortedRanges.add(range); poisonCache(0); return; } // if one other entry, test if before, after, or overlapping - if (sortedRanges.length == 1) { - Range existing = sortedRanges[0]; + if (rangeCount() == 1) { + Range existing = sortedRanges.get(0); Range overlap = range.overlap(existing); if (overlap != null) { - sortedRanges = new Range[] {overlap}; + sortedRanges.set(0, overlap); poisonCache(0); } else if (existing.compareTo(range) < 0) { - sortedRanges = new Range[] {existing, range}; + sortedRanges.add(range); poisonCache(1); } else { assert existing.compareTo(range) > 0; - sortedRanges = new Range[] {range, existing}; + sortedRanges.add(0, range); poisonCache(0); } return; } // if more than one other entry, binarySearch to find before and after entry, and test both for overlapping - int index = Arrays.binarySearch(sortedRanges, range); + int index = Collections.binarySearch(sortedRanges, range); if (index >= 0) { // starting with that item, check to see if each following item is part of the existing range // we know that no range before it will need to be considered, since the set should previously // have been broken into non-contiguous ranges Range merged = range; - int end = sortedRanges.length - 1; - for (int i = index; i < sortedRanges.length; i++) { - Range existing = sortedRanges[i]; + int end = rangeCount() - 1; + for (int i = index; i < rangeCount(); i++) { + Range existing = sortedRanges.get(i); // there is an item with the same start, either new item falls within it, or should replace it Range overlap = existing.overlap(merged); @@ -125,23 +125,15 @@ public void addRange(Range range) { } // splice out [index, end] items, replacing with the newly grown overlap object (may be the same // size, and only replacing one item) - int newLength = sortedRanges.length - (end - index); - Range[] newArray = new Range[newLength]; - if (index > 0) { - System.arraycopy(sortedRanges, 0, newArray, 0, index); - } - newArray[index] = merged; + sortedRanges.set(index, merged); + sortedRanges.subList(index + 1, end + 1).clear(); poisonCache(index); - if (end < sortedRanges.length - 1) { - System.arraycopy(sortedRanges, end + 1, newArray, index + 1, sortedRanges.length - 1 - end); - } - sortedRanges = newArray; } else { int proposedIndex 
= -(index) - 1; Range merged = range; // test the item before the proposed location (if any), try to merge it if (proposedIndex > 0) { - Range before = sortedRanges[proposedIndex - 1]; + Range before = sortedRanges.get(proposedIndex - 1); Range overlap = before.overlap(merged); if (overlap != null) { // replace the range that we are merging, and start the slice here instead @@ -156,10 +148,10 @@ public void addRange(Range range) { // instead of shrinking. // if we never find an item we cannot merge with, the end of the replaced range is the last item of the old // array, which could result in the new array having as little as only 1 item - int end = sortedRanges.length - 1; + int end = rangeCount() - 1; // until we quit finding matches, test subsequent items - for (int i = proposedIndex; i < sortedRanges.length; i++) { - Range existing = sortedRanges[i]; + for (int i = proposedIndex; i < rangeCount(); i++) { + Range existing = sortedRanges.get(i); Range overlap = existing.overlap(merged); if (overlap == null) { // stop at the item before this one @@ -168,18 +160,17 @@ public void addRange(Range range) { } merged = overlap; } - int newLength = sortedRanges.length - (end - proposedIndex); - assert newLength > 0 && newLength <= sortedRanges.length + 1; - Range[] newArray = new Range[newLength]; - if (proposedIndex > 0) { - System.arraycopy(sortedRanges, 0, newArray, 0, proposedIndex); + int newLength = rangeCount() - (end - proposedIndex); + assert newLength > 0 && newLength <= rangeCount() + 1; + if (end == proposedIndex) { + sortedRanges.set(proposedIndex, merged); + } else if (newLength < rangeCount()) { + sortedRanges.set(proposedIndex, merged); + sortedRanges.subList(proposedIndex + 1, end + 1).clear(); + } else { + sortedRanges.add(proposedIndex, merged); } - newArray[proposedIndex] = merged; poisonCache(proposedIndex); - if (end < sortedRanges.length - 1) { - System.arraycopy(sortedRanges, end + 1, newArray, proposedIndex + 1, sortedRanges.length - (end + 1)); - } - sortedRanges = newArray; } } @@ -189,14 +180,14 @@ public void removeRangeSet(RangeSet rangeSet) { public void removeRange(Range range) { // if empty, nothing to do - if (sortedRanges.length == 0) { + if (rangeCount() == 0) { return; } // search the sorted list of ranges and find where the current range starts. two case here when using // binarySearch, either the removed range starts in the same place as an existing range starts, or // it starts before an item (and so we check the item before and the item after) - int index = Arrays.binarySearch(sortedRanges, range); + int index = Collections.binarySearch(sortedRanges, range); if (index < 0) { // adjusted index notes where the item would be if it were added, minus _one more_ to see if // it overlaps the item before it. 
To compute "the position where the new item belongs", we @@ -207,8 +198,8 @@ public void removeRange(Range range) { int beforeCount = -1; int toRemove = 0; - for (; index < sortedRanges.length; index++) { - Range toCheck = sortedRanges[index]; + for (; index < rangeCount(); index++) { + Range toCheck = sortedRanges.get(index); if (toCheck.getFirst() > range.getLast()) { break;// done, this is entirely after the range we're removing } @@ -229,22 +220,15 @@ public void removeRange(Range range) { : "Expected that no previous items in the RangeSet had been removed toRemove=" + toRemove + ", beforeCount=" + beforeCount; - Range[] replacement = new Range[sortedRanges.length + 1]; - if (index > 0) { - System.arraycopy(sortedRanges, 0, replacement, 0, index); - } - replacement[index] = remaining[0]; - replacement[index + 1] = remaining[1]; + sortedRanges.set(index, remaining[0]); + sortedRanges.add(index + 1, remaining[1]); poisonCache(index); - System.arraycopy(sortedRanges, index + 1, replacement, index + 2, sortedRanges.length - (index + 1)); - - sortedRanges = replacement; return; } if (remaining.length == 1) { // swap shortened item and move on - sortedRanges[index] = remaining[0]; + sortedRanges.set(index, remaining[0]); poisonCache(index); } else { assert remaining.length == 0 : "Array contains a surprising number of items: " + remaining.length; @@ -258,13 +242,8 @@ public void removeRange(Range range) { } if (toRemove > 0) { - Range[] replacement = new Range[sortedRanges.length - toRemove]; - System.arraycopy(sortedRanges, 0, replacement, 0, beforeCount); - System.arraycopy(sortedRanges, beforeCount + toRemove, replacement, beforeCount, - sortedRanges.length - beforeCount - toRemove); - poisonCache(beforeCount + 1); - - sortedRanges = replacement; + sortedRanges.subList(beforeCount, beforeCount + toRemove).clear(); + poisonCache(beforeCount); } else { assert beforeCount == -1 : "No items to remove, but beforeCount set?"; } @@ -276,17 +255,18 @@ public void removeRange(Range range) { * @return Iterator of {@link Range} */ public Iterator rangeIterator() { - return Arrays.asList(sortedRanges).iterator(); + return sortedRanges.iterator(); } public PrimitiveIterator.OfLong indexIterator() { - return Arrays.stream(sortedRanges) + return sortedRanges + .stream() .flatMapToLong(range -> LongStream.rangeClosed(range.getFirst(), range.getLast())) .iterator(); } public int rangeCount() { - return sortedRanges.length; + return sortedRanges.size(); } /** @@ -297,15 +277,15 @@ public int rangeCount() { * @return long */ public long size() { - if (sortedRanges.length == 0) { + if (rangeCount() == 0) { return 0; } ensureCardinalityCache(); - return cardinality[cardinality.length - 1]; + return cardinality[sortedRanges.size() - 1]; } public boolean isEmpty() { - return sortedRanges.length == 0; + return rangeCount() == 0; } public boolean contains(long value) { @@ -357,7 +337,7 @@ public boolean includesAnyOf(Range range) { // search the sorted list of ranges and find where the current range starts. 
two case here when using // binarySearch, either the removed range starts in the same place as an existing range starts, or // it starts before an item (and so we check the item before and the item after) - int index = Arrays.binarySearch(sortedRanges, range); + int index = Collections.binarySearch(sortedRanges, range); if (index >= 0) { // matching start position return true; @@ -369,38 +349,38 @@ public boolean includesAnyOf(Range range) { index = Math.max(0, -index - 2); // Check if there is any overlap with the prev item - Range target = sortedRanges[index]; + Range target = sortedRanges.get(index); if (range.getFirst() <= target.getLast() && range.getLast() >= target.getFirst()) { return true; } // Check if there is a later item, and if there is an overlap with it index++; - if (index >= sortedRanges.length) { + if (index >= rangeCount()) { return false; } - target = sortedRanges[index]; + target = sortedRanges.get(index); return range.getFirst() <= target.getLast() && range.getLast() >= target.getFirst(); } @Override public String toString() { return "RangeSet{" + - "sortedRanges=" + Arrays.toString(sortedRanges) + + "sortedRanges=" + sortedRanges + '}'; } public long getFirstRow() { - return sortedRanges[0].getFirst(); + return sortedRanges.get(0).getFirst(); } public long getLastRow() { - return sortedRanges[sortedRanges.length - 1].getLast(); + return sortedRanges.get(rangeCount() - 1).getLast(); } public RangeSet copy() { RangeSet copy = new RangeSet(); - copy.sortedRanges = Arrays.copyOf(sortedRanges, sortedRanges.length); + copy.sortedRanges = new ArrayList<>(sortedRanges); return copy; } @@ -412,12 +392,12 @@ public boolean equals(Object o) { return false; final RangeSet rangeSet = (RangeSet) o; - return Arrays.equals(sortedRanges, rangeSet.sortedRanges); + return sortedRanges.equals(rangeSet.sortedRanges); } @Override public int hashCode() { - return Arrays.hashCode(sortedRanges); + return sortedRanges.hashCode(); } /** @@ -432,22 +412,22 @@ private void poisonCache(int rangeIndex) { * Ensures that the cardinality cache is correct, by correcting any values after the first wrong entry. */ private void ensureCardinalityCache() { - if (firstWrongCacheEntry == sortedRanges.length) { + if (firstWrongCacheEntry == rangeCount()) { return; } - if (cardinality.length < sortedRanges.length) { - long[] replacement = new long[sortedRanges.length]; + if (cardinality.length < rangeCount()) { + long[] replacement = new long[rangeCount()]; System.arraycopy(cardinality, 0, replacement, 0, cardinality.length); cardinality = replacement; } assert firstWrongCacheEntry >= 0 : this; long cumulative = firstWrongCacheEntry == 0 ? 
0 : cardinality[firstWrongCacheEntry - 1]; - for (int i = firstWrongCacheEntry; i < sortedRanges.length; i++) { - cumulative += sortedRanges[i].size(); + for (int i = firstWrongCacheEntry; i < rangeCount(); i++) { + cumulative += sortedRanges.get(i).size(); this.cardinality[i] = cumulative; } - firstWrongCacheEntry = sortedRanges.length; - assert cardinality.length >= sortedRanges.length : this; + firstWrongCacheEntry = rangeCount(); + assert cardinality.length >= rangeCount() : this; } public RangeSet subsetForPositions(RangeSet positions, boolean reversed) { @@ -475,19 +455,19 @@ public RangeSet subsetForPositions(RangeSet positions, boolean reversed) { } long rangeToTake = nextPosRange.size(); - int pos = Arrays.binarySearch(cardinality, from, sortedRanges.length, nextPosRange.getFirst() + 1); + int pos = Arrays.binarySearch(cardinality, from, rangeCount(), nextPosRange.getFirst() + 1); long first; Range target; long offset; if (pos >= 0) { // Position matches the last item in the current range - target = sortedRanges[pos]; + target = sortedRanges.get(pos); offset = 1; } else { // Position matches an earlier item in pos = -pos - 1; - target = sortedRanges[pos]; + target = sortedRanges.get(pos); long c = cardinality[pos]; offset = c - nextPosRange.getFirst();// positive value to offset backwards from the end of target } @@ -503,10 +483,10 @@ public RangeSet subsetForPositions(RangeSet positions, boolean reversed) { rangeToTake -= count; pos++; - if (pos >= sortedRanges.length) { + if (pos >= rangeCount()) { break; } - target = sortedRanges[pos]; + target = sortedRanges.get(pos); first = target.getFirst(); offset = target.size(); } @@ -520,16 +500,16 @@ public RangeSet subsetForPositions(RangeSet positions, boolean reversed) { public long get(long key) { if (key == 0) { - return sortedRanges[0].getFirst(); + return getFirstRow(); } ensureCardinalityCache(); int pos = Arrays.binarySearch(cardinality, key); if (pos >= 0) { - return sortedRanges[pos + 1].getFirst(); + return sortedRanges.get(pos + 1).getFirst(); } - Range target = sortedRanges[-pos - 1]; + Range target = sortedRanges.get(-pos - 1); long c = cardinality[-pos - 1]; long offset = c - key;// positive value to offset backwards from the end of target assert offset >= 0; @@ -538,6 +518,7 @@ public long get(long key) { /** * Removes all keys in the provided rangeset that are present in this. 
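 * (Illustrative example, not part of the original comment: if this = {0-9} and
 * other = {4-6, 20-25}, the intersection {4-6} is removed, leaving this = {0-3, 7-9},
 * and {4-6} is returned to the caller.)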
+ * * @param other the rows to remove * @return any removed keys */ diff --git a/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java b/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java index 54fe89c4855..81a4ea66eff 100644 --- a/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java +++ b/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java @@ -128,21 +128,24 @@ public void testAddExistingRange() { @Test public void testOverlappingRangesInDifferentOrder() { - // add three items in each possible order to a rangeset, ensure results are always the same + // add five ranges, where some overlap others, in each possible order to a rangeset, ensure results are always + // the same Range rangeA = new Range(100, 108); Range rangeB = new Range(105, 112); Range rangeC = new Range(110, 115); - Collections2.permutations(Arrays.asList(rangeA, rangeB, rangeC)).forEach(list -> { + Range rangeD = new Range(100, 113); + Range rangeE = new Range(101, 115); + Collections2.permutations(Arrays.asList(rangeA, rangeB, rangeC, rangeD, rangeE)).forEach(list -> { RangeSet rangeSet = new RangeSet(); list.forEach(rangeSet::addRange); - assertEquals(16, rangeSet.size()); + assertEquals(list.toString(), 16, rangeSet.size()); assertEquals(list.toString(), Collections.singletonList(new Range(100, 115)), asList(rangeSet)); }); - // same three items, but with another before that will not overlap with them + // same five items, but with another before that will not overlap with them Range before = new Range(0, 4); - Collections2.permutations(Arrays.asList(before, rangeA, rangeB, rangeC)).forEach(list -> { + Collections2.permutations(Arrays.asList(before, rangeA, rangeB, rangeC, rangeD, rangeE)).forEach(list -> { RangeSet rangeSet = new RangeSet(); list.forEach(rangeSet::addRange); @@ -150,9 +153,9 @@ public void testOverlappingRangesInDifferentOrder() { assertEquals(list.toString(), Arrays.asList(new Range(0, 4), new Range(100, 115)), asList(rangeSet)); }); - // same three items, but with another following that will not overlap with them + // same five items, but with another following that will not overlap with them Range after = new Range(200, 204); - Collections2.permutations(Arrays.asList(after, rangeA, rangeB, rangeC)).forEach(list -> { + Collections2.permutations(Arrays.asList(after, rangeA, rangeB, rangeC, rangeD, rangeE)).forEach(list -> { RangeSet rangeSet = new RangeSet(); list.forEach(rangeSet::addRange); From 0adf4c9ffdaeecf92ae9fd5b056a7cc1ab5437b6 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Wed, 4 Sep 2024 21:35:30 -0500 Subject: [PATCH 208/219] Avoid n^2 chunk type lookup, remove dead line --- .../web/client/api/barrage/data/WebBarrageSubscription.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java index 5f8fcc9a120..448fb0b2813 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java @@ -5,6 +5,7 @@ import elemental2.core.JsArray; import io.deephaven.chunk.Chunk; +import io.deephaven.chunk.ChunkType; import io.deephaven.chunk.attributes.Values; import io.deephaven.web.client.api.barrage.WebBarrageMessage; import 
io.deephaven.web.client.api.barrage.def.InitialTableDefinition; @@ -42,9 +43,9 @@ public static WebBarrageSubscription subscribe(ClientTableState cts, ViewportCha DataChangedHandler dataChangedHandler) { WebColumnData[] dataSinks = new WebColumnData[cts.columnTypes().length]; + ChunkType[] chunkTypes = cts.chunkTypes(); for (int i = 0; i < dataSinks.length; i++) { - JsArray arr = JsData.newArray(cts.columnTypes()[i].getCanonicalName()); - switch (cts.chunkTypes()[i]) { + switch (chunkTypes[i]) { case Boolean: throw new IllegalStateException("Boolean unsupported here"); case Char: From 1e1153f015f937ae1a8fb68aefb0c11d9526de1e Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Thu, 5 Sep 2024 11:55:02 -0500 Subject: [PATCH 209/219] Add a method in RangeSet to shift all rows in one pass --- .../io/deephaven/web/shared/data/Range.java | 4 + .../deephaven/web/shared/data/RangeSet.java | 180 +++++++++++++++++- .../web/shared/data/ShiftedRange.java | 2 +- .../web/shared/data/RangeSetTest.java | 64 +++++++ 4 files changed, 243 insertions(+), 7 deletions(-) diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Range.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Range.java index bf74f290d09..1f0e288923e 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Range.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Range.java @@ -115,4 +115,8 @@ public String toString() { ", last=" + last + '}'; } + + public Range shift(long delta) { + return new Range(first + delta, last + delta); + } } diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java index f25fe2057d4..e125ecdd708 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java @@ -40,21 +40,20 @@ public static RangeSet ofItems(long... 
items) {
         return rangeSet;
     }
 
     public static RangeSet fromSortedRanges(Range[] sortedRanges) {
-        assert orderedAndNonOverlapping(sortedRanges) : Arrays.toString(sortedRanges);
+        assertOrderedAndNonOverlapping(sortedRanges);
         RangeSet rangeSet = new RangeSet();
         rangeSet.sortedRanges.addAll(Arrays.asList(sortedRanges));
         return rangeSet;
     }
 
-    private static boolean orderedAndNonOverlapping(Range[] sortedRanges) {
+    private static void assertOrderedAndNonOverlapping(Range[] sortedRanges) {
         long lastSeen = -1;
         for (int i = 0; i < sortedRanges.length; i++) {
             if (lastSeen >= sortedRanges[i].getFirst()) {
-                return false;
+                assert lastSeen == -1 : sortedRanges[i - 1] + " came before " + sortedRanges[i] + " (index=" + i + ")";
             }
             lastSeen = sortedRanges[i].getLast();
         }
-        return true;
     }
@@ -225,8 +224,7 @@ public void removeRange(Range range) {
             poisonCache(index);
 
             return;
-        }
-        if (remaining.length == 1) {
+        } else if (remaining.length == 1) {
             // swap shortened item and move on
             sortedRanges.set(index, remaining[0]);
             poisonCache(index);
         } else {
             assert remaining.length == 0 : "Array contains a surprising number of items: " + remaining.length;
@@ -249,6 +247,176 @@ public void removeRange(Range range) {
         }
     }
 
+    private static class RangeAccumulator {
+        private final List<Range> replacement = new ArrayList<>();
+
+        public void appendRange(Range range) {
+            if (!replacement.isEmpty()) {
+                Range lastSeen = replacement.get(replacement.size() - 1);
+                Range overlap = lastSeen.overlap(range);
+                if (overlap != null) {
+                    replacement.set(replacement.size() - 1, overlap);
+                } else {
+                    replacement.add(range);
+                }
+            } else {
+                replacement.add(range);
+            }
+// assert isSorted();
+        }
+
+        public void appendRanges(List<Range> ranges) {
+            appendRange(ranges.get(0));
+            replacement.addAll(ranges.subList(0, ranges.size() - 1));
+// assert isSorted();
+        }
+
+        public void appendRanges(List<Range> ranges, long firstItemSubindex) {
+            Range first = ranges.get(0);
+            appendRange(new Range(first.getFirst() + firstItemSubindex, first.getLast()));
+            replacement.addAll(ranges.subList(0, ranges.size() - 1));
+// assert isSorted();
+        }
+
+        public List<Range> build() {
+// assert isSorted();
+            return replacement;
+        }
+
+        private boolean isSorted() {
+            RangeSet r = new RangeSet();
+            replacement.forEach(r::addRange);
+            return r.equals(RangeSet.fromSortedRanges(replacement.toArray(new Range[0])));
+        }
+    }
+
+    private static class RangeIterator {
+        private int index = -1;
+        private final List<Range> ranges;
+        private long key = 0;
+
+        private RangeIterator(List<Range> ranges) {
+            this.ranges = ranges;
+        }
+
+        public void advanceInCurrentRangeToKey(long key) {
+            assert key != 0;
+            this.key = key;
+        }
+        public boolean hasNext() {
+            return key == -1 || index < ranges.size() - 1;
+        }
+        public Range next() {
+            if (key != 0) {
+                Range r = ranges.get(index);
+                assert key > r.getFirst() && key <= r.getLast();
+                r = new Range(key, r.getLast());
+                key = 0;
+
+                return r;
+            }
+            return ranges.get(++index);
+        }
+
+    }
+    public void applyShifts(ShiftedRange[] shiftedRanges) {
+        if (shiftedRanges.length == 0 || isEmpty()) {
+            return;
+        }
+        RangeAccumulator newRanges = new RangeAccumulator();
+        RangeIterator rangeIterator = new RangeIterator(sortedRanges);
+        Iterator<ShiftedRange> shiftIterator = Arrays.asList(shiftedRanges).iterator();
+        Range toCheck = rangeIterator.next();
+        ShiftedRange shiftedRange = shiftIterator.next();
+        do {
+            if (toCheck.getLast() < shiftedRange.getRange().getFirst()) {
+                // leave this range alone, the range to shift is after it
+                newRanges.appendRange(toCheck);
+                if (!rangeIterator.hasNext()) {
+                    toCheck = null;
+                    break;
+                }
+                toCheck = rangeIterator.next();
+            } else if
(toCheck.getFirst() > shiftedRange.getRange().getLast()) { + // skip the rest of this shift, the next range is after it + if (!shiftIterator.hasNext()) { + break; + } + shiftedRange = shiftIterator.next(); + } else { + Range[] remaining = toCheck.minus(shiftedRange.getRange()); + if (remaining.length == 0) { + // entire range shifted + newRanges.appendRange(toCheck.shift(shiftedRange.getDelta())); + if (!rangeIterator.hasNext()) { + toCheck = null; + break; + } + toCheck = rangeIterator.next(); + } else if (remaining.length == 1) { + Range remainingRange = remaining[0]; + + Range[] complimentArr = toCheck.minus(remainingRange); + assert complimentArr.length == 1; + Range compliment = complimentArr[0]; + if (remainingRange.compareTo(toCheck) > 0) { + // shift the compliment + newRanges.appendRange(compliment.shift(shiftedRange.getDelta())); + + // rest of the range still needs to be checked + rangeIterator.advanceInCurrentRangeToKey(remainingRange.getFirst()); + toCheck = rangeIterator.next(); + + // shift is consumed, move to the next one + if (!shiftIterator.hasNext()) { + break; + } + shiftedRange = shiftIterator.next(); + } else { + // keep the remaining section + newRanges.appendRange(remainingRange); + // leftovers after, shift the compliment + newRanges.appendRange(compliment.shift(shiftedRange.getDelta())); + + // look at the next range + if (!rangeIterator.hasNext()) { + toCheck = null; + break; + } + toCheck = rangeIterator.next(); + } + } else { + assert remaining.length == 2; + // We matched the entire shift range, plus a prefix and suffix + // First append the before section + newRanges.appendRange(remaining[0]); + // Then the entire shift range + newRanges.appendRange(shiftedRange.getResultRange()); + + // Visit the rest of the range next + rangeIterator.advanceInCurrentRangeToKey(remaining[1].getFirst()); + toCheck = rangeIterator.next(); + + if (!shiftIterator.hasNext()) { + break; + } + shiftedRange = shiftIterator.next(); + } + } + } while (true); + + // Grab remaining ranges + if (toCheck != null) { + newRanges.appendRange(toCheck); + while (rangeIterator.hasNext()) { + newRanges.appendRange(rangeIterator.next()); + } + } + + sortedRanges = newRanges.build(); + poisonCache(0); + } + /** * a new iterator over all indexes in this collection. 
* diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/ShiftedRange.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/ShiftedRange.java index 0319f443394..f1d9e4848d6 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/ShiftedRange.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/ShiftedRange.java @@ -33,7 +33,7 @@ public void setDelta(final long delta) { } public Range getResultRange() { - return new Range(range.getFirst() + delta, range.getLast() + delta); + return range.shift(delta); } @Override diff --git a/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java b/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java index 81a4ea66eff..07104d42e1e 100644 --- a/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java +++ b/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java @@ -568,4 +568,68 @@ public void testGet() { initialRange.removeRange(new Range(0, 1)); } + + @Test + public void testShift() { + RangeSet r = RangeSet.ofRange(0, 2); + r.applyShifts(new ShiftedRange[] { + new ShiftedRange(new Range(0, 2), 2) + }); + assertEquals(RangeSet.ofRange(2, 4), r); + r.applyShifts(new ShiftedRange[] { + new ShiftedRange(new Range(2, 6), -2) + }); + assertEquals(RangeSet.ofRange(0, 2), r); + r.applyShifts(new ShiftedRange[] { + new ShiftedRange(new Range(1, 2), 3) + }); + assertEquals(RangeSet.ofItems(0, 4, 5), r); + r.applyShifts(new ShiftedRange[] { + new ShiftedRange(new Range(4, 4), -1) + }); + assertEquals(RangeSet.ofItems(0, 3, 5), r); + + r = RangeSet.ofItems(0, 3, 4, 5, 6, 10); + r.applyShifts(new ShiftedRange[] { + new ShiftedRange(new Range(4, 4), 2), + new ShiftedRange(new Range(5, 6), 3), + }); + assertEquals(RangeSet.ofItems(0, 3, 6, 8, 9, 10), r); + + + r = RangeSet.fromSortedRanges(new Range[] { + new Range(0,1), + new Range(3,5), + new Range(7,13), + new Range(15, 19), + }); + r.applyShifts(new ShiftedRange[] { + new ShiftedRange(new Range(3,4), -1), + new ShiftedRange(new Range(7,13), -1), + new ShiftedRange(new Range(15, 17), -2), + }); + assertEquals(RangeSet.fromSortedRanges(new Range[] { + new Range(0, 3), + new Range(5, 15), + new Range(18, 19), + }), r); + + + r = RangeSet.fromSortedRanges(new Range[] { + new Range(28972, 28987), + new Range(28989, 29003), + new Range(29005, 29011), + new Range(29013, 29013), + new Range(29015, 29018), + new Range(29020, 29020), + new Range(29022, 29024), + new Range(29026, 29039), + }); + r.applyShifts(new ShiftedRange[] { + new ShiftedRange(new Range(28989, 29011), 2), + new ShiftedRange(new Range(29013, 29013), 1), + new ShiftedRange(new Range(29022, 29024), -1), + new ShiftedRange(new Range(29026, 29026), -2), + }); + } } From 9a993ce4c1dafd272c4ade0d7713def9b6800ed4 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 6 Sep 2024 11:36:39 -0500 Subject: [PATCH 210/219] Start using shift code in subscription --- .../api/barrage/data/WebBarrageSubscription.java | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java index 448fb0b2813..deccbe877ca 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java +++ 
b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java @@ -256,10 +256,7 @@ public void applyUpdates(WebBarrageMessage message) { // Shift moved rows in the redir index boolean hasReverseShift = false; final ShiftedRange[] shiftedRanges = message.shifted; - RangeSetBulkHelper currentRowsetAddShifter = - new RangeSetBulkHelper(currentRowSet, RangeSetBulkHelper.Operation.APPEND); - RangeSetBulkHelper currentRowsetRemoveShifter = - new RangeSetBulkHelper(currentRowSet, RangeSetBulkHelper.Operation.APPEND); + currentRowSet.applyShifts(shiftedRanges); RangeSetBulkHelper populatedRowsetShifter = populatedRows == null ? null : new RangeSetBulkHelper(populatedRows, RangeSetBulkHelper.Operation.APPEND); for (int i = shiftedRanges.length - 1; i >= 0; --i) { @@ -269,8 +266,6 @@ public void applyUpdates(WebBarrageMessage message) { hasReverseShift = true; continue; } - currentRowsetRemoveShifter.appendRange(shiftedRange.getRange()); - currentRowsetAddShifter.appendRange(shiftedRange.getResultRange()); // test if shift is in populatedRows before continuing if (populatedRows != null) { @@ -300,8 +295,6 @@ public void applyUpdates(WebBarrageMessage message) { if (offset > 0) { continue; } - currentRowsetRemoveShifter.appendRange(shiftedRange.getRange()); - currentRowsetAddShifter.appendRange(shiftedRange.getResultRange()); if (populatedRows != null) { if (!populatedRows.includesAnyOf(shiftedRange.getRange())) { @@ -323,8 +316,6 @@ public void applyUpdates(WebBarrageMessage message) { } } } - currentRowsetAddShifter.flush(); - currentRowsetRemoveShifter.flush(); if (populatedRowsetShifter != null) { populatedRowsetShifter.flush(); } From ced9af6e53d73cd65ff5f324d0e53eab0d4c1418 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 6 Sep 2024 11:46:33 -0500 Subject: [PATCH 211/219] Improved index iterator, removeRangeSet, addRangeSet impls --- .../api/barrage/CompressedRangeSetReader.java | 2 +- .../deephaven/web/shared/data/RangeSet.java | 326 +++++++++--------- 2 files changed, 164 insertions(+), 164 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/CompressedRangeSetReader.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/CompressedRangeSetReader.java index e4f14ef2fd4..1ee0c147621 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/CompressedRangeSetReader.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/CompressedRangeSetReader.java @@ -175,7 +175,7 @@ public RangeSet read(ByteBuffer data) { if (pending >= 0) { append(pending); } - return RangeSet.fromSortedRanges(sortedRanges.toArray(new Range[0])); + return RangeSet.fromSortedRanges(sortedRanges); default: throw new IllegalStateException("Bad command: " + command + " at position " + data.position()); } diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java index e125ecdd708..013bc615ce8 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java @@ -39,6 +39,13 @@ public static RangeSet ofItems(long... 
items) { return rangeSet; } + public static RangeSet fromSortedRanges(List sortedRanges) { + assertOrderedAndNonOverlapping(sortedRanges.toArray(new Range[0])); + RangeSet rangeSet = new RangeSet(); + rangeSet.sortedRanges = sortedRanges; + return rangeSet; + } + public static RangeSet fromSortedRanges(Range[] sortedRanges) { assertOrderedAndNonOverlapping(sortedRanges); RangeSet rangeSet = new RangeSet(); @@ -62,189 +69,159 @@ private static void assertOrderedAndNonOverlapping(Range[] sortedRanges) { private long[] cardinality = new long[0]; public void addRangeSet(RangeSet rangeSet) { - if (rangeCount() == 0 && rangeSet.rangeCount() != 0) { + if (isEmpty() && !rangeSet.isEmpty()) { sortedRanges = new ArrayList<>(rangeSet.sortedRanges); poisonCache(0); - } else { - rangeSet.rangeIterator().forEachRemaining(this::addRange); - } - } - - public void addRange(Range range) { - // if empty, add as the only entry - if (rangeCount() == 0) { - sortedRanges.add(range); - poisonCache(0); - return; - } - // if one other entry, test if before, after, or overlapping - if (rangeCount() == 1) { - Range existing = sortedRanges.get(0); - Range overlap = range.overlap(existing); - if (overlap != null) { - sortedRanges.set(0, overlap); - poisonCache(0); - } else if (existing.compareTo(range) < 0) { - sortedRanges.add(range); - poisonCache(1); - } else { - assert existing.compareTo(range) > 0; - sortedRanges.add(0, range); - poisonCache(0); - } - return; - } + } else if (!rangeSet.isEmpty()) { + RangeAccumulator newRanges = new RangeAccumulator(); + Iterator rangeIterator = sortedRanges.iterator(); + Iterator addIterator = rangeSet.sortedRanges.iterator(); + + Range toCheck = rangeIterator.next(); + Range toAdd = addIterator.next(); + while (true) { + if (toCheck.getLast() < toAdd.getFirst()) { + newRanges.appendRange(toCheck); + if (!rangeIterator.hasNext()) { + toCheck = null; + break; + } + toCheck = rangeIterator.next(); + } else if (toCheck.getFirst() > toAdd.getLast()) { + newRanges.appendRange(toAdd); - // if more than one other entry, binarySearch to find before and after entry, and test both for overlapping - int index = Collections.binarySearch(sortedRanges, range); - if (index >= 0) { + if (!addIterator.hasNext()) { + toAdd = null; + break; + } + toAdd = addIterator.next(); + } else { + Range overlap = toCheck.overlap(toAdd); + assert overlap != null; + newRanges.appendRange(overlap); - // starting with that item, check to see if each following item is part of the existing range - // we know that no range before it will need to be considered, since the set should previously - // have been broken into non-contiguous ranges - Range merged = range; - int end = rangeCount() - 1; - for (int i = index; i < rangeCount(); i++) { - Range existing = sortedRanges.get(i); - // there is an item with the same start, either new item falls within it, or should replace it - Range overlap = existing.overlap(merged); - - if (overlap == null) { - // index before this one is the last item to be replaced - end = i - 1; - break; - } - if (overlap.equals(existing)) { - // the entire range to be added existed within an existing range, we're done - return; - } + if (!rangeIterator.hasNext()) { + toCheck = null; + break; + } + toCheck = rangeIterator.next(); - // grow the region used for replacing - merged = overlap; - } - // splice out [index, end] items, replacing with the newly grown overlap object (may be the same - // size, and only replacing one item) - sortedRanges.set(index, merged); - sortedRanges.subList(index + 1, 
end + 1).clear(); - poisonCache(index); - } else { - int proposedIndex = -(index) - 1; - Range merged = range; - // test the item before the proposed location (if any), try to merge it - if (proposedIndex > 0) { - Range before = sortedRanges.get(proposedIndex - 1); - Range overlap = before.overlap(merged); - if (overlap != null) { - // replace the range that we are merging, and start the slice here instead - merged = overlap; - proposedIndex--; - // TODO this will make the loop start here, considering this item twice. not ideal, but not a big - // deal either + if (!addIterator.hasNext()) { + toAdd = null; + break; + } + toAdd = addIterator.next(); } } - // "end" represents the last item that needs to be merged in to the newly added item. if no items are to be - // merged in, then end will be proposedIndex-1, meaning nothing gets merged in, and the array will grow - // instead of shrinking. - // if we never find an item we cannot merge with, the end of the replaced range is the last item of the old - // array, which could result in the new array having as little as only 1 item - int end = rangeCount() - 1; - // until we quit finding matches, test subsequent items - for (int i = proposedIndex; i < rangeCount(); i++) { - Range existing = sortedRanges.get(i); - Range overlap = existing.overlap(merged); - if (overlap == null) { - // stop at the item before this one - end = i - 1; - break; + + // Grab remaining ranges + if (toCheck != null) { + assert toAdd == null; + newRanges.appendRange(toCheck); + while (rangeIterator.hasNext()) { + newRanges.appendRange(rangeIterator.next()); } - merged = overlap; - } - int newLength = rangeCount() - (end - proposedIndex); - assert newLength > 0 && newLength <= rangeCount() + 1; - if (end == proposedIndex) { - sortedRanges.set(proposedIndex, merged); - } else if (newLength < rangeCount()) { - sortedRanges.set(proposedIndex, merged); - sortedRanges.subList(proposedIndex + 1, end + 1).clear(); } else { - sortedRanges.add(proposedIndex, merged); + assert toAdd != null; + newRanges.appendRange(toAdd); + while (addIterator.hasNext()) { + newRanges.appendRange(addIterator.next()); + } } - poisonCache(proposedIndex); + + this.sortedRanges = newRanges.build(); + poisonCache(0); } } - public void removeRangeSet(RangeSet rangeSet) { - rangeSet.rangeIterator().forEachRemaining(this::removeRange); + public void addRange(Range range) { + addRangeSet(RangeSet.fromSortedRanges(Collections.singletonList(range))); } - public void removeRange(Range range) { - // if empty, nothing to do - if (rangeCount() == 0) { + public void removeRangeSet(RangeSet rangeSet) { + if (isEmpty() || rangeSet.isEmpty()) { return; } - // search the sorted list of ranges and find where the current range starts. two case here when using - // binarySearch, either the removed range starts in the same place as an existing range starts, or - // it starts before an item (and so we check the item before and the item after) - int index = Collections.binarySearch(sortedRanges, range); - if (index < 0) { - // adjusted index notes where the item would be if it were added, minus _one more_ to see if - // it overlaps the item before it. To compute "the position where the new item belongs", we - // would do (-index - 1), so to examine one item prior to that we'll subtract one more. Then, - // to confirm that we are inserting in a valid position, take the max of that value and zero. 
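- // (Editorial aside, worked arithmetic for the line below: a binarySearch return of -3 means the
- // insertion point is -(-3) - 1 == 2, so the item before it is at -(-3) - 2 == 1; Math.max clamps
- // that to 0 for a removed range that would sort before every existing range.)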
- index = Math.max(0, -index - 2); - } - - int beforeCount = -1; - int toRemove = 0; - for (; index < rangeCount(); index++) { - Range toCheck = sortedRanges.get(index); - if (toCheck.getFirst() > range.getLast()) { - break;// done, this is entirely after the range we're removing - } - if (toCheck.getLast() < range.getFirst()) { - continue;// skip, we don't overlap at all yet - } - Range[] remaining = toCheck.minus(range); - assert remaining != null : "Only early ranges are allowed to not match at all"; - - if (remaining.length == 2) { - // Removed region is entirely within the range we are checking: - // Splice in the one extra item and we're done - this entry - // both started before and ended after the removed section, - // so we don't even "break", we just return - assert toCheck.getFirst() < range.getFirst() : "Expected " + range + " to start after " + toCheck; - assert toCheck.getLast() > range.getLast() : "Expected " + range + " to end after " + toCheck; - assert toRemove == 0 && beforeCount == -1 - : "Expected that no previous items in the RangeSet had been removed toRemove=" + toRemove - + ", beforeCount=" + beforeCount; - - sortedRanges.set(index, remaining[0]); - sortedRanges.add(index + 1, remaining[1]); - poisonCache(index); - - return; - } else if (remaining.length == 1) { - // swap shortened item and move on - sortedRanges.set(index, remaining[0]); - poisonCache(index); + RangeAccumulator newRanges = new RangeAccumulator(); + RangeIterator rangeIterator = new RangeIterator(sortedRanges); + Iterator removeIterator = rangeSet.sortedRanges.iterator(); + + Range toCheck = rangeIterator.next(); + Range toRemove = removeIterator.next(); + while (true) { + if (toCheck.getLast() < toRemove.getFirst()) { + newRanges.appendRange(toCheck); + if (!rangeIterator.hasNext()) { + toCheck = null; + break; + } + toCheck = rangeIterator.next(); + } else if (toCheck.getFirst() > toRemove.getLast()) { + if (!removeIterator.hasNext()) { + break; + } + toRemove = removeIterator.next(); } else { - assert remaining.length == 0 : "Array contains a surprising number of items: " + remaining.length; + Range[] remaining = toCheck.minus(toRemove); + if (remaining.length == 0) { + // entire range removed, advance to the next range to check + if (!rangeIterator.hasNext()) { + toCheck = null; + break; + } + toCheck = rangeIterator.next(); + } else if (remaining.length == 1) { + Range remainingRange = remaining[0]; + if (remainingRange.compareTo(toCheck) > 0) { + // unremoved range still needs to be checked + rangeIterator.advanceInCurrentRangeToKey(remainingRange.getFirst()); + toCheck = rangeIterator.next(); + + if (!removeIterator.hasNext()) { + break; + } + toRemove = removeIterator.next(); + } else { + // keep the leading, remaining section + newRanges.appendRange(remainingRange); + + // look at the next range + if (!rangeIterator.hasNext()) { + toCheck = null; + break; + } + toCheck = rangeIterator.next(); + } + } else { + assert remaining.length == 2; + newRanges.appendRange(remaining[0]); + + rangeIterator.advanceInCurrentRangeToKey(remaining[1].getFirst()); + toCheck = rangeIterator.next(); - // splice out this item as nothing exists here any more and move on - if (toRemove == 0) { - beforeCount = index; + if (!removeIterator.hasNext()) { + break; + } + toRemove = removeIterator.next(); } - toRemove++; } - } - if (toRemove > 0) { - sortedRanges.subList(beforeCount, beforeCount + toRemove).clear(); - poisonCache(beforeCount); - } else { - assert beforeCount == -1 : "No items to remove, but beforeCount 
set?"; + + // Grab remaining ranges + if (toCheck != null) { + newRanges.appendRange(toCheck); + while (rangeIterator.hasNext()) { + newRanges.appendRange(rangeIterator.next()); + } } + + this.sortedRanges = newRanges.build(); + poisonCache(0); + } + + public void removeRange(Range range) { + removeRangeSet(RangeSet.fromSortedRanges(Collections.singletonList(range))); } private static class RangeAccumulator { @@ -427,10 +404,33 @@ public Iterator rangeIterator() { } public PrimitiveIterator.OfLong indexIterator() { - return sortedRanges - .stream() - .flatMapToLong(range -> LongStream.rangeClosed(range.getFirst(), range.getLast())) - .iterator(); + if (isEmpty()) { + return LongStream.empty().iterator(); + } + return new PrimitiveIterator.OfLong() { + private int rangeIndex = 0; + private Range current = sortedRanges.get(0); + private long offsetInRange = 0; + @Override + public long nextLong() { + long value = current.getFirst() + offsetInRange; + if (++offsetInRange >= current.size()) { + rangeIndex++; + offsetInRange = 0; + if (rangeIndex < rangeCount()) { + current = sortedRanges.get(rangeIndex); + } else { + current = null; + } + } + return value; + } + + @Override + public boolean hasNext() { + return rangeIndex < rangeCount(); + } + }; } public int rangeCount() { From fe65560d4b050ab19d373c64b6fe81df23eadab9 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 6 Sep 2024 12:05:44 -0500 Subject: [PATCH 212/219] Remove useless asserts, and migrate an error closer to API --- .../java/io/deephaven/web/client/api/JsRangeSet.java | 3 +++ .../java/io/deephaven/web/shared/data/Range.java | 12 ------------ 2 files changed, 3 insertions(+), 12 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/JsRangeSet.java b/web/client-api/src/main/java/io/deephaven/web/client/api/JsRangeSet.java index 50e09604c73..85872589483 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/JsRangeSet.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/JsRangeSet.java @@ -24,6 +24,9 @@ public class JsRangeSet { private final RangeSet range; public static JsRangeSet ofRange(double first, double last) { + if (first > last) { + throw new IllegalStateException(first + " > " + last); + } return new JsRangeSet(RangeSet.ofRange((long) first, (long) last)); } diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Range.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Range.java index 1f0e288923e..87144a989f1 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Range.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Range.java @@ -4,7 +4,6 @@ package io.deephaven.web.shared.data; import javax.annotation.Nonnull; -import java.io.Serializable; /** * Describes a contiguous range of at least one item. 
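* <p>(Editorial sketch, hedged and not part of this patch, illustrating the minus() behavior whose asserts are trimmed below:
* <pre>{@code
* new Range(0, 10).minus(new Range(3, 5));  // -> [0, 2] and [6, 10]; an interior removal splits the range
* new Range(0, 10).minus(new Range(0, 3));  // -> [4, 10]; an overlapping prefix shortens it
* new Range(0, 10).minus(new Range(8, 12)); // -> [0, 7]; an overlapping suffix shortens it
* }</pre>)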
Equals/hashcode compare both start and end, but comparing Range @@ -15,15 +14,7 @@ public class Range implements Comparable { private final long first; private final long last; - // serialization - Range() { - this(0, 0); - } public Range(long first, long last) { - if (first > last) { - throw new IllegalStateException(first + " > " + last); - } this.first = first; this.last = last; } @@ -70,13 +61,10 @@ public Range[] minus(Range range) { // otherwise either the subtracted section's start is within our range _or_ its end is within our range, // and we can use that to only produce the one range we need to return if (range.first <= first) { - assert range.last >= first : "removed range expected to not end before existing range"; return new Range[] { new Range(range.last + 1, last) }; } else { - assert range.last >= last : "removed range expected to end by the end of the existing range"; - assert range.first <= last : "removed range expected to start before existing range"; return new Range[] { new Range(first, range.first - 1) }; From d409b26347a75523bb1307a555c1e715718f6b0f Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 6 Sep 2024 12:12:25 -0500 Subject: [PATCH 213/219] spotless --- .../java/io/deephaven/web/shared/data/RangeSet.java | 12 ++++++++---- .../io/deephaven/web/shared/data/RangeSetTest.java | 10 +++++----- 2 files changed, 13 insertions(+), 9 deletions(-) diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java index 013bc615ce8..6d34bd85c86 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java @@ -239,24 +239,24 @@ public void appendRange(Range range) { } else { replacement.add(range); } -// assert isSorted(); + // assert isSorted(); } public void appendRanges(List ranges) { appendRange(ranges.get(0)); replacement.addAll(ranges.subList(1, ranges.size())); -// assert isSorted(); + // assert isSorted(); } public void appendRanges(List ranges, long firstItemSubindex) { Range first = ranges.get(0); appendRange(new Range(first.getFirst() + firstItemSubindex, first.getLast())); replacement.addAll(ranges.subList(1, ranges.size())); -// assert isSorted(); + // assert isSorted(); } public List build() { -// assert isSorted(); + // assert isSorted(); return replacement; } @@ -280,9 +280,11 @@ public void advanceInCurrentRangeToKey(long key) { assert key != 0; this.key = key; } + public boolean hasNext() { return key == -1 || index < ranges.size() - 1; } + public Range next() { if (key != 0) { Range r = ranges.get(index); @@ -296,6 +298,7 @@ public Range next() { } } + public void applyShifts(ShiftedRange[] shiftedRanges) { if (shiftedRanges.length == 0 || isEmpty()) { return; @@ -411,6 +414,7 @@ public PrimitiveIterator.OfLong indexIterator() { private int rangeIndex = 0; private Range current = sortedRanges.get(0); private long offsetInRange = 0; + @Override public long nextLong() { long value = current.getFirst() + offsetInRange; diff --git a/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java b/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java index 07104d42e1e..a2c2522b2d9 100644 --- a/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java +++ b/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java @@ -598,14 +598,14 @@ public void testShift() { r =
RangeSet.fromSortedRanges(new Range[] { - new Range(0,1), - new Range(3,5), - new Range(7,13), + new Range(0, 1), + new Range(3, 5), + new Range(7, 13), new Range(15, 19), }); r.applyShifts(new ShiftedRange[] { - new ShiftedRange(new Range(3,4), -1), - new ShiftedRange(new Range(7,13), -1), + new ShiftedRange(new Range(3, 4), -1), + new ShiftedRange(new Range(7, 13), -1), new ShiftedRange(new Range(15, 17), -2), }); assertEquals(RangeSet.fromSortedRanges(new Range[] { From 00b4ec6d85b767d5a323d6cdadebaf772b4b2bf5 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 6 Sep 2024 13:24:28 -0500 Subject: [PATCH 214/219] Remove unnecessary imports --- .../web/client/api/barrage/data/WebBarrageSubscription.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java index deccbe877ca..494345a7bba 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java @@ -3,13 +3,11 @@ // package io.deephaven.web.client.api.barrage.data; -import elemental2.core.JsArray; import io.deephaven.chunk.Chunk; import io.deephaven.chunk.ChunkType; import io.deephaven.chunk.attributes.Values; import io.deephaven.web.client.api.barrage.WebBarrageMessage; import io.deephaven.web.client.api.barrage.def.InitialTableDefinition; -import io.deephaven.web.client.fu.JsData; import io.deephaven.web.client.state.ClientTableState; import io.deephaven.web.shared.data.Range; import io.deephaven.web.shared.data.RangeSet; From 4bca663f2de0f0cd9a88db9f03ffe27eb0b94ead Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 6 Sep 2024 16:14:47 -0500 Subject: [PATCH 215/219] Let the server decide if client should flatten for viewports --- .../src/main/resources/dh-defaults.prop | 5 +++- .../web/client/api/WorkerConnection.java | 17 +++++++---- .../TableViewportSubscription.java | 28 +++++++++++-------- 3 files changed, 32 insertions(+), 18 deletions(-) diff --git a/props/configs/src/main/resources/dh-defaults.prop b/props/configs/src/main/resources/dh-defaults.prop index 877a9f8d056..d3e0c6d2ede 100644 --- a/props/configs/src/main/resources/dh-defaults.prop +++ b/props/configs/src/main/resources/dh-defaults.prop @@ -57,12 +57,15 @@ web.storage.notebook.directory=/notebooks web.webgl=true web.webgl.editable=true +# Default to not flattening web viewports, but allow it as an option +web.flattenViewports=false + # List of configuration properties to provide to unauthenticated clients, so that they can decide how best to prove their # identity to the server. authentication.client.configuration.list=AuthHandlers # List of configuration properties to provide to authenticated clients, so they can interact with the server. -client.configuration.list=java.version,deephaven.version,barrage.version,groovy.version,python.version,http.session.durationMs,file.separator,web.storage.layout.directory,web.storage.notebook.directory,web.webgl,web.webgl.editable +client.configuration.list=java.version,deephaven.version,barrage.version,groovy.version,python.version,http.session.durationMs,file.separator,web.storage.layout.directory,web.storage.notebook.directory,web.webgl,web.webgl.editable,web.flattenViewports # Version list to add to the configuration property list. 
Each `=`-delimited pair denotes a short name for a versioned # jar, and a class that is found in that jar. Any such keys will be made available to the client.configuration.list diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java b/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java index 07fb6ff798f..239dd2efbd0 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java @@ -23,6 +23,7 @@ import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.application_pb.FieldsChangeUpdate; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.application_pb.ListFieldsRequest; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.application_pb_service.ApplicationServiceClient; +import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.config_pb.ConfigValue; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.config_pb.ConfigurationConstantsRequest; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.config_pb.ConfigurationConstantsResponse; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.config_pb_service.ConfigService; @@ -199,6 +200,8 @@ private enum State { private Map knownFields = new HashMap<>(); private ResponseStreamWrapper fieldsChangeUpdateStream; + private ConfigurationConstantsResponse constants; + public WorkerConnection(QueryConnectable info) { this.info = info; this.config = new ClientConfiguration(); @@ -479,11 +482,11 @@ this, ConfigService.GetConfigurationConstants, new ConfigurationConstantsRequest } // Read the timeout from the server, we'll refresh at less than that - result.getMessage().getConfigValuesMap().forEach((item, key) -> { - if (key.equals("http.session.durationMs")) { - sessionTimeoutMs = Double.parseDouble(item.getStringValue()); - } - }); + constants = result.getMessage(); + ConfigValue sessionDuration = constants.getConfigValuesMap().get("http.session.durationMs"); + if (sessionDuration != null && sessionDuration.hasStringValue()) { + sessionTimeoutMs = Double.parseDouble(sessionDuration.getStringValue()); + } // schedule an update based on our currently configured delay scheduledAuthUpdate = DomGlobal.setTimeout(ignore -> { @@ -1326,6 +1329,10 @@ public ClientConfiguration getConfig() { return config; } + public ConfigValue getServerConfigValue(String key) { + return constants.getConfigValuesMap().get(key); + } + public void onOpen(BiConsumer callback) { switch (state) { case Connected: diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java index e764f9adfe8..203dc0c7890 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java @@ -15,6 +15,7 @@ import io.deephaven.extensions.barrage.BarrageSnapshotOptions; import io.deephaven.extensions.barrage.ColumnConversionMode; import io.deephaven.javascript.proto.dhinternal.arrow.flight.protocol.flight_pb.FlightData; +import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.config_pb.ConfigValue; import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.table_pb.FlattenRequest; import 
io.deephaven.util.mutable.MutableLong; import io.deephaven.web.client.api.Column; @@ -89,19 +90,22 @@ public static TableViewportSubscription make(double firstRow, double lastRow, Co ClientTableState tableState = existingTable.state(); WorkerConnection connection = existingTable.getConnection(); - ClientTableState flattenedState = connection.newState((callback, newState, metadata) -> { - FlattenRequest flatten = new FlattenRequest(); - flatten.setSourceId(tableState.getHandle().makeTableReference()); - flatten.setResultId(newState.getHandle().makeTicket()); - connection.tableServiceClient().flatten(flatten, metadata, callback::apply); - }, "flatten"); - flattenedState.refetch(null, connection.metadata()).then(result -> { - return null; - }, err -> { - return null; - }); + final ClientTableState stateToSubscribe; + ConfigValue flattenViewport = connection.getServerConfigValue("web.flattenViewports"); + if (flattenViewport != null && flattenViewport.hasStringValue() + && "true".equalsIgnoreCase(flattenViewport.getStringValue())) { + stateToSubscribe = connection.newState((callback, newState, metadata) -> { + FlattenRequest flatten = new FlattenRequest(); + flatten.setSourceId(tableState.getHandle().makeTableReference()); + flatten.setResultId(newState.getHandle().makeTicket()); + connection.tableServiceClient().flatten(flatten, metadata, callback::apply); + }, "flatten"); + stateToSubscribe.refetch(null, connection.metadata()).then(result -> null, err -> null); + } else { + stateToSubscribe = tableState; + } - TableViewportSubscription sub = new TableViewportSubscription(flattenedState, connection, existingTable); + TableViewportSubscription sub = new TableViewportSubscription(stateToSubscribe, connection, existingTable); sub.setInternalViewport(firstRow, lastRow, columns, updateIntervalMs, false); return sub; } From 6b4054f93e964a74169427ce5b4ba3a64c6fd011 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Fri, 6 Sep 2024 16:15:14 -0500 Subject: [PATCH 216/219] Clean up RangeSet a bit for merge --- .../io/deephaven/web/shared/data/RangeSet.java | 15 ++------------- 1 file changed, 2 insertions(+), 13 deletions(-) diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java index 6d34bd85c86..45ec23f7830 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java @@ -56,9 +56,8 @@ public static RangeSet fromSortedRanges(Range[] sortedRanges) { private static void assertOrderedAndNonOverlapping(Range[] sortedRanges) { long lastSeen = -1; for (int i = 0; i < sortedRanges.length; i++) { - if (lastSeen >= sortedRanges[i].getFirst()) { - assert lastSeen == -1 : sortedRanges[i - 1] + " came before " + sortedRanges[i] + " (index=" + i + ")"; - } + assert lastSeen < sortedRanges[i].getFirst() || lastSeen == -1 + : sortedRanges[i - 1] + " came before " + sortedRanges[i] + " (index=" + i + ")"; lastSeen = sortedRanges[i].getLast(); } } @@ -239,32 +238,22 @@ public void appendRange(Range range) { } else { replacement.add(range); } - // assert isSorted(); } public void appendRanges(List ranges) { appendRange(ranges.get(0)); replacement.addAll(ranges.subList(1, ranges.size())); - // assert isSorted(); } public void appendRanges(List ranges, long firstItemSubindex) { Range first = ranges.get(0); appendRange(new Range(first.getFirst() + firstItemSubindex, first.getLast()));
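// (Editorial note, hedged: appendRange above coalesces the adjusted first range with the last
// range already accumulated when they touch; the remaining input ranges are assumed to be sorted
// and non-adjacent to each other, so they can be bulk-appended below without further overlap checks.)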
replacement.addAll(ranges.subList(1, ranges.size())); } public List build() { return replacement; } - - private boolean isSorted() { - RangeSet r = new RangeSet(); - replacement.forEach(r::addRange); - return r.equals(RangeSet.fromSortedRanges(replacement.toArray(new Range[0]))); - } } private static class RangeIterator { From 6f74b22ad0f4e8f59514a6310d248657e0201baf Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Mon, 9 Sep 2024 12:22:53 -0500 Subject: [PATCH 217/219] Correctly notify UI of failures in subscriptions --- .../AbstractTableSubscription.java | 36 +++++++++++++++---- 1 file changed, 30 insertions(+), 6 deletions(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java index f0e5741b01e..155dbb8271a 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/AbstractTableSubscription.java @@ -30,6 +30,7 @@ import io.deephaven.web.client.state.ClientTableState; import io.deephaven.web.shared.data.RangeSet; import io.deephaven.web.shared.data.ShiftedRange; +import io.deephaven.web.shared.fu.JsRunnable; import jsinterop.annotations.JsProperty; import jsinterop.base.Any; import jsinterop.base.Js; @@ -85,6 +86,8 @@ public enum Status { protected Status status = Status.STARTING; + private String failMsg; + public AbstractTableSubscription(ClientTableState state, WorkerConnection connection) { state.retain(this); this.state = state; @@ -100,6 +103,7 @@ public AbstractTableSubscription(ClientTableState state, WorkerConnec */ protected void revive() { // Once the state is running, set up the actual subscription + // Don't let subscription be used again, table failed and user will have already gotten an error elsewhere state.onRunning(s -> { if (status != Status.STARTING) { // already closed @@ -124,10 +128,11 @@ protected void revive() { doExchange.onEnd(this::onStreamEnd); sendFirstSubscriptionRequest(); - }, () -> { - // Don't let subscription be used again, table failed and user will have already gotten an error elsewhere - status = Status.DONE; - }); + }, + // If the upstream table fails, kill the subscription + this::fail, + // If the upstream table is closed, it's because this subscription released it, do nothing + JsRunnable.doNothing()); } public Status getStatus() { @@ -161,7 +166,13 @@ protected static FlatBufferBuilder subscriptionRequest(byte[] tableTicket, BitSe protected void sendBarrageSubscriptionRequest(RangeSet viewport, JsArray columns, Double updateIntervalMs, boolean isReverseViewport) { - assert status == Status.ACTIVE || status == Status.PENDING_UPDATE : status; + if (status == Status.DONE) { + if (failMsg == null) { + throw new IllegalStateException("Can't change subscription, already closed"); + } else { + throw new IllegalStateException("Can't change subscription, already failed: " + failMsg); + } + } status = Status.PENDING_UPDATE; this.columns = columns; this.viewportRowSet = viewport; @@ -471,12 +482,25 @@ private void onFlightData(FlightData data) { } protected void onStreamEnd(ResponseStreamWrapper.Status status) { - if (this.status != Status.DONE && status.isTransportError()) { + if (this.status == Status.DONE) { + return; + } + if (status.isTransportError()) { // If the subscription isn't closed and we
hit a transport error, allow it to restart this.status = Status.STARTING; + } else { + // Subscription failed somehow, fire an event + fail(status.getDetails()); } } + private void fail(String message) { + failureHandled(message); + this.status = Status.DONE; + doExchange = null; + failMsg = message; + } + /** * The columns that were subscribed to when this subscription was created * From 261334b8bc2b016a4cb8f07ec807818b7bc84218 Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Mon, 9 Sep 2024 14:36:05 -0500 Subject: [PATCH 218/219] Switch to release version of flatbuffers-gwt --- .../src/main/groovy/io.deephaven.repository-conventions.gradle | 3 --- gradle/libs.versions.toml | 2 +- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/buildSrc/src/main/groovy/io.deephaven.repository-conventions.gradle b/buildSrc/src/main/groovy/io.deephaven.repository-conventions.gradle index 828ed7c6b33..1deccf352c0 100644 --- a/buildSrc/src/main/groovy/io.deephaven.repository-conventions.gradle +++ b/buildSrc/src/main/groovy/io.deephaven.repository-conventions.gradle @@ -1,8 +1,5 @@ repositories { mavenCentral() - maven { - url 'https://oss.sonatype.org/content/repositories/snapshots/' - } maven { url 'https://jitpack.io' content { diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml index b3d0015743f..e02cdb202c5 100644 --- a/gradle/libs.versions.toml +++ b/gradle/libs.versions.toml @@ -77,7 +77,7 @@ trove = "3.0.3" undercouch = "2.15.1" univocity = "2.6.0" vertispan-nio = "1.0-alpha-2" -vertispan-flatbuffers-gwt = "1.12.0-1-SNAPSHOT" +vertispan-flatbuffers-gwt = "1.12.0-1" vertispan-ts-defs = "1.0.0-RC4" # test versions From 5d1d90b260a339931330c48a089ee8bb3eabb63a Mon Sep 17 00:00:00 2001 From: Colin Alworth Date: Tue, 10 Sep 2024 06:15:43 -0500 Subject: [PATCH 219/219] Clear old data when handling blink data --- .../web/client/api/barrage/data/WebBarrageSubscription.java | 2 +- .../src/main/java/io/deephaven/web/shared/data/RangeSet.java | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java index 494345a7bba..a46d8ec7f90 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/data/WebBarrageSubscription.java @@ -199,7 +199,7 @@ public void applyUpdates(WebBarrageMessage message) { } } - currentRowSet.removeRangeSet(message.rowsRemoved); + currentRowSet.clear(); currentRowSet.addRangeSet(message.rowsAdded); state.setSize(message.rowsAdded.size()); diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java index 45ec23f7830..3e77c9ccec8 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java @@ -223,6 +223,11 @@ public void removeRange(Range range) { removeRangeSet(RangeSet.fromSortedRanges(Collections.singletonList(range))); } + public void clear() { + sortedRanges.clear(); + poisonCache(0); + } + private static class RangeAccumulator { private final List replacement = new ArrayList<>();
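Editorial addendum (hedged, not part of the patches): the final change encodes the blink-table contract in WebBarrageSubscription.applyUpdates, where every blink cycle replaces the previous contents outright rather than subtracting message.rowsRemoved. A minimal sketch, with illustrative row ranges only:

    RangeSet currentRowSet = RangeSet.ofRange(0, 9);   // rows left over from the previous blink cycle
    currentRowSet.clear();                             // blink semantics: drop everything first
    currentRowSet.addRangeSet(RangeSet.ofRange(0, 4)); // keep only the rows added by this message
    // currentRowSet now equals RangeSet.ofRange(0, 4)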