@useairfoil/flight 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,231 @@
+ // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+ // versions:
+ //   protoc-gen-ts_proto v2.6.1
+ //   protoc unknown
+ // source: google/protobuf/timestamp.proto
+
+ /* eslint-disable */
+ import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
+ import { messageTypeRegistry } from "../../typeRegistry.js";
+
+ export const protobufPackage = "google.protobuf";
+
+ /**
+  * A Timestamp represents a point in time independent of any time zone or local
+  * calendar, encoded as a count of seconds and fractions of seconds at
+  * nanosecond resolution. The count is relative to an epoch at UTC midnight on
+  * January 1, 1970, in the proleptic Gregorian calendar which extends the
+  * Gregorian calendar backwards to year one.
+  *
+  * All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
+  * second table is needed for interpretation, using a [24-hour linear
+  * smear](https://developers.google.com/time/smear).
+  *
+  * The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
+  * restricting to that range, we ensure that we can convert to and from [RFC
+  * 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
+  *
+  * # Examples
+  *
+  * Example 1: Compute Timestamp from POSIX `time()`.
+  *
+  *     Timestamp timestamp;
+  *     timestamp.set_seconds(time(NULL));
+  *     timestamp.set_nanos(0);
+  *
+  * Example 2: Compute Timestamp from POSIX `gettimeofday()`.
+  *
+  *     struct timeval tv;
+  *     gettimeofday(&tv, NULL);
+  *
+  *     Timestamp timestamp;
+  *     timestamp.set_seconds(tv.tv_sec);
+  *     timestamp.set_nanos(tv.tv_usec * 1000);
+  *
+  * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
+  *
+  *     FILETIME ft;
+  *     GetSystemTimeAsFileTime(&ft);
+  *     UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
+  *
+  *     // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
+  *     // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
+  *     Timestamp timestamp;
+  *     timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
+  *     timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
+  *
+  * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
+  *
+  *     long millis = System.currentTimeMillis();
+  *
+  *     Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
+  *         .setNanos((int) ((millis % 1000) * 1000000)).build();
+  *
+  * Example 5: Compute Timestamp from Java `Instant.now()`.
+  *
+  *     Instant now = Instant.now();
+  *
+  *     Timestamp timestamp =
+  *         Timestamp.newBuilder().setSeconds(now.getEpochSecond())
+  *             .setNanos(now.getNano()).build();
+  *
+  * Example 6: Compute Timestamp from current time in Python.
+  *
+  *     timestamp = Timestamp()
+  *     timestamp.GetCurrentTime()
+  *
+  * # JSON Mapping
+  *
+  * In JSON format, the Timestamp type is encoded as a string in the
+  * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
+  * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
+  * where {year} is always expressed using four digits while {month}, {day},
+  * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
+  * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
+  * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
+  * is required. A proto3 JSON serializer should always use UTC (as indicated by
+  * "Z") when printing the Timestamp type and a proto3 JSON parser should be
+  * able to accept both UTC and other timezones (as indicated by an offset).
+  *
+  * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
+  * 01:30 UTC on January 15, 2017.
+  *
+  * In JavaScript, one can convert a Date object to this format using the
+  * standard
+  * [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
+  * method. In Python, a standard `datetime.datetime` object can be converted
+  * to this format using
+  * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
+  * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
+  * the Joda Time's [`ISODateTimeFormat.dateTime()`](
+  * http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime()
+  * ) to obtain a formatter capable of generating timestamps in this format.
+  */
+ export interface Timestamp {
+   $type: "google.protobuf.Timestamp";
+   /**
+    * Represents seconds of UTC time since Unix epoch
+    * 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
+    * 9999-12-31T23:59:59Z inclusive.
+    */
+   readonly seconds: bigint;
+   /**
+    * Non-negative fractions of a second at nanosecond resolution. Negative
+    * second values with fractions must still have non-negative nanos values
+    * that count forward in time. Must be from 0 to 999,999,999
+    * inclusive.
+    */
+   readonly nanos: number;
+ }
+
+ function createBaseTimestamp(): Timestamp {
+   return { $type: "google.protobuf.Timestamp", seconds: 0n, nanos: 0 };
+ }
+
+ export const Timestamp: MessageFns<Timestamp, "google.protobuf.Timestamp"> = {
+   $type: "google.protobuf.Timestamp" as const,
+
+   encode(message: Timestamp, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+     if (message.seconds !== 0n) {
+       if (BigInt.asIntN(64, message.seconds) !== message.seconds) {
+         throw new globalThis.Error("value provided for field message.seconds of type int64 too large");
+       }
+       writer.uint32(8).int64(message.seconds);
+     }
+     if (message.nanos !== 0) {
+       writer.uint32(16).int32(message.nanos);
+     }
+     return writer;
+   },
+
+   decode(input: BinaryReader | Uint8Array, length?: number): Timestamp {
+     const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+     let end = length === undefined ? reader.len : reader.pos + length;
+     const message = createBaseTimestamp() as any;
+     while (reader.pos < end) {
+       const tag = reader.uint32();
+       switch (tag >>> 3) {
+         case 1: {
+           if (tag !== 8) {
+             break;
+           }
+
+           message.seconds = reader.int64() as bigint;
+           continue;
+         }
+         case 2: {
+           if (tag !== 16) {
+             break;
+           }
+
+           message.nanos = reader.int32();
+           continue;
+         }
+       }
+       if ((tag & 7) === 4 || tag === 0) {
+         break;
+       }
+       reader.skip(tag & 7);
+     }
+     return message;
+   },
+
+   fromJSON(object: any): Timestamp {
+     return {
+       $type: Timestamp.$type,
+       seconds: isSet(object.seconds) ? BigInt(object.seconds) : 0n,
+       nanos: isSet(object.nanos) ? globalThis.Number(object.nanos) : 0,
+     };
+   },
+
+   toJSON(message: Timestamp): unknown {
+     const obj: any = {};
+     if (message.seconds !== 0n) {
+       obj.seconds = message.seconds.toString();
+     }
+     if (message.nanos !== 0) {
+       obj.nanos = Math.round(message.nanos);
+     }
+     return obj;
+   },
+
+   create<I extends Exact<DeepPartial<Timestamp>, I>>(base?: I): Timestamp {
+     return Timestamp.fromPartial(base ?? ({} as any));
+   },
+   fromPartial<I extends Exact<DeepPartial<Timestamp>, I>>(object: I): Timestamp {
+     const message = createBaseTimestamp() as any;
+     message.seconds = object.seconds ?? 0n;
+     message.nanos = object.nanos ?? 0;
+     return message;
+   },
+ };
+
+ messageTypeRegistry.set(Timestamp.$type, Timestamp);
+
+ type Builtin = Date | Function | Uint8Array | string | number | boolean | bigint | undefined;
+
+ export type DeepPartial<T> = T extends Builtin ? T
+   : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>>
+   : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>>
+   : T extends { readonly $case: string }
+     ? { [K in keyof Omit<T, "$case">]?: DeepPartial<T[K]> } & { readonly $case: T["$case"] }
+   : T extends {} ? { [K in Exclude<keyof T, "$type">]?: DeepPartial<T[K]> }
+   : Partial<T>;
+
+ type KeysOfUnion<T> = T extends T ? keyof T : never;
+ export type Exact<P, I extends P> = P extends Builtin ? P
+   : P & { [K in keyof P]: Exact<P[K], I[K]> } & { [K in Exclude<keyof I, KeysOfUnion<P> | "$type">]: never };
+
+ function isSet(value: any): boolean {
+   return value !== null && value !== undefined;
+ }
+
+ export interface MessageFns<T, V extends string> {
+   readonly $type: V;
+   encode(message: T, writer?: BinaryWriter): BinaryWriter;
+   decode(input: BinaryReader | Uint8Array, length?: number): T;
+   fromJSON(object: any): T;
+   toJSON(message: T): unknown;
+   create<I extends Exact<DeepPartial<T>, I>>(base?: I): T;
+   fromPartial<I extends Exact<DeepPartial<T>, I>>(object: I): T;
+ }
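
For orientation, a hedged sketch of how this generated module is typically consumed (the import path is an assumption; `fromPartial`, `encode`, `decode`, and `toJSON` are the functions defined on the `Timestamp` constant above):

    // Hypothetical usage sketch for the generated Timestamp module.
    import { Timestamp } from "./google/protobuf/timestamp.js"; // assumed path

    // Build a Timestamp from the current wall clock (milliseconds input).
    const millis = Date.now();
    const ts = Timestamp.fromPartial({
      seconds: BigInt(Math.floor(millis / 1000)),
      nanos: (millis % 1000) * 1_000_000,
    });

    // Wire round-trip: encode to protobuf bytes, then decode back.
    const bytes = Timestamp.encode(ts).finish();
    const decoded = Timestamp.decode(bytes);
    console.log(decoded.seconds, decoded.nanos, Timestamp.toJSON(decoded));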
@@ -0,0 +1,29 @@
+ // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+ // versions:
+ //   protoc-gen-ts_proto v2.6.1
+ //   protoc unknown
+
+ /* eslint-disable */
+ import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
+
+ export interface MessageType<Message extends UnknownMessage = UnknownMessage> {
+   $type: Message["$type"];
+   encode(message: Message, writer?: BinaryWriter): BinaryWriter;
+   decode(input: BinaryReader | Uint8Array, length?: number): Message;
+   fromJSON(object: any): Message;
+   toJSON(message: Message): unknown;
+   fromPartial(object: DeepPartial<Message>): Message;
+ }
+
+ export type UnknownMessage = { $type: string };
+
+ export const messageTypeRegistry = new Map<string, MessageType>();
+
+ type Builtin = Date | Function | Uint8Array | string | number | boolean | bigint | undefined;
+ export type DeepPartial<T> = T extends Builtin ? T
+   : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>>
+   : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>>
+   : T extends { readonly $case: string }
+     ? { [K in keyof Omit<T, "$case">]?: DeepPartial<T[K]> } & { readonly $case: T["$case"] }
+   : T extends {} ? { [K in Exclude<keyof T, "$type">]?: DeepPartial<T[K]> }
+   : Partial<T>;
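
Because each generated message registers itself in `messageTypeRegistry` (as the `timestamp.ts` module above does via `messageTypeRegistry.set(...)`), a payload can be decoded knowing only its fully qualified `$type` string. A minimal sketch, assuming the registry import path:

    // Hypothetical helper: decode a payload by protobuf type name.
    import { messageTypeRegistry } from "./typeRegistry.js"; // assumed path

    function decodeByTypeName(typeName: string, bytes: Uint8Array) {
      const messageType = messageTypeRegistry.get(typeName);
      if (messageType === undefined) {
        throw new Error(`unknown message type: ${typeName}`);
      }
      return messageType.decode(bytes);
    }

    // e.g. decodeByTypeName("google.protobuf.Timestamp", payload)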
@@ -0,0 +1,39 @@
+ import {
+   type Channel,
+   type ChannelCredentials,
+   type ChannelOptions,
+   type CompatServiceDefinition,
+   createChannel,
+   type DefaultCallOptions,
+   type NormalizedServiceDefinition,
+ } from "nice-grpc";
+
+ export type RemoveTypeUrl<T> = Omit<T, "$type">;
+
+ export type ClientOptions<S extends CompatServiceDefinition> = {
+   defaultCallOptions?: DefaultCallOptions<NormalizedServiceDefinition<S>>;
+ };
+
+ export type HostOrChannel =
+   | {
+       host: string;
+       credentials?: ChannelCredentials;
+       channelOptions?: ChannelOptions;
+       channel?: never;
+     }
+   | {
+       host?: never;
+       channel: Channel;
+     };
+
+ export function createChannelFromConfig(config: HostOrChannel): Channel {
+   if (config.host !== undefined) {
+     return createChannel(
+       config.host,
+       config.credentials,
+       config.channelOptions,
+     );
+   }
+
+   return config.channel;
+ }
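
`HostOrChannel` is a discriminated union: callers supply either a `host` (optionally with credentials and channel options) or a pre-built nice-grpc `Channel`, never both, and `createChannelFromConfig` normalizes either shape into a `Channel`. A hedged usage sketch (the import path is an assumption):

    // Hypothetical usage of createChannelFromConfig.
    import { createChannelFromConfig } from "./client.js"; // assumed path

    // Variant 1: have the helper build the channel from a host string.
    const channel = createChannelFromConfig({ host: "localhost:50051" });

    // Variant 2: pass through an existing nice-grpc Channel unchanged.
    // const channel = createChannelFromConfig({ channel: existingChannel });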
@@ -0,0 +1,401 @@
+ /** biome-ignore-all lint/suspicious/noExplicitAny: <> */
+
+ // Licensed to the Apache Software Foundation (ASF) under one or more
+ // contributor license agreements. See the NOTICE file distributed with this
+ // work for additional information regarding copyright ownership. The ASF
+ // licenses this file to you under the Apache License, Version 2.0 (the
+ // "License"); you may not use this file except in compliance with the License.
+ // You may obtain a copy of the License at
+ //
+ //   http://www.apache.org/licenses/LICENSE-2.0
+ //
+ // Unless required by applicable law or agreed to in writing, software
+ // distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ // License for the specific language governing permissions and limitations under
+ // the License.
+
+ // Originally from the arrow-js repository.
+ // https://github.com/apache/arrow-js/blob/e656bcc0423f8c315ec3a66743aa96375eed1e82/src/ipc/reader.ts
+ //
+ // Changes:
+ //
+ // - Add `RecordBatchStreamReaderFromFlightData` to decode `RecordBatch` from a
+ //   stream of `FlightData` messages.
+ import {
+   type Codec,
+   compressionRegistry,
+   type Data,
+   type DataType,
+   type Field,
+   Message,
+   MessageHeader,
+   makeData,
+   RecordBatch,
+   type Schema,
+   Struct,
+   type TypeMap,
+   Vector,
+ } from "apache-arrow";
+ import { ITERATOR_DONE } from "apache-arrow/io/interfaces";
+ import {
+   COMPRESS_LENGTH_PREFIX,
+   LENGTH_NO_COMPRESSED_DATA,
+ } from "apache-arrow/ipc/compression/constants";
+ import * as metadata from "apache-arrow/ipc/metadata/message";
+ import { _InternalEmptyPlaceholderRecordBatch } from "apache-arrow/recordbatch";
+ import { bigIntToNumber } from "apache-arrow/util/bigint";
+ import {
+   CompressedVectorLoader,
+   VectorLoader,
+ } from "apache-arrow/visitor/vectorloader";
+ import * as flatbuffers from "flatbuffers";
+ import type { FlightData } from "./proto/Flight";
+
+ const invalidMessageType = (type: MessageHeader) =>
+   `Expected ${MessageHeader[type]} Message in stream, but was null or length 0.`;
+ // const nullMessage = (type: MessageHeader) =>
+ //   `Header pointer of flatbuffer-encoded ${MessageHeader[type]} Message is null or length 0.`;
+ // const invalidMessageMetadata = (expected: number, actual: number) =>
+ //   `Expected to read ${expected} metadata bytes, but only read ${actual}.`;
+ // const invalidMessageBodyLength = (expected: number, actual: number) =>
+ //   `Expected to read ${expected} bytes for message body, but only read ${actual}.`;
+
+ abstract class RecordBatchReaderImpl<T extends TypeMap = any>
+   implements RecordBatchReaderImpl<T>
+ {
+   public declare schema: Schema<T>;
+   public closed = false;
+   public autoDestroy = true;
+   public dictionaries: Map<number, Vector>;
+
+   protected _dictionaryIndex = 0;
+   protected _recordBatchIndex = 0;
+   public get numDictionaries() {
+     return this._dictionaryIndex;
+   }
+   public get numRecordBatches() {
+     return this._recordBatchIndex;
+   }
+
+   constructor(dictionaries = new Map<number, Vector>()) {
+     this.dictionaries = dictionaries;
+   }
+
+   /*
+   public isSync(): this is RecordBatchReaders<T> {
+     return false;
+   }
+   public isAsync(): this is AsyncRecordBatchReaders<T> {
+     return false;
+   }
+   public isFile(): this is RecordBatchFileReaders<T> {
+     return false;
+   }
+   public isStream(): this is RecordBatchStreamReaders<T> {
+     return false;
+   }
+   */
+
+   public reset(schema?: Schema<T> | null) {
+     this._dictionaryIndex = 0;
+     this._recordBatchIndex = 0;
+     this.schema = <any>schema;
+     this.dictionaries = new Map();
+     return this;
+   }
+
+   protected _loadRecordBatch(
+     header: metadata.RecordBatch,
+     body: Uint8Array,
+   ): RecordBatch<T> {
+     // Ensure the body buffer is properly aligned for Apache Arrow.
+     // Apache Arrow requires 8-byte aligned buffers for typed arrays.
+     const alignedBody = this._ensureAlignedBuffer(body);
+
+     let children: Data<any>[];
+     if (header.compression != null) {
+       const codec = compressionRegistry.get(header.compression.type);
+       if (codec?.decode && typeof codec.decode === "function") {
+         const { decompressedBody, buffers } = this._decompressBuffers(
+           header,
+           alignedBody,
+           codec,
+         );
+         children = this._loadCompressedVectors(
+           header,
+           decompressedBody,
+           this.schema.fields,
+         );
+         header = new metadata.RecordBatch(
+           header.length,
+           header.nodes,
+           buffers,
+           null,
+         );
+       } else {
+         throw new Error("Record batch is compressed but codec not found");
+       }
+     } else {
+       children = this._loadVectors(header, alignedBody, this.schema.fields);
+     }
+
+     const data = makeData({
+       type: new Struct(this.schema.fields),
+       length: header.length,
+       children,
+     });
+     return new RecordBatch(this.schema, data);
+   }
+
+   /**
+    * Ensures that a buffer is properly aligned for Apache Arrow's requirements.
+    * Apache Arrow needs 8-byte aligned buffers for typed array views.
+    */
+   private _ensureAlignedBuffer(buffer: Uint8Array): Uint8Array {
+     // If the buffer's byteOffset is already aligned to 8 bytes, return it as is.
+     if (buffer.byteOffset % 8 === 0) {
+       return buffer;
+     }
+
+     // Otherwise, copy into a new, properly aligned buffer.
+     const alignedBuffer = new Uint8Array(buffer.length);
+     alignedBuffer.set(buffer);
+     return alignedBuffer;
+   }
+
+   protected _loadDictionaryBatch(
+     header: metadata.DictionaryBatch,
+     body: Uint8Array,
+   ) {
+     const { id, isDelta } = header;
+     const { dictionaries, schema } = this;
+     const dictionary = dictionaries.get(id);
+     const type = schema.dictionaries.get(id)!;
+     let data: Data<any>[];
+     if (header.data.compression != null) {
+       const codec = compressionRegistry.get(header.data.compression.type);
+       if (codec?.decode && typeof codec.decode === "function") {
+         const { decompressedBody, buffers } = this._decompressBuffers(
+           header.data,
+           body,
+           codec,
+         );
+         data = this._loadCompressedVectors(header.data, decompressedBody, [
+           type,
+         ]);
+         header = new metadata.DictionaryBatch(
+           new metadata.RecordBatch(
+             header.data.length,
+             header.data.nodes,
+             buffers,
+             null,
+           ),
+           id,
+           isDelta,
+         );
+       } else {
+         throw new Error("Dictionary batch is compressed but codec not found");
+       }
+     } else {
+       data = this._loadVectors(header.data, body, [type]);
+     }
+     // const data = this._loadVectors(header.data, body, [type]);
+     return (
+       dictionary && isDelta
+         ? dictionary.concat(new Vector(data))
+         : new Vector(data)
+     ).memoize() as Vector;
+   }
+
+   protected _loadVectors(
+     header: metadata.RecordBatch,
+     body: Uint8Array,
+     types: (Field | DataType)[],
+   ) {
+     return new VectorLoader(
+       body,
+       header.nodes,
+       header.buffers,
+       this.dictionaries,
+       this.schema.metadataVersion,
+     ).visitMany(types);
+   }
+
+   protected _loadCompressedVectors(
+     header: metadata.RecordBatch,
+     body: Uint8Array[],
+     types: (Field | DataType)[],
+   ) {
+     return new CompressedVectorLoader(
+       body,
+       header.nodes,
+       header.buffers,
+       this.dictionaries,
+       this.schema.metadataVersion,
+     ).visitMany(types);
+   }
+
+   private _decompressBuffers(
+     header: metadata.RecordBatch,
+     body: Uint8Array,
+     codec: Codec,
+   ): { decompressedBody: Uint8Array[]; buffers: metadata.BufferRegion[] } {
+     const decompressedBuffers: Uint8Array[] = [];
+     const newBufferRegions: metadata.BufferRegion[] = [];
+
+     let currentOffset = 0;
+     for (const { offset, length } of header.buffers) {
+       if (length === 0) {
+         decompressedBuffers.push(new Uint8Array(0));
+         newBufferRegions.push(new metadata.BufferRegion(currentOffset, 0));
+         continue;
+       }
+       const byteBuf = new flatbuffers.ByteBuffer(
+         body.subarray(offset, offset + length),
+       );
+       // Each compressed buffer starts with an int64 prefix holding the
+       // uncompressed length (LENGTH_NO_COMPRESSED_DATA means "stored as is").
+       const uncompressedLength = bigIntToNumber(byteBuf.readInt64(0));
+
+       const bytes = byteBuf.bytes().subarray(COMPRESS_LENGTH_PREFIX);
+
+       const decompressed =
+         uncompressedLength === LENGTH_NO_COMPRESSED_DATA
+           ? bytes
+           : codec.decode!(bytes);
+
+       decompressedBuffers.push(decompressed);
+
+       // Re-point the buffer regions at the decompressed data, keeping each
+       // region 8-byte aligned as Arrow requires.
+       const padding = ((currentOffset + 7) & ~7) - currentOffset;
+       currentOffset += padding;
+       newBufferRegions.push(
+         new metadata.BufferRegion(currentOffset, decompressed.length),
+       );
+       currentOffset += decompressed.length;
+     }
+
+     return {
+       decompressedBody: decompressedBuffers,
+       buffers: newBufferRegions,
+     };
+   }
+ }
+
+ export class RecordBatchStreamReaderFromFlightData<T extends TypeMap = any>
+   extends RecordBatchReaderImpl<T>
+   implements AsyncIterableIterator<RecordBatch<T>>
+ {
+   protected _reader: AsyncIterator<FlightData>;
+
+   constructor(
+     protected _source: AsyncIterable<FlightData>,
+     dictionaries?: Map<number, Vector>,
+   ) {
+     super(dictionaries);
+
+     this._reader = this._source[Symbol.asyncIterator]();
+   }
+
+   // public isAsync(): this is AsyncRecordBatchReaders<T> {
+   //   return true;
+   // }
+
+   // public isStream(): this is RecordBatchStreamReaders<T> {
+   //   return true;
+   // }
+
+   public [Symbol.asyncIterator](): AsyncIterableIterator<RecordBatch<T>> {
+     return this as AsyncIterableIterator<RecordBatch<T>>;
+   }
+
+   /*
+   public async cancel() {
+     if (!this.closed && (this.closed = true)) {
+       await this.reset()._reader.return();
+       this._reader = <any>null;
+       this.dictionaries = <any>null;
+     }
+   }
+
+   public async open(options?: OpenOptions) {
+     if (!this.closed) {
+       this.autoDestroy = shouldAutoDestroy(this, options);
+       if (
+         !(this.schema || (this.schema = (await this._reader.readSchema())!))
+       ) {
+         await this.cancel();
+       }
+     }
+     return this;
+   }
+
+   public async throw(value?: any): Promise<IteratorResult<any>> {
+     if (!this.closed && this.autoDestroy && (this.closed = true)) {
+       return await this.reset()._reader.throw(value);
+     }
+     return ITERATOR_DONE;
+   }
+
+   public async return(value?: any): Promise<IteratorResult<any>> {
+     if (!this.closed && this.autoDestroy && (this.closed = true)) {
+       return await this.reset()._reader.return(value);
+     }
+     return ITERATOR_DONE;
+   }
+   */
+
+   public async next() {
+     if (this.closed) {
+       return ITERATOR_DONE;
+     }
+
+     while (true) {
+       const maybeMessage = await this._readNextMessageAndValidate();
+       if (!maybeMessage) {
+         break;
+       }
+
+       const { message, flight } = maybeMessage;
+
+       if (message.isSchema()) {
+         this.reset(message.header());
+       } else if (message.isRecordBatch()) {
+         this._recordBatchIndex++;
+         const header = message.header();
+         const recordBatch = this._loadRecordBatch(header, flight.dataBody);
+         return { done: false, value: recordBatch };
+       } else if (message.isDictionaryBatch()) {
+         this._dictionaryIndex++;
+         const header = message.header();
+         const vector = this._loadDictionaryBatch(header, flight.dataBody);
+         this.dictionaries.set(header.id, vector);
+       }
+     }
+
+     if (this.schema && this._recordBatchIndex === 0) {
+       this._recordBatchIndex++;
+       return {
+         done: false,
+         value: new _InternalEmptyPlaceholderRecordBatch<T>(this.schema),
+       };
+     }
+
+     // return await this.return();
+     return ITERATOR_DONE;
+   }
+
+   protected async _readNextMessageAndValidate<T extends MessageHeader>(
+     type?: T | null,
+   ) {
+     const { done, value } = await this._reader.next();
+     if (done) {
+       return null;
+     }
+
+     const message = Message.decode(value.dataHeader);
+     if (type != null && message.headerType !== type) {
+       throw new Error(invalidMessageType(type));
+     }
+
+     return { message, flight: value };
+   }
+ }
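
Finally, a hedged sketch of how `RecordBatchStreamReaderFromFlightData` might be consumed: it implements `AsyncIterableIterator<RecordBatch>`, so a `for await` loop over the reader yields one Arrow `RecordBatch` per Flight record-batch message, while schema and dictionary-batch messages are absorbed internally. The stream variable below is a stand-in for whatever `AsyncIterable<FlightData>` a Flight `DoGet` call returns; the import paths are assumptions.

    // Hypothetical consumption sketch.
    import type { FlightData } from "./proto/Flight";
    import { RecordBatchStreamReaderFromFlightData } from "./reader.js"; // assumed path

    async function collectBatches(flightDataStream: AsyncIterable<FlightData>) {
      const reader = new RecordBatchStreamReaderFromFlightData(flightDataStream);
      const batches = [];
      for await (const batch of reader) {
        batches.push(batch); // each value is an apache-arrow RecordBatch
      }
      return batches;
    }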