@query-farm/vgi-rpc 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.md +191 -0
- package/README.md +332 -0
- package/dist/client/connect.d.ts +10 -0
- package/dist/client/connect.d.ts.map +1 -0
- package/dist/client/index.d.ts +6 -0
- package/dist/client/index.d.ts.map +1 -0
- package/dist/client/introspect.d.ts +30 -0
- package/dist/client/introspect.d.ts.map +1 -0
- package/dist/client/ipc.d.ts +34 -0
- package/dist/client/ipc.d.ts.map +1 -0
- package/dist/client/pipe.d.ts +63 -0
- package/dist/client/pipe.d.ts.map +1 -0
- package/dist/client/stream.d.ts +52 -0
- package/dist/client/stream.d.ts.map +1 -0
- package/dist/client/types.d.ts +25 -0
- package/dist/client/types.d.ts.map +1 -0
- package/dist/constants.d.ts +15 -0
- package/dist/constants.d.ts.map +1 -0
- package/dist/dispatch/describe.d.ts +14 -0
- package/dist/dispatch/describe.d.ts.map +1 -0
- package/dist/dispatch/stream.d.ts +20 -0
- package/dist/dispatch/stream.d.ts.map +1 -0
- package/dist/dispatch/unary.d.ts +9 -0
- package/dist/dispatch/unary.d.ts.map +1 -0
- package/dist/errors.d.ts +12 -0
- package/dist/errors.d.ts.map +1 -0
- package/dist/http/common.d.ts +16 -0
- package/dist/http/common.d.ts.map +1 -0
- package/dist/http/dispatch.d.ts +18 -0
- package/dist/http/dispatch.d.ts.map +1 -0
- package/dist/http/handler.d.ts +16 -0
- package/dist/http/handler.d.ts.map +1 -0
- package/dist/http/index.d.ts +4 -0
- package/dist/http/index.d.ts.map +1 -0
- package/dist/http/token.d.ts +24 -0
- package/dist/http/token.d.ts.map +1 -0
- package/dist/http/types.d.ts +30 -0
- package/dist/http/types.d.ts.map +1 -0
- package/dist/index.d.ts +9 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +2493 -0
- package/dist/index.js.map +34 -0
- package/dist/protocol.d.ts +62 -0
- package/dist/protocol.d.ts.map +1 -0
- package/dist/schema.d.ts +38 -0
- package/dist/schema.d.ts.map +1 -0
- package/dist/server.d.ts +19 -0
- package/dist/server.d.ts.map +1 -0
- package/dist/types.d.ts +71 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/util/schema.d.ts +20 -0
- package/dist/util/schema.d.ts.map +1 -0
- package/dist/util/zstd.d.ts +5 -0
- package/dist/util/zstd.d.ts.map +1 -0
- package/dist/wire/reader.d.ts +40 -0
- package/dist/wire/reader.d.ts.map +1 -0
- package/dist/wire/request.d.ts +15 -0
- package/dist/wire/request.d.ts.map +1 -0
- package/dist/wire/response.d.ts +25 -0
- package/dist/wire/response.d.ts.map +1 -0
- package/dist/wire/writer.d.ts +59 -0
- package/dist/wire/writer.d.ts.map +1 -0
- package/package.json +32 -0
- package/src/client/connect.ts +310 -0
- package/src/client/index.ts +14 -0
- package/src/client/introspect.ts +138 -0
- package/src/client/ipc.ts +225 -0
- package/src/client/pipe.ts +661 -0
- package/src/client/stream.ts +297 -0
- package/src/client/types.ts +31 -0
- package/src/constants.ts +22 -0
- package/src/dispatch/describe.ts +155 -0
- package/src/dispatch/stream.ts +151 -0
- package/src/dispatch/unary.ts +35 -0
- package/src/errors.ts +22 -0
- package/src/http/common.ts +89 -0
- package/src/http/dispatch.ts +340 -0
- package/src/http/handler.ts +247 -0
- package/src/http/index.ts +6 -0
- package/src/http/token.ts +149 -0
- package/src/http/types.ts +49 -0
- package/src/index.ts +52 -0
- package/src/protocol.ts +144 -0
- package/src/schema.ts +114 -0
- package/src/server.ts +159 -0
- package/src/types.ts +162 -0
- package/src/util/schema.ts +31 -0
- package/src/util/zstd.ts +49 -0
- package/src/wire/reader.ts +113 -0
- package/src/wire/request.ts +98 -0
- package/src/wire/response.ts +181 -0
- package/src/wire/writer.ts +137 -0
|
@@ -0,0 +1,137 @@
|
|
|
1
|
+
// © Copyright 2025-2026, Query.Farm LLC - https://query.farm
|
|
2
|
+
// SPDX-License-Identifier: Apache-2.0
|
|
3
|
+
|
|
4
|
+
import {
|
|
5
|
+
RecordBatchStreamWriter,
|
|
6
|
+
type RecordBatch,
|
|
7
|
+
type Schema,
|
|
8
|
+
} from "apache-arrow";
|
|
9
|
+
import { writeSync } from "node:fs";
|
|
10
|
+
|
|
11
|
+
// File descriptor 1 — standard output.
const STDOUT_FD = 1;
|
|
12
|
+
|
|
13
|
+
/**
|
|
14
|
+
* Write all bytes to a file descriptor, looping on partial writes.
|
|
15
|
+
* Handles EAGAIN (pipe buffer full) by busy-waiting with Atomics.wait().
|
|
16
|
+
* writeSync() can return fewer bytes than requested when the pipe buffer
|
|
17
|
+
* is full (e.g., 64KB limit), and throws EAGAIN on non-blocking fds.
|
|
18
|
+
*/
|
|
19
|
+
function writeAll(fd: number, data: Uint8Array): void {
|
|
20
|
+
let offset = 0;
|
|
21
|
+
while (offset < data.length) {
|
|
22
|
+
try {
|
|
23
|
+
const written = writeSync(fd, data, offset, data.length - offset);
|
|
24
|
+
if (written <= 0) throw new Error(`writeSync returned ${written}`);
|
|
25
|
+
offset += written;
|
|
26
|
+
} catch (e: any) {
|
|
27
|
+
if (e.code === "EAGAIN") {
|
|
28
|
+
// Pipe buffer full — busy-wait briefly then retry
|
|
29
|
+
Atomics.wait(new Int32Array(new SharedArrayBuffer(4)), 0, 0, 1);
|
|
30
|
+
continue;
|
|
31
|
+
}
|
|
32
|
+
throw e;
|
|
33
|
+
}
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
/**
|
|
38
|
+
* Writes sequential IPC streams to a file descriptor (e.g., stdout).
|
|
39
|
+
* Each call to writeStream() writes a complete IPC stream: schema + batches + EOS.
|
|
40
|
+
*
|
|
41
|
+
* All writes use synchronous I/O (writeSync) to avoid deadlocks when
|
|
42
|
+
* interleaving stdout writes with blocking stdin reads.
|
|
43
|
+
*/
|
|
44
|
+
export class IpcStreamWriter {
|
|
45
|
+
private readonly fd: number;
|
|
46
|
+
|
|
47
|
+
constructor(fd: number = STDOUT_FD) {
|
|
48
|
+
this.fd = fd;
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
/**
|
|
52
|
+
* Write a complete IPC stream with the given schema and batches.
|
|
53
|
+
* Creates schema message, writes all batches (with their metadata), writes EOS.
|
|
54
|
+
*/
|
|
55
|
+
writeStream(schema: Schema, batches: RecordBatch[]): void {
|
|
56
|
+
const writer = new RecordBatchStreamWriter();
|
|
57
|
+
writer.reset(undefined, schema);
|
|
58
|
+
for (const batch of batches) {
|
|
59
|
+
// Use _writeRecordBatch to bypass schema comparison (see IncrementalStream.write)
|
|
60
|
+
(writer as any)._writeRecordBatch(batch);
|
|
61
|
+
}
|
|
62
|
+
writer.close();
|
|
63
|
+
const bytes = writer.toUint8Array(true);
|
|
64
|
+
writeAll(this.fd, bytes);
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
/**
|
|
68
|
+
* Open an incremental IPC stream for writing batches one at a time.
|
|
69
|
+
* Used for streaming methods where output batches are produced incrementally.
|
|
70
|
+
* Bytes are written synchronously after each batch.
|
|
71
|
+
*/
|
|
72
|
+
openStream(schema: Schema): IncrementalStream {
|
|
73
|
+
return new IncrementalStream(this.fd, schema);
|
|
74
|
+
}
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
/**
|
|
78
|
+
* An open IPC stream that supports incremental batch writes.
|
|
79
|
+
*
|
|
80
|
+
* Uses RecordBatchStreamWriter with internal buffering (no pipe to stdout).
|
|
81
|
+
* After each operation, drains the writer's internal AsyncByteQueue buffer
|
|
82
|
+
* and writes bytes synchronously via writeAll(). This avoids deadlocks
|
|
83
|
+
* caused by Node.js async stream piping when stdin reads block before
|
|
84
|
+
* stdout writes flush through the event loop.
|
|
85
|
+
*/
|
|
86
|
+
export class IncrementalStream {
|
|
87
|
+
private writer: RecordBatchStreamWriter;
|
|
88
|
+
private readonly fd: number;
|
|
89
|
+
private closed = false;
|
|
90
|
+
|
|
91
|
+
constructor(fd: number, schema: Schema) {
|
|
92
|
+
this.fd = fd;
|
|
93
|
+
this.writer = new RecordBatchStreamWriter();
|
|
94
|
+
// Buffer internally (no sink) — we drain manually via writeAll
|
|
95
|
+
this.writer.reset(undefined, schema);
|
|
96
|
+
this.drain();
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
/**
|
|
100
|
+
* Write a single batch to the stream. Bytes are flushed synchronously.
|
|
101
|
+
*
|
|
102
|
+
* Uses _writeRecordBatch() directly to bypass the Arrow writer's schema
|
|
103
|
+
* comparison in write(). The public write() method calls compareSchemas()
|
|
104
|
+
* and auto-closes the writer if the batch's schema differs (e.g., in
|
|
105
|
+
* nullability), silently dropping the batch. Since our output schema is
|
|
106
|
+
* set at stream open time and all batches are structurally compatible,
|
|
107
|
+
* we skip the comparison.
|
|
108
|
+
*/
|
|
109
|
+
write(batch: RecordBatch): void {
|
|
110
|
+
if (this.closed) throw new Error("Stream already closed");
|
|
111
|
+
(this.writer as any)._writeRecordBatch(batch);
|
|
112
|
+
this.drain();
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
/**
|
|
116
|
+
* Close the stream (writes EOS marker synchronously).
|
|
117
|
+
*/
|
|
118
|
+
close(): void {
|
|
119
|
+
if (this.closed) return;
|
|
120
|
+
this.closed = true;
|
|
121
|
+
// EOS marker: continuation (0xFFFFFFFF) + metadata length (0x00000000)
|
|
122
|
+
const eos = new Uint8Array(new Int32Array([-1, 0]).buffer);
|
|
123
|
+
writeAll(this.fd, eos);
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
/**
|
|
127
|
+
* Drain buffered bytes from the Arrow writer's internal queue
|
|
128
|
+
* and write them synchronously to the output fd.
|
|
129
|
+
*/
|
|
130
|
+
private drain(): void {
|
|
131
|
+
const values = (this.writer as any)._sink._values as Uint8Array[];
|
|
132
|
+
for (const chunk of values) {
|
|
133
|
+
writeAll(this.fd, chunk);
|
|
134
|
+
}
|
|
135
|
+
values.length = 0;
|
|
136
|
+
}
|
|
137
|
+
}
|