@query-farm/vgi-rpc 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.md +191 -0
- package/README.md +332 -0
- package/dist/client/connect.d.ts +10 -0
- package/dist/client/connect.d.ts.map +1 -0
- package/dist/client/index.d.ts +6 -0
- package/dist/client/index.d.ts.map +1 -0
- package/dist/client/introspect.d.ts +30 -0
- package/dist/client/introspect.d.ts.map +1 -0
- package/dist/client/ipc.d.ts +34 -0
- package/dist/client/ipc.d.ts.map +1 -0
- package/dist/client/pipe.d.ts +63 -0
- package/dist/client/pipe.d.ts.map +1 -0
- package/dist/client/stream.d.ts +52 -0
- package/dist/client/stream.d.ts.map +1 -0
- package/dist/client/types.d.ts +25 -0
- package/dist/client/types.d.ts.map +1 -0
- package/dist/constants.d.ts +15 -0
- package/dist/constants.d.ts.map +1 -0
- package/dist/dispatch/describe.d.ts +14 -0
- package/dist/dispatch/describe.d.ts.map +1 -0
- package/dist/dispatch/stream.d.ts +20 -0
- package/dist/dispatch/stream.d.ts.map +1 -0
- package/dist/dispatch/unary.d.ts +9 -0
- package/dist/dispatch/unary.d.ts.map +1 -0
- package/dist/errors.d.ts +12 -0
- package/dist/errors.d.ts.map +1 -0
- package/dist/http/common.d.ts +16 -0
- package/dist/http/common.d.ts.map +1 -0
- package/dist/http/dispatch.d.ts +18 -0
- package/dist/http/dispatch.d.ts.map +1 -0
- package/dist/http/handler.d.ts +16 -0
- package/dist/http/handler.d.ts.map +1 -0
- package/dist/http/index.d.ts +4 -0
- package/dist/http/index.d.ts.map +1 -0
- package/dist/http/token.d.ts +24 -0
- package/dist/http/token.d.ts.map +1 -0
- package/dist/http/types.d.ts +30 -0
- package/dist/http/types.d.ts.map +1 -0
- package/dist/index.d.ts +9 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +2493 -0
- package/dist/index.js.map +34 -0
- package/dist/protocol.d.ts +62 -0
- package/dist/protocol.d.ts.map +1 -0
- package/dist/schema.d.ts +38 -0
- package/dist/schema.d.ts.map +1 -0
- package/dist/server.d.ts +19 -0
- package/dist/server.d.ts.map +1 -0
- package/dist/types.d.ts +71 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/util/schema.d.ts +20 -0
- package/dist/util/schema.d.ts.map +1 -0
- package/dist/util/zstd.d.ts +5 -0
- package/dist/util/zstd.d.ts.map +1 -0
- package/dist/wire/reader.d.ts +40 -0
- package/dist/wire/reader.d.ts.map +1 -0
- package/dist/wire/request.d.ts +15 -0
- package/dist/wire/request.d.ts.map +1 -0
- package/dist/wire/response.d.ts +25 -0
- package/dist/wire/response.d.ts.map +1 -0
- package/dist/wire/writer.d.ts +59 -0
- package/dist/wire/writer.d.ts.map +1 -0
- package/package.json +32 -0
- package/src/client/connect.ts +310 -0
- package/src/client/index.ts +14 -0
- package/src/client/introspect.ts +138 -0
- package/src/client/ipc.ts +225 -0
- package/src/client/pipe.ts +661 -0
- package/src/client/stream.ts +297 -0
- package/src/client/types.ts +31 -0
- package/src/constants.ts +22 -0
- package/src/dispatch/describe.ts +155 -0
- package/src/dispatch/stream.ts +151 -0
- package/src/dispatch/unary.ts +35 -0
- package/src/errors.ts +22 -0
- package/src/http/common.ts +89 -0
- package/src/http/dispatch.ts +340 -0
- package/src/http/handler.ts +247 -0
- package/src/http/index.ts +6 -0
- package/src/http/token.ts +149 -0
- package/src/http/types.ts +49 -0
- package/src/index.ts +52 -0
- package/src/protocol.ts +144 -0
- package/src/schema.ts +114 -0
- package/src/server.ts +159 -0
- package/src/types.ts +162 -0
- package/src/util/schema.ts +31 -0
- package/src/util/zstd.ts +49 -0
- package/src/wire/reader.ts +113 -0
- package/src/wire/request.ts +98 -0
- package/src/wire/response.ts +181 -0
- package/src/wire/writer.ts +137 -0
|
@@ -0,0 +1,138 @@
|
|
|
1
|
+
// © Copyright 2025-2026, Query.Farm LLC - https://query.farm
|
|
2
|
+
// SPDX-License-Identifier: Apache-2.0
|
|
3
|
+
|
|
4
|
+
import { RecordBatchReader, RecordBatch, type Schema } from "apache-arrow";
|
|
5
|
+
import {
|
|
6
|
+
DESCRIBE_METHOD_NAME,
|
|
7
|
+
PROTOCOL_NAME_KEY,
|
|
8
|
+
DESCRIBE_VERSION_KEY,
|
|
9
|
+
} from "../constants.js";
|
|
10
|
+
import { ARROW_CONTENT_TYPE } from "../http/common.js";
|
|
11
|
+
import { buildRequestIpc, readResponseBatches, dispatchLogOrError } from "./ipc.js";
|
|
12
|
+
import { Schema as ArrowSchema } from "apache-arrow";
|
|
13
|
+
import type { LogMessage } from "./types.js";
|
|
14
|
+
|
|
15
|
+
/** Metadata for a single RPC method, as reported by a __describe__ response. */
export interface MethodInfo {
  /** Method name as registered on the server. */
  name: string;
  /** Dispatch style: single request/response ("unary") or streaming ("stream"). */
  type: "unary" | "stream";
  /** Arrow schema of the request parameters (params_schema_ipc column). */
  paramsSchema: Schema;
  /** Arrow schema of the result; for stream methods this holds the output schema. */
  resultSchema: Schema;
  /** Input stream schema — NOTE(review): not populated by parseDescribeResponse in this file; confirm producer. */
  inputSchema?: Schema;
  /** Output stream schema; set for stream methods (aliases resultSchema). */
  outputSchema?: Schema;
  /** Header schema, present when the server reports has_header with header bytes. */
  headerSchema?: Schema;
  /** Human-readable method documentation, when provided. */
  doc?: string;
  /** Parameter name → declared type string, parsed from param_types_json. */
  paramTypes?: Record<string, string>;
  /** Parameter name → default value, parsed from param_defaults_json. */
  defaults?: Record<string, any>;
}
|
|
27
|
+
|
|
28
|
+
/** Result of introspecting a service via the __describe__ method. */
export interface ServiceDescription {
  /** Protocol name from the response batch metadata ("" when absent). */
  protocolName: string;
  /** One entry per method reported by the server. */
  methods: MethodInfo[];
}
|
|
32
|
+
|
|
33
|
+
/** Deserialize a schema from IPC bytes (schema message + EOS). */
|
|
34
|
+
async function deserializeSchema(bytes: Uint8Array): Promise<Schema> {
|
|
35
|
+
const reader = await RecordBatchReader.from(bytes);
|
|
36
|
+
await reader.open();
|
|
37
|
+
return reader.schema!;
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
/**
|
|
41
|
+
* Parse a __describe__ response from batches into a ServiceDescription.
|
|
42
|
+
* Reusable across transports (HTTP, pipe, subprocess).
|
|
43
|
+
*/
|
|
44
|
+
export async function parseDescribeResponse(
|
|
45
|
+
batches: RecordBatch[],
|
|
46
|
+
onLog?: (msg: LogMessage) => void,
|
|
47
|
+
): Promise<ServiceDescription> {
|
|
48
|
+
// Find the data batch (skip log/error batches)
|
|
49
|
+
let dataBatch = null;
|
|
50
|
+
for (const batch of batches) {
|
|
51
|
+
if (batch.numRows === 0) {
|
|
52
|
+
dispatchLogOrError(batch, onLog);
|
|
53
|
+
continue;
|
|
54
|
+
}
|
|
55
|
+
dataBatch = batch;
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
if (!dataBatch) {
|
|
59
|
+
throw new Error("Empty __describe__ response");
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
// Extract metadata from batch
|
|
63
|
+
const meta = dataBatch.metadata;
|
|
64
|
+
const protocolName = meta?.get(PROTOCOL_NAME_KEY) ?? "";
|
|
65
|
+
|
|
66
|
+
const methods: MethodInfo[] = [];
|
|
67
|
+
for (let i = 0; i < dataBatch.numRows; i++) {
|
|
68
|
+
const name = dataBatch.getChildAt(0)!.get(i) as string; // name
|
|
69
|
+
const methodType = dataBatch.getChildAt(1)!.get(i) as string; // method_type
|
|
70
|
+
const doc = dataBatch.getChildAt(2)?.get(i) as string | null; // doc
|
|
71
|
+
const hasReturn = dataBatch.getChildAt(3)!.get(i) as boolean; // has_return
|
|
72
|
+
const paramsIpc = dataBatch.getChildAt(4)!.get(i) as Uint8Array; // params_schema_ipc
|
|
73
|
+
const resultIpc = dataBatch.getChildAt(5)!.get(i) as Uint8Array; // result_schema_ipc
|
|
74
|
+
const paramTypesJson = dataBatch.getChildAt(6)?.get(i) as string | null; // param_types_json
|
|
75
|
+
const paramDefaultsJson = dataBatch.getChildAt(7)?.get(i) as string | null; // param_defaults_json
|
|
76
|
+
const hasHeader = dataBatch.getChildAt(8)!.get(i) as boolean; // has_header
|
|
77
|
+
const headerIpc = dataBatch.getChildAt(9)?.get(i) as Uint8Array | null; // header_schema_ipc
|
|
78
|
+
|
|
79
|
+
const paramsSchema = await deserializeSchema(paramsIpc);
|
|
80
|
+
const resultSchema = await deserializeSchema(resultIpc);
|
|
81
|
+
|
|
82
|
+
let paramTypes: Record<string, string> | undefined;
|
|
83
|
+
if (paramTypesJson) {
|
|
84
|
+
try { paramTypes = JSON.parse(paramTypesJson); } catch {}
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
let defaults: Record<string, any> | undefined;
|
|
88
|
+
if (paramDefaultsJson) {
|
|
89
|
+
try { defaults = JSON.parse(paramDefaultsJson); } catch {}
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
const info: MethodInfo = {
|
|
93
|
+
name,
|
|
94
|
+
type: methodType as "unary" | "stream",
|
|
95
|
+
paramsSchema,
|
|
96
|
+
resultSchema,
|
|
97
|
+
doc: doc ?? undefined,
|
|
98
|
+
paramTypes,
|
|
99
|
+
defaults,
|
|
100
|
+
};
|
|
101
|
+
|
|
102
|
+
// For stream methods, result_schema_ipc actually holds the output schema
|
|
103
|
+
if (methodType === "stream") {
|
|
104
|
+
info.outputSchema = resultSchema;
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
if (hasHeader && headerIpc) {
|
|
108
|
+
info.headerSchema = await deserializeSchema(headerIpc);
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
methods.push(info);
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
return { protocolName, methods };
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
/**
|
|
118
|
+
* Send a __describe__ request and return a ServiceDescription.
|
|
119
|
+
*/
|
|
120
|
+
export async function httpIntrospect(
|
|
121
|
+
baseUrl: string,
|
|
122
|
+
options?: { prefix?: string },
|
|
123
|
+
): Promise<ServiceDescription> {
|
|
124
|
+
const prefix = options?.prefix ?? "/vgi";
|
|
125
|
+
const emptySchema = new ArrowSchema([]);
|
|
126
|
+
const body = buildRequestIpc(emptySchema, {}, DESCRIBE_METHOD_NAME);
|
|
127
|
+
|
|
128
|
+
const response = await fetch(`${baseUrl}${prefix}/${DESCRIBE_METHOD_NAME}`, {
|
|
129
|
+
method: "POST",
|
|
130
|
+
headers: { "Content-Type": ARROW_CONTENT_TYPE },
|
|
131
|
+
body: body as unknown as BodyInit,
|
|
132
|
+
});
|
|
133
|
+
|
|
134
|
+
const responseBody = new Uint8Array(await response.arrayBuffer());
|
|
135
|
+
const { batches } = await readResponseBatches(responseBody);
|
|
136
|
+
|
|
137
|
+
return parseDescribeResponse(batches);
|
|
138
|
+
}
|
|
@@ -0,0 +1,225 @@
|
|
|
1
|
+
// © Copyright 2025-2026, Query.Farm LLC - https://query.farm
|
|
2
|
+
// SPDX-License-Identifier: Apache-2.0
|
|
3
|
+
|
|
4
|
+
import {
|
|
5
|
+
RecordBatch,
|
|
6
|
+
RecordBatchReader,
|
|
7
|
+
Schema,
|
|
8
|
+
DataType,
|
|
9
|
+
Float64,
|
|
10
|
+
Int64,
|
|
11
|
+
Utf8,
|
|
12
|
+
Bool,
|
|
13
|
+
Binary,
|
|
14
|
+
vectorFromArray,
|
|
15
|
+
makeData,
|
|
16
|
+
Struct,
|
|
17
|
+
} from "apache-arrow";
|
|
18
|
+
import {
|
|
19
|
+
RPC_METHOD_KEY,
|
|
20
|
+
REQUEST_VERSION_KEY,
|
|
21
|
+
REQUEST_VERSION,
|
|
22
|
+
LOG_LEVEL_KEY,
|
|
23
|
+
LOG_MESSAGE_KEY,
|
|
24
|
+
LOG_EXTRA_KEY,
|
|
25
|
+
} from "../constants.js";
|
|
26
|
+
import { RpcError } from "../errors.js";
|
|
27
|
+
import { serializeIpcStream } from "../http/common.js";
|
|
28
|
+
import { IpcStreamReader, type StreamMessage } from "../wire/reader.js";
|
|
29
|
+
import type { LogMessage } from "./types.js";
|
|
30
|
+
|
|
31
|
+
/** Infer an Arrow DataType from a JS value. */
|
|
32
|
+
export function inferArrowType(value: any): DataType {
|
|
33
|
+
if (typeof value === "string") return new Utf8();
|
|
34
|
+
if (typeof value === "boolean") return new Bool();
|
|
35
|
+
if (typeof value === "bigint") return new Int64();
|
|
36
|
+
if (typeof value === "number") return new Float64();
|
|
37
|
+
if (value instanceof Uint8Array) return new Binary();
|
|
38
|
+
return new Utf8(); // fallback
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
/**
|
|
42
|
+
* Recursively coerce JS values to match Arrow type expectations.
|
|
43
|
+
* Converts numbers to BigInt for Int64 fields, and recurses into Map/List types.
|
|
44
|
+
*/
|
|
45
|
+
function coerceForArrow(type: DataType, value: any): any {
|
|
46
|
+
if (value == null) return value;
|
|
47
|
+
|
|
48
|
+
// Int64: convert number → BigInt
|
|
49
|
+
if (DataType.isInt(type) && (type as any).bitWidth === 64) {
|
|
50
|
+
if (typeof value === "number") return BigInt(value);
|
|
51
|
+
return value;
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
// Map_: coerce map values recursively
|
|
55
|
+
if (DataType.isMap(type)) {
|
|
56
|
+
if (value instanceof Map) {
|
|
57
|
+
const entriesField = (type as any).children[0];
|
|
58
|
+
const valueType = entriesField.type.children[1].type;
|
|
59
|
+
const coerced = new Map();
|
|
60
|
+
for (const [k, v] of value) {
|
|
61
|
+
coerced.set(k, coerceForArrow(valueType, v));
|
|
62
|
+
}
|
|
63
|
+
return coerced;
|
|
64
|
+
}
|
|
65
|
+
return value;
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
// List: coerce elements recursively
|
|
69
|
+
if (DataType.isList(type)) {
|
|
70
|
+
if (Array.isArray(value)) {
|
|
71
|
+
const elemType = (type as any).children[0].type;
|
|
72
|
+
return value.map((v: any) => coerceForArrow(elemType, v));
|
|
73
|
+
}
|
|
74
|
+
return value;
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
return value;
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
/**
|
|
81
|
+
* Build a 1-row Arrow IPC request batch with method metadata.
|
|
82
|
+
*/
|
|
83
|
+
export function buildRequestIpc(
|
|
84
|
+
schema: Schema,
|
|
85
|
+
params: Record<string, any>,
|
|
86
|
+
method: string,
|
|
87
|
+
): Uint8Array {
|
|
88
|
+
const metadata = new Map<string, string>();
|
|
89
|
+
metadata.set(RPC_METHOD_KEY, method);
|
|
90
|
+
metadata.set(REQUEST_VERSION_KEY, REQUEST_VERSION);
|
|
91
|
+
|
|
92
|
+
if (schema.fields.length === 0) {
|
|
93
|
+
const structType = new Struct(schema.fields);
|
|
94
|
+
const data = makeData({
|
|
95
|
+
type: structType,
|
|
96
|
+
length: 1,
|
|
97
|
+
children: [],
|
|
98
|
+
nullCount: 0,
|
|
99
|
+
});
|
|
100
|
+
const batch = new RecordBatch(schema, data, metadata);
|
|
101
|
+
return serializeIpcStream(schema, [batch]);
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
const children = schema.fields.map((f) => {
|
|
105
|
+
const val = coerceForArrow(f.type, params[f.name]);
|
|
106
|
+
return vectorFromArray([val], f.type).data[0];
|
|
107
|
+
});
|
|
108
|
+
|
|
109
|
+
const structType = new Struct(schema.fields);
|
|
110
|
+
const data = makeData({
|
|
111
|
+
type: structType,
|
|
112
|
+
length: 1,
|
|
113
|
+
children,
|
|
114
|
+
nullCount: 0,
|
|
115
|
+
});
|
|
116
|
+
|
|
117
|
+
const batch = new RecordBatch(schema, data, metadata);
|
|
118
|
+
return serializeIpcStream(schema, [batch]);
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
/**
|
|
122
|
+
* Read schema + all batches from an IPC stream body.
|
|
123
|
+
*/
|
|
124
|
+
export async function readResponseBatches(
|
|
125
|
+
body: Uint8Array,
|
|
126
|
+
): Promise<{ schema: Schema; batches: RecordBatch[] }> {
|
|
127
|
+
const reader = await RecordBatchReader.from(body);
|
|
128
|
+
await reader.open();
|
|
129
|
+
const schema = reader.schema;
|
|
130
|
+
if (!schema) {
|
|
131
|
+
throw new RpcError("ProtocolError", "Empty IPC stream: no schema", "");
|
|
132
|
+
}
|
|
133
|
+
const batches = reader.readAll();
|
|
134
|
+
return { schema, batches };
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
/**
|
|
138
|
+
* Check if a zero-row batch carries log/error metadata.
|
|
139
|
+
* If EXCEPTION → throw RpcError.
|
|
140
|
+
* If other level → call onLog.
|
|
141
|
+
* Returns true if the batch was consumed as a log/error.
|
|
142
|
+
*/
|
|
143
|
+
export function dispatchLogOrError(
|
|
144
|
+
batch: RecordBatch,
|
|
145
|
+
onLog?: (msg: LogMessage) => void,
|
|
146
|
+
): boolean {
|
|
147
|
+
const meta = batch.metadata;
|
|
148
|
+
if (!meta) return false;
|
|
149
|
+
|
|
150
|
+
const level = meta.get(LOG_LEVEL_KEY);
|
|
151
|
+
if (!level) return false;
|
|
152
|
+
|
|
153
|
+
const message = meta.get(LOG_MESSAGE_KEY) ?? "";
|
|
154
|
+
|
|
155
|
+
if (level === "EXCEPTION") {
|
|
156
|
+
const extraStr = meta.get(LOG_EXTRA_KEY);
|
|
157
|
+
let errorType = "RpcError";
|
|
158
|
+
let errorMessage = message;
|
|
159
|
+
let traceback = "";
|
|
160
|
+
if (extraStr) {
|
|
161
|
+
try {
|
|
162
|
+
const extra = JSON.parse(extraStr);
|
|
163
|
+
errorType = extra.exception_type ?? "RpcError";
|
|
164
|
+
errorMessage = extra.exception_message ?? message;
|
|
165
|
+
traceback = extra.traceback ?? "";
|
|
166
|
+
} catch {}
|
|
167
|
+
}
|
|
168
|
+
throw new RpcError(errorType, errorMessage, traceback);
|
|
169
|
+
}
|
|
170
|
+
|
|
171
|
+
if (onLog) {
|
|
172
|
+
const extraStr = meta.get(LOG_EXTRA_KEY);
|
|
173
|
+
let extra: Record<string, any> | undefined;
|
|
174
|
+
if (extraStr) {
|
|
175
|
+
try {
|
|
176
|
+
extra = JSON.parse(extraStr);
|
|
177
|
+
} catch {}
|
|
178
|
+
}
|
|
179
|
+
onLog({ level, message, extra });
|
|
180
|
+
}
|
|
181
|
+
|
|
182
|
+
return true;
|
|
183
|
+
}
|
|
184
|
+
|
|
185
|
+
/**
|
|
186
|
+
* Extract all rows from a batch as Record<string, any>[].
|
|
187
|
+
* Converts BigInt to Number when safe.
|
|
188
|
+
*/
|
|
189
|
+
export function extractBatchRows(batch: RecordBatch): Record<string, any>[] {
|
|
190
|
+
const rows: Record<string, any>[] = [];
|
|
191
|
+
for (let r = 0; r < batch.numRows; r++) {
|
|
192
|
+
const row: Record<string, any> = {};
|
|
193
|
+
for (let i = 0; i < batch.schema.fields.length; i++) {
|
|
194
|
+
const field = batch.schema.fields[i];
|
|
195
|
+
let value = batch.getChildAt(i)?.get(r);
|
|
196
|
+
if (typeof value === "bigint") {
|
|
197
|
+
if (
|
|
198
|
+
value >= BigInt(Number.MIN_SAFE_INTEGER) &&
|
|
199
|
+
value <= BigInt(Number.MAX_SAFE_INTEGER)
|
|
200
|
+
) {
|
|
201
|
+
value = Number(value);
|
|
202
|
+
}
|
|
203
|
+
}
|
|
204
|
+
row[field.name] = value;
|
|
205
|
+
}
|
|
206
|
+
rows.push(row);
|
|
207
|
+
}
|
|
208
|
+
return rows;
|
|
209
|
+
}
|
|
210
|
+
|
|
211
|
+
/**
|
|
212
|
+
* Read sequential IPC streams from a response body.
|
|
213
|
+
* Returns an IpcStreamReader for reading header + data streams.
|
|
214
|
+
*/
|
|
215
|
+
export async function readSequentialStreams(
|
|
216
|
+
body: Uint8Array,
|
|
217
|
+
): Promise<IpcStreamReader> {
|
|
218
|
+
const stream = new ReadableStream<Uint8Array>({
|
|
219
|
+
start(controller) {
|
|
220
|
+
controller.enqueue(body);
|
|
221
|
+
controller.close();
|
|
222
|
+
},
|
|
223
|
+
});
|
|
224
|
+
return IpcStreamReader.create(stream);
|
|
225
|
+
}
|