@query-farm/vgi-rpc 0.3.1 → 0.3.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. package/dist/client/connect.d.ts.map +1 -1
  2. package/dist/client/index.d.ts +3 -3
  3. package/dist/client/index.d.ts.map +1 -1
  4. package/dist/client/introspect.d.ts +1 -1
  5. package/dist/client/introspect.d.ts.map +1 -1
  6. package/dist/client/ipc.d.ts +1 -1
  7. package/dist/client/ipc.d.ts.map +1 -1
  8. package/dist/client/pipe.d.ts +1 -1
  9. package/dist/client/pipe.d.ts.map +1 -1
  10. package/dist/client/stream.d.ts.map +1 -1
  11. package/dist/dispatch/describe.d.ts +1 -1
  12. package/dist/dispatch/describe.d.ts.map +1 -1
  13. package/dist/dispatch/stream.d.ts +1 -1
  14. package/dist/dispatch/stream.d.ts.map +1 -1
  15. package/dist/dispatch/unary.d.ts.map +1 -1
  16. package/dist/http/common.d.ts +1 -1
  17. package/dist/http/common.d.ts.map +1 -1
  18. package/dist/http/dispatch.d.ts.map +1 -1
  19. package/dist/http/handler.d.ts.map +1 -1
  20. package/dist/http/index.d.ts +2 -2
  21. package/dist/http/index.d.ts.map +1 -1
  22. package/dist/http/token.d.ts.map +1 -1
  23. package/dist/http/types.d.ts.map +1 -1
  24. package/dist/index.d.ts +7 -7
  25. package/dist/index.d.ts.map +1 -1
  26. package/dist/index.js +2171 -2170
  27. package/dist/index.js.map +26 -25
  28. package/dist/protocol.d.ts +1 -1
  29. package/dist/protocol.d.ts.map +1 -1
  30. package/dist/schema.d.ts +1 -1
  31. package/dist/schema.d.ts.map +1 -1
  32. package/dist/server.d.ts +1 -1
  33. package/dist/server.d.ts.map +1 -1
  34. package/dist/types.d.ts +1 -1
  35. package/dist/types.d.ts.map +1 -1
  36. package/dist/util/conform.d.ts +16 -0
  37. package/dist/util/conform.d.ts.map +1 -0
  38. package/dist/util/zstd.d.ts.map +1 -1
  39. package/dist/wire/reader.d.ts.map +1 -1
  40. package/dist/wire/request.d.ts +1 -1
  41. package/dist/wire/request.d.ts.map +1 -1
  42. package/dist/wire/response.d.ts +1 -1
  43. package/dist/wire/response.d.ts.map +1 -1
  44. package/dist/wire/writer.d.ts.map +1 -1
  45. package/package.json +6 -2
  46. package/src/client/connect.ts +12 -20
  47. package/src/client/index.ts +8 -8
  48. package/src/client/introspect.ts +11 -15
  49. package/src/client/ipc.ts +17 -31
  50. package/src/client/pipe.ts +16 -36
  51. package/src/client/stream.ts +20 -46
  52. package/src/dispatch/describe.ts +14 -26
  53. package/src/dispatch/stream.ts +21 -19
  54. package/src/dispatch/unary.ts +1 -2
  55. package/src/http/common.ts +6 -40
  56. package/src/http/dispatch.ts +36 -87
  57. package/src/http/handler.ts +20 -55
  58. package/src/http/index.ts +2 -2
  59. package/src/http/token.ts +2 -7
  60. package/src/http/types.ts +2 -6
  61. package/src/index.ts +43 -43
  62. package/src/protocol.ts +7 -7
  63. package/src/schema.ts +11 -15
  64. package/src/server.ts +14 -34
  65. package/src/types.ts +9 -36
  66. package/src/util/conform.ts +31 -0
  67. package/src/util/zstd.ts +2 -8
  68. package/src/wire/reader.ts +2 -4
  69. package/src/wire/request.ts +4 -15
  70. package/src/wire/response.ts +8 -24
  71. package/src/wire/writer.ts +1 -5
@@ -1,23 +1,11 @@
1
1
  // © Copyright 2025-2026, Query.Farm LLC - https://query.farm
2
2
  // SPDX-License-Identifier: Apache-2.0
3
3
 
4
- import {
5
- RecordBatch,
6
- Schema,
7
- Field,
8
- makeData,
9
- Struct,
10
- vectorFromArray,
11
- } from "@query-farm/apache-arrow";
4
+ import { Field, makeData, RecordBatch, Schema, Struct, vectorFromArray } from "@query-farm/apache-arrow";
12
5
  import { STATE_KEY } from "../constants.js";
13
6
  import { RpcError } from "../errors.js";
14
7
  import { ARROW_CONTENT_TYPE, serializeIpcStream } from "../http/common.js";
15
- import {
16
- inferArrowType,
17
- dispatchLogOrError,
18
- extractBatchRows,
19
- readResponseBatches,
20
- } from "./ipc.js";
8
+ import { dispatchLogOrError, extractBatchRows, inferArrowType, readResponseBatches } from "./ipc.js";
21
9
  import type { LogMessage, StreamSession } from "./types.js";
22
10
 
23
11
  type CompressFn = (data: Uint8Array, level: number) => Uint8Array;
@@ -103,11 +91,7 @@ export class HttpStreamSession implements StreamSession {
103
91
  */
104
92
  async exchange(input: Record<string, any>[]): Promise<Record<string, any>[]> {
105
93
  if (this._stateToken === null) {
106
- throw new RpcError(
107
- "ProtocolError",
108
- "Stream has finished \u2014 no state token available",
109
- "",
110
- );
94
+ throw new RpcError("ProtocolError", "Stream has finished \u2014 no state token available", "");
111
95
  }
112
96
 
113
97
  // We need to determine the input schema from the data.
@@ -122,11 +106,7 @@ export class HttpStreamSession implements StreamSession {
122
106
  const emptyBatch = this._buildEmptyBatch(zeroSchema);
123
107
  const metadata = new Map<string, string>();
124
108
  metadata.set(STATE_KEY, this._stateToken);
125
- const batchWithMeta = new RecordBatch(
126
- zeroSchema,
127
- emptyBatch.data,
128
- metadata,
129
- );
109
+ const batchWithMeta = new RecordBatch(zeroSchema, emptyBatch.data, metadata);
130
110
  return this._doExchange(zeroSchema, [batchWithMeta]);
131
111
  }
132
112
 
@@ -134,9 +114,12 @@ export class HttpStreamSession implements StreamSession {
134
114
  const keys = Object.keys(input[0]);
135
115
  const fields = keys.map((key) => {
136
116
  // Find first non-null value to infer type
137
- let sample: any = undefined;
117
+ let sample: any;
138
118
  for (const row of input) {
139
- if (row[key] != null) { sample = row[key]; break; }
119
+ if (row[key] != null) {
120
+ sample = row[key];
121
+ break;
122
+ }
140
123
  }
141
124
  const arrowType = inferArrowType(sample);
142
125
  const nullable = input.some((row) => row[key] == null);
@@ -164,19 +147,13 @@ export class HttpStreamSession implements StreamSession {
164
147
  return this._doExchange(inputSchema, [batch]);
165
148
  }
166
149
 
167
- private async _doExchange(
168
- schema: Schema,
169
- batches: RecordBatch[],
170
- ): Promise<Record<string, any>[]> {
150
+ private async _doExchange(schema: Schema, batches: RecordBatch[]): Promise<Record<string, any>[]> {
171
151
  const body = serializeIpcStream(schema, batches);
172
- const resp = await fetch(
173
- `${this._baseUrl}${this._prefix}/${this._method}/exchange`,
174
- {
175
- method: "POST",
176
- headers: this._buildHeaders(),
177
- body: this._prepareBody(body) as unknown as BodyInit,
178
- },
179
- );
152
+ const resp = await fetch(`${this._baseUrl}${this._prefix}/${this._method}/exchange`, {
153
+ method: "POST",
154
+ headers: this._buildHeaders(),
155
+ body: this._prepareBody(body) as unknown as BodyInit,
156
+ });
180
157
 
181
158
  const responseBody = await this._readResponse(resp);
182
159
  const { batches: responseBatches } = await readResponseBatches(responseBody);
@@ -279,14 +256,11 @@ export class HttpStreamSession implements StreamSession {
279
256
  const batch = new RecordBatch(emptySchema, data, metadata);
280
257
  const body = serializeIpcStream(emptySchema, [batch]);
281
258
 
282
- const resp = await fetch(
283
- `${this._baseUrl}${this._prefix}/${this._method}/exchange`,
284
- {
285
- method: "POST",
286
- headers: this._buildHeaders(),
287
- body: this._prepareBody(body) as unknown as BodyInit,
288
- },
289
- );
259
+ const resp = await fetch(`${this._baseUrl}${this._prefix}/${this._method}/exchange`, {
260
+ method: "POST",
261
+ headers: this._buildHeaders(),
262
+ body: this._prepareBody(body) as unknown as BodyInit,
263
+ });
290
264
 
291
265
  return this._readResponse(resp);
292
266
  }
@@ -2,25 +2,25 @@
2
2
  // SPDX-License-Identifier: Apache-2.0
3
3
 
4
4
  import {
5
- Schema,
5
+ Binary,
6
+ Bool,
6
7
  Field,
8
+ makeData,
7
9
  RecordBatch,
10
+ Schema,
11
+ Struct,
8
12
  Utf8,
9
- Bool,
10
- Binary,
11
13
  vectorFromArray,
12
- makeData,
13
- Struct,
14
14
  } from "@query-farm/apache-arrow";
15
- import type { MethodDefinition } from "../types.js";
16
15
  import {
16
+ DESCRIBE_VERSION,
17
+ DESCRIBE_VERSION_KEY,
17
18
  PROTOCOL_NAME_KEY,
18
- REQUEST_VERSION_KEY,
19
19
  REQUEST_VERSION,
20
- DESCRIBE_VERSION_KEY,
21
- DESCRIBE_VERSION,
20
+ REQUEST_VERSION_KEY,
22
21
  SERVER_ID_KEY,
23
22
  } from "../constants.js";
23
+ import type { MethodDefinition } from "../types.js";
24
24
  import { serializeSchema } from "../util/schema.js";
25
25
 
26
26
  /**
@@ -48,9 +48,7 @@ export function buildDescribeBatch(
48
48
  serverId: string,
49
49
  ): { batch: RecordBatch; metadata: Map<string, string> } {
50
50
  // Sort methods by name for consistent ordering
51
- const sortedEntries = [...methods.entries()].sort(([a], [b]) =>
52
- a.localeCompare(b),
53
- );
51
+ const sortedEntries = [...methods.entries()].sort(([a], [b]) => a.localeCompare(b));
54
52
 
55
53
  const names: (string | null)[] = [];
56
54
  const methodTypes: (string | null)[] = [];
@@ -69,8 +67,7 @@ export function buildDescribeBatch(
69
67
  docs.push(method.doc ?? null);
70
68
 
71
69
  // Unary methods with non-empty result schema have a return value
72
- const hasReturn =
73
- method.type === "unary" && method.resultSchema.fields.length > 0;
70
+ const hasReturn = method.type === "unary" && method.resultSchema.fields.length > 0;
74
71
  hasReturns.push(hasReturn);
75
72
 
76
73
  paramsSchemas.push(serializeSchema(method.paramsSchema));
@@ -87,26 +84,17 @@ export function buildDescribeBatch(
87
84
  if (method.defaults && Object.keys(method.defaults).length > 0) {
88
85
  const safe: Record<string, any> = {};
89
86
  for (const [k, v] of Object.entries(method.defaults)) {
90
- if (
91
- v === null ||
92
- typeof v === "string" ||
93
- typeof v === "number" ||
94
- typeof v === "boolean"
95
- ) {
87
+ if (v === null || typeof v === "string" || typeof v === "number" || typeof v === "boolean") {
96
88
  safe[k] = v;
97
89
  }
98
90
  }
99
- paramDefaultsJsons.push(
100
- Object.keys(safe).length > 0 ? JSON.stringify(safe) : null,
101
- );
91
+ paramDefaultsJsons.push(Object.keys(safe).length > 0 ? JSON.stringify(safe) : null);
102
92
  } else {
103
93
  paramDefaultsJsons.push(null);
104
94
  }
105
95
 
106
96
  hasHeaders.push(!!method.headerSchema);
107
- headerSchemas.push(
108
- method.headerSchema ? serializeSchema(method.headerSchema) : null,
109
- );
97
+ headerSchemas.push(method.headerSchema ? serializeSchema(method.headerSchema) : null);
110
98
  }
111
99
 
112
100
  // Build the batch using vectorFromArray for each column
@@ -4,9 +4,10 @@
4
4
  import { Schema } from "@query-farm/apache-arrow";
5
5
  import type { MethodDefinition } from "../types.js";
6
6
  import { OutputCollector } from "../types.js";
7
- import type { IpcStreamWriter } from "../wire/writer.js";
7
+ import { conformBatchToSchema } from "../util/conform.js";
8
8
  import type { IpcStreamReader } from "../wire/reader.js";
9
- import { buildResultBatch, buildErrorBatch } from "../wire/response.js";
9
+ import { buildErrorBatch, buildResultBatch } from "../wire/response.js";
10
+ import type { IpcStreamWriter } from "../wire/writer.js";
10
11
 
11
12
  const EMPTY_SCHEMA = new Schema([]);
12
13
 
@@ -70,16 +71,8 @@ export async function dispatchStream(
70
71
  try {
71
72
  const headerOut = new OutputCollector(method.headerSchema, true, serverId, requestId);
72
73
  const headerValues = method.headerInit(params, state, headerOut);
73
- const headerBatch = buildResultBatch(
74
- method.headerSchema,
75
- headerValues,
76
- serverId,
77
- requestId,
78
- );
79
- const headerBatches = [
80
- ...headerOut.batches.map((b) => b.batch),
81
- headerBatch,
82
- ];
74
+ const headerBatch = buildResultBatch(method.headerSchema, headerValues, serverId, requestId);
75
+ const headerBatches = [...headerOut.batches.map((b) => b.batch), headerBatch];
83
76
  writer.writeStream(method.headerSchema, headerBatches);
84
77
  } catch (error: any) {
85
78
  const errBatch = buildErrorBatch(method.headerSchema, error, serverId, requestId);
@@ -96,12 +89,7 @@ export async function dispatchStream(
96
89
  // Open the input IPC stream (ticks or data from client)
97
90
  const inputSchema = await reader.openNextStream();
98
91
  if (!inputSchema) {
99
- const errBatch = buildErrorBatch(
100
- outputSchema,
101
- new Error("Expected input stream but got EOF"),
102
- serverId,
103
- requestId,
104
- );
92
+ const errBatch = buildErrorBatch(outputSchema, new Error("Expected input stream but got EOF"), serverId, requestId);
105
93
  writer.writeStream(outputSchema, [errBatch]);
106
94
  return;
107
95
  }
@@ -111,11 +99,25 @@ export async function dispatchStream(
111
99
  // same stream. We use IncrementalStream which writes bytes synchronously.
112
100
  const stream = writer.openStream(outputSchema);
113
101
 
102
+ // Expected input schema for casting compatible types (e.g., decimal→double)
103
+ const expectedInputSchema = method.inputSchema;
104
+
114
105
  try {
115
106
  while (true) {
116
- const inputBatch = await reader.readNextBatch();
107
+ let inputBatch = await reader.readNextBatch();
117
108
  if (!inputBatch) break;
118
109
 
110
+ // Cast compatible input types when schema doesn't match exactly
111
+ if (expectedInputSchema && !isProducer && inputBatch.schema !== expectedInputSchema) {
112
+ try {
113
+ inputBatch = conformBatchToSchema(inputBatch, expectedInputSchema);
114
+ } catch {
115
+ throw new TypeError(
116
+ `Input schema mismatch: expected ${expectedInputSchema}, got ${inputBatch.schema}`,
117
+ );
118
+ }
119
+ }
120
+
119
121
  const out = new OutputCollector(outputSchema, effectiveProducer, serverId, requestId);
120
122
 
121
123
  if (isProducer) {
@@ -1,11 +1,10 @@
1
1
  // © Copyright 2025-2026, Query.Farm LLC - https://query.farm
2
2
  // SPDX-License-Identifier: Apache-2.0
3
3
 
4
- import { Schema } from "@query-farm/apache-arrow";
5
4
  import type { MethodDefinition } from "../types.js";
6
5
  import { OutputCollector } from "../types.js";
6
+ import { buildErrorBatch, buildResultBatch } from "../wire/response.js";
7
7
  import type { IpcStreamWriter } from "../wire/writer.js";
8
- import { buildResultBatch, buildErrorBatch } from "../wire/response.js";
9
8
 
10
9
  /**
11
10
  * Dispatch a unary RPC call.
@@ -2,13 +2,12 @@
2
2
  // SPDX-License-Identifier: Apache-2.0
3
3
 
4
4
  import {
5
- RecordBatchStreamWriter,
5
+ type RecordBatch,
6
6
  RecordBatchReader,
7
- RecordBatch,
8
- Schema,
9
- Struct,
10
- makeData,
7
+ RecordBatchStreamWriter,
8
+ type Schema,
11
9
  } from "@query-farm/apache-arrow";
10
+ import { conformBatchToSchema } from "../util/conform.js";
12
11
 
13
12
  export const ARROW_CONTENT_TYPE = "application/vnd.apache.arrow.stream";
14
13
 
@@ -22,39 +21,8 @@ export class HttpRpcError extends Error {
22
21
  }
23
22
  }
24
23
 
25
- /**
26
- * Rebuild a batch's data to match the given schema's field types.
27
- *
28
- * Batches deserialized from IPC streams (e.g., from PyArrow) may use generic
29
- * types (Float) instead of specific ones (Float64). Arrow-JS's
30
- * RecordBatchStreamWriter silently drops batches whose child Data types don't
31
- * match the writer's schema. Cloning each child Data with the schema's field
32
- * type fixes the type metadata while preserving the underlying buffers.
33
- */
34
- function conformBatchToSchema(
35
- batch: RecordBatch,
36
- schema: Schema,
37
- ): RecordBatch {
38
- if (batch.numRows === 0) return batch;
39
- const children = schema.fields.map((f, i) =>
40
- batch.data.children[i].clone(f.type),
41
- );
42
- const structType = new Struct(schema.fields);
43
- const data = makeData({
44
- type: structType,
45
- length: batch.numRows,
46
- children,
47
- nullCount: batch.data.nullCount,
48
- nullBitmap: batch.data.nullBitmap,
49
- });
50
- return new RecordBatch(schema, data, batch.metadata);
51
- }
52
-
53
24
  /** Serialize a schema + batches into a complete IPC stream as Uint8Array. */
54
- export function serializeIpcStream(
55
- schema: Schema,
56
- batches: RecordBatch[],
57
- ): Uint8Array {
25
+ export function serializeIpcStream(schema: Schema, batches: RecordBatch[]): Uint8Array {
58
26
  const writer = new RecordBatchStreamWriter();
59
27
  writer.reset(undefined, schema);
60
28
  for (const batch of batches) {
@@ -72,9 +40,7 @@ export function arrowResponse(body: Uint8Array, status = 200, extraHeaders?: Hea
72
40
  }
73
41
 
74
42
  /** Read schema + first batch from an IPC stream body. */
75
- export async function readRequestFromBody(
76
- body: Uint8Array,
77
- ): Promise<{ schema: Schema; batch: RecordBatch }> {
43
+ export async function readRequestFromBody(body: Uint8Array): Promise<{ schema: Schema; batch: RecordBatch }> {
78
44
  const reader = await RecordBatchReader.from(body);
79
45
  await reader.open();
80
46
  const schema = reader.schema;
@@ -1,24 +1,16 @@
1
1
  // © Copyright 2025-2026, Query.Farm LLC - https://query.farm
2
2
  // SPDX-License-Identifier: Apache-2.0
3
3
 
4
- import { Schema, RecordBatch, RecordBatchReader } from "@query-farm/apache-arrow";
4
+ import { RecordBatch, RecordBatchReader, Schema } from "@query-farm/apache-arrow";
5
+ import { STATE_KEY } from "../constants.js";
6
+ import { buildDescribeBatch, DESCRIBE_SCHEMA } from "../dispatch/describe.js";
5
7
  import type { MethodDefinition } from "../types.js";
6
8
  import { OutputCollector } from "../types.js";
7
- import { parseRequest } from "../wire/request.js";
8
- import {
9
- buildResultBatch,
10
- buildErrorBatch,
11
- buildEmptyBatch,
12
- } from "../wire/response.js";
13
- import { buildDescribeBatch, DESCRIBE_SCHEMA } from "../dispatch/describe.js";
14
- import { STATE_KEY } from "../constants.js";
9
+ import { conformBatchToSchema } from "../util/conform.js";
15
10
  import { serializeSchema } from "../util/schema.js";
16
- import {
17
- HttpRpcError,
18
- serializeIpcStream,
19
- readRequestFromBody,
20
- arrowResponse,
21
- } from "./common.js";
11
+ import { parseRequest } from "../wire/request.js";
12
+ import { buildEmptyBatch, buildErrorBatch, buildResultBatch } from "../wire/response.js";
13
+ import { arrowResponse, HttpRpcError, readRequestFromBody, serializeIpcStream } from "./common.js";
22
14
  import { packStateToken, unpackStateToken } from "./token.js";
23
15
  import type { StateSerializer } from "./types.js";
24
16
 
@@ -60,10 +52,7 @@ export async function httpDispatchUnary(
60
52
  const parsed = parseRequest(reqSchema, reqBatch);
61
53
 
62
54
  if (parsed.methodName !== method.name) {
63
- throw new HttpRpcError(
64
- `Method name in request '${parsed.methodName}' does not match URL '${method.name}'`,
65
- 400,
66
- );
55
+ throw new HttpRpcError(`Method name in request '${parsed.methodName}' does not match URL '${method.name}'`, 400);
67
56
  }
68
57
 
69
58
  const out = new OutputCollector(schema, true, ctx.serverId, parsed.requestId);
@@ -93,10 +82,7 @@ export async function httpDispatchStreamInit(
93
82
  const parsed = parseRequest(reqSchema, reqBatch);
94
83
 
95
84
  if (parsed.methodName !== method.name) {
96
- throw new HttpRpcError(
97
- `Method name in request '${parsed.methodName}' does not match URL '${method.name}'`,
98
- 400,
99
- );
85
+ throw new HttpRpcError(`Method name in request '${parsed.methodName}' does not match URL '${method.name}'`, 400);
100
86
  }
101
87
 
102
88
  // Init state
@@ -121,31 +107,13 @@ export async function httpDispatchStreamInit(
121
107
  let headerBytes: Uint8Array | null = null;
122
108
  if (method.headerSchema && method.headerInit) {
123
109
  try {
124
- const headerOut = new OutputCollector(
125
- method.headerSchema,
126
- true,
127
- ctx.serverId,
128
- parsed.requestId,
129
- );
110
+ const headerOut = new OutputCollector(method.headerSchema, true, ctx.serverId, parsed.requestId);
130
111
  const headerValues = method.headerInit(parsed.params, state, headerOut);
131
- const headerBatch = buildResultBatch(
132
- method.headerSchema,
133
- headerValues,
134
- ctx.serverId,
135
- parsed.requestId,
136
- );
137
- const headerBatches = [
138
- ...headerOut.batches.map((b) => b.batch),
139
- headerBatch,
140
- ];
112
+ const headerBatch = buildResultBatch(method.headerSchema, headerValues, ctx.serverId, parsed.requestId);
113
+ const headerBatches = [...headerOut.batches.map((b) => b.batch), headerBatch];
141
114
  headerBytes = serializeIpcStream(method.headerSchema, headerBatches);
142
115
  } catch (error: any) {
143
- const errBatch = buildErrorBatch(
144
- method.headerSchema,
145
- error,
146
- ctx.serverId,
147
- parsed.requestId,
148
- );
116
+ const errBatch = buildErrorBatch(method.headerSchema, error, ctx.serverId, parsed.requestId);
149
117
  return arrowResponse(serializeIpcStream(method.headerSchema, [errBatch]), 500);
150
118
  }
151
119
  }
@@ -154,26 +122,13 @@ export async function httpDispatchStreamInit(
154
122
  // Producer method — produce data inline in the init response.
155
123
  // For exchange-registered methods acting as producers (__isProducer),
156
124
  // produceStreamResponse falls back to exchangeFn with tick batches.
157
- return produceStreamResponse(
158
- method,
159
- state,
160
- resolvedOutputSchema,
161
- inputSchema,
162
- ctx,
163
- parsed.requestId,
164
- headerBytes,
165
- );
125
+ return produceStreamResponse(method, state, resolvedOutputSchema, inputSchema, ctx, parsed.requestId, headerBytes);
166
126
  } else {
167
127
  // Exchange: serialize state into signed token, return zero-row batch with token
168
128
  const stateBytes = ctx.stateSerializer.serialize(state);
169
129
  const schemaBytes = serializeSchema(resolvedOutputSchema);
170
130
  const inputSchemaBytes = serializeSchema(inputSchema);
171
- const token = packStateToken(
172
- stateBytes,
173
- schemaBytes,
174
- inputSchemaBytes,
175
- ctx.signingKey,
176
- );
131
+ const token = packStateToken(stateBytes, schemaBytes, inputSchemaBytes, ctx.signingKey);
177
132
 
178
133
  const tokenMeta = new Map<string, string>();
179
134
  tokenMeta.set(STATE_KEY, token);
@@ -207,7 +162,7 @@ export async function httpDispatchStreamExchange(
207
162
  throw new HttpRpcError("Missing state token in exchange request", 400);
208
163
  }
209
164
 
210
- let unpacked;
165
+ let unpacked: import("./token.js").UnpackedToken;
211
166
  try {
212
167
  unpacked = unpackStateToken(tokenBase64, ctx.signingKey, ctx.tokenTtl);
213
168
  } catch (error: any) {
@@ -237,34 +192,37 @@ export async function httpDispatchStreamExchange(
237
192
  inputSchema = method.inputSchema ?? EMPTY_SCHEMA;
238
193
  }
239
194
  const effectiveProducer = state?.__isProducer ?? isProducer;
240
- if (process.env.VGI_DISPATCH_DEBUG) console.error(`[httpDispatchStreamExchange] method=${method.name} effectiveProducer=${effectiveProducer} stateKeys=${Object.keys(state || {})}`);
195
+ if (process.env.VGI_DISPATCH_DEBUG)
196
+ console.error(
197
+ `[httpDispatchStreamExchange] method=${method.name} effectiveProducer=${effectiveProducer} stateKeys=${Object.keys(state || {})}`,
198
+ );
241
199
 
242
200
  if (effectiveProducer) {
243
201
  // Producer continuation — produce more data inline.
244
202
  // For exchange-registered methods, falls back to exchangeFn with tick batches.
245
- return produceStreamResponse(
246
- method,
247
- state,
248
- outputSchema,
249
- inputSchema,
250
- ctx,
251
- null,
252
- null,
253
- );
203
+ return produceStreamResponse(method, state, outputSchema, inputSchema, ctx, null, null);
254
204
  } else {
255
205
  // Exchange path — also handles exchange-registered methods acting as
256
206
  // producers (__isProducer=true). Use producer mode on the OutputCollector
257
207
  // when effectiveProducer so finish() is allowed.
258
208
  const out = new OutputCollector(outputSchema, effectiveProducer, ctx.serverId, null);
259
209
 
210
+ // Cast compatible input types (e.g., decimal→double, int32→int64)
211
+ const conformedBatch = conformBatchToSchema(reqBatch, inputSchema);
212
+
260
213
  try {
261
214
  if (method.exchangeFn) {
262
- await method.exchangeFn(state, reqBatch, out);
215
+ await method.exchangeFn(state, conformedBatch, out);
263
216
  } else {
264
217
  await method.producerFn!(state, out);
265
218
  }
266
219
  } catch (error: any) {
267
- if (process.env.VGI_DISPATCH_DEBUG) console.error(`[httpDispatchStreamExchange] exchange handler error:`, error.message, error.stack?.split('\n').slice(0,5).join('\n'));
220
+ if (process.env.VGI_DISPATCH_DEBUG)
221
+ console.error(
222
+ `[httpDispatchStreamExchange] exchange handler error:`,
223
+ error.message,
224
+ error.stack?.split("\n").slice(0, 5).join("\n"),
225
+ );
268
226
  const errBatch = buildErrorBatch(outputSchema, error, ctx.serverId, null);
269
227
  return arrowResponse(serializeIpcStream(outputSchema, [errBatch]), 500);
270
228
  }
@@ -283,12 +241,7 @@ export async function httpDispatchStreamExchange(
283
241
  const stateBytes = ctx.stateSerializer.serialize(state);
284
242
  const schemaBytes = serializeSchema(outputSchema);
285
243
  const inputSchemaBytes = serializeSchema(inputSchema);
286
- const token = packStateToken(
287
- stateBytes,
288
- schemaBytes,
289
- inputSchemaBytes,
290
- ctx.signingKey,
291
- );
244
+ const token = packStateToken(stateBytes, schemaBytes, inputSchemaBytes, ctx.signingKey);
292
245
 
293
246
  for (const emitted of out.batches) {
294
247
  const batch = emitted.batch;
@@ -304,7 +257,7 @@ export async function httpDispatchStreamExchange(
304
257
  // Safety net: if no batch carries a state token (e.g. all rows were
305
258
  // filtered out by pushdown filters), emit an empty batch with the
306
259
  // token so the client knows to continue exchanging.
307
- if (!batches.some(b => b.metadata?.get(STATE_KEY))) {
260
+ if (!batches.some((b) => b.metadata?.get(STATE_KEY))) {
308
261
  const tokenMeta = new Map<string, string>();
309
262
  tokenMeta.set(STATE_KEY, token);
310
263
  batches.push(buildEmptyBatch(outputSchema, tokenMeta));
@@ -344,7 +297,8 @@ async function produceStreamResponse(
344
297
  await method.exchangeFn!(state, tickBatch, out);
345
298
  }
346
299
  } catch (error: any) {
347
- if (process.env.VGI_DISPATCH_DEBUG) console.error(`[produceStreamResponse] error:`, error.message, error.stack?.split('\n').slice(0,3).join('\n'));
300
+ if (process.env.VGI_DISPATCH_DEBUG)
301
+ console.error(`[produceStreamResponse] error:`, error.message, error.stack?.split("\n").slice(0, 3).join("\n"));
348
302
  allBatches.push(buildErrorBatch(outputSchema, error, ctx.serverId, requestId));
349
303
  break;
350
304
  }
@@ -365,12 +319,7 @@ async function produceStreamResponse(
365
319
  const stateBytes = ctx.stateSerializer.serialize(state);
366
320
  const schemaBytes = serializeSchema(outputSchema);
367
321
  const inputSchemaBytes = serializeSchema(inputSchema);
368
- const token = packStateToken(
369
- stateBytes,
370
- schemaBytes,
371
- inputSchemaBytes,
372
- ctx.signingKey,
373
- );
322
+ const token = packStateToken(stateBytes, schemaBytes, inputSchemaBytes, ctx.signingKey);
374
323
  const tokenMeta = new Map<string, string>();
375
324
  tokenMeta.set(STATE_KEY, token);
376
325
  allBatches.push(buildEmptyBatch(outputSchema, tokenMeta));